repo_name stringlengths 5-100 | path stringlengths 4-231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6-947k | score float64 0-0.34 | prefix stringlengths 0-8.16k | middle stringlengths 3-512 | suffix stringlengths 0-8.17k
---|---|---|---|---|---|---|---|---
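The prefix, middle and suffix columns hold three consecutive text spans of each source file, which suggests a fill-in-the-middle style split. Below is a minimal sketch of recombining one row; it assumes each row exposes the three fields as plain strings and that concatenating them in order recovers the original file text (an assumption, not stated by the source). The example row is purely illustrative.

```python
# Minimal sketch (assumption): a row's prefix + middle + suffix,
# concatenated in order, yields the original source file text.
def reassemble(row: dict) -> str:
    return row["prefix"] + row["middle"] + row["suffix"]

# Illustrative row, not taken from the dataset.
example_row = {"prefix": "import sy", "middle": "s\nprint(sys.", "suffix": "argv)\n"}
assert reassemble(example_row) == "import sys\nprint(sys.argv)\n"
```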
SebastienPittet/cavelink | setup.py | Python | mit | 1,270 | 0
# coding: utf-8
"""
A simple module to fetch Cavelink values by parsing the HTML page of sensors.
"""
from setuptools import find_packages, setup
with open('README.rst', 'r') as f:
long_description = f.read()
setup(
name='cavelink',
version='1.1.1',
author='Sébastien Pittet',
author_email='sebastien@pittet.org',
description='Fetch Cavelink data by parsing the webpage of sensors.',
long_description=long_description,
url='https://github.com/SebastienPittet/cavelink',
keywords='speleo cave sensor',
packages=find_packages(),
license='MIT',
platforms='any',
install_requires=['python-dateutil', 'requests'],
classifiers=[
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 2',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: Other Audience'
]
)
fxstein/ISYlib-python | bin/isy_net_wol.py | Python | bsd-2-clause | 1,132 | 0.013251
#!/usr/local/bin/python3.4
"""
Simple example to send a WoL to a registared system on the ISY
if this script is call without any arg
we print a list of registared WoL systems
if we have any args we treat them as registared WoL Id's
and attempt to send a WoL packet
"""
__author__ = "Peter Shipley"
import sys
import ISY
from ISY.IsyExceptionClass import IsyResponseError, IsyValueError
def main(isy):
if len(sys.argv[1:]) > 0:
for a in sys.argv[1:] :
try :
isy.net_wol(a)
except (IsyValueError, IsyResponseError) as errormsg :
print("problem sending WOL to {!s} : {!s}".format(a, errormsg))
continue
else :
print("WOL sent to {!s}".format(a))
else :
pfmt = "{:<5}{:<16} {:<20}"
print(pfmt.format("Id", "Name", "Mac"))
print(pfmt.format("-" * 4, "-" * 20, "-" * 20))
for w in isy.net_wol_iter():
if "id" in w :
print(pfmt.format(w['id'], w['name'], w['mac']))
if __name__=="__main__":
myisy= ISY.Isy(parsearg=1)
main(myisy)
exit(0)
mdlawson/autojump | install.py | Python | gpl-3.0 | 5,781 | 0.000346
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import os
import platform
import shutil
import sys
sys.path.append('bin')
from autojump_argparse import ArgumentParser
SUPPORTED_SHELLS = ('bash', 'zsh', 'fish')
def cp(src, dest, dryrun=False):
print("copying file: %s -> %s" % (src, dest))
if not dryrun:
shutil.copy(src, dest)
def get_shell():
return os.path.basename(os.getenv('SHELL', ''))
def mkdir(path, dryrun=False):
print("creating directory:", path)
if not dryrun and not os.path.exists(path):
os.makedirs(path)
def modify_autojump_sh(etc_dir, dryrun=False):
"""Append custom installation path to autojump.sh"""
custom_install = "\
\n# check custom install \
\nif [ -s %s/autojump.${shell} ]; then \
\n\tsource %s/autojump.${shell} \
\nfi\n" % (etc_dir, etc_dir)
with open(os.path.join(etc_dir, 'autojump.sh'), 'a') as f:
f.write(custom_install)
def parse_arguments():
default_user_destdir = os.path.join(os.path.expanduser("~"), '.autojump')
default_user_prefix = ''
default_user_zshshare = 'functions'
default_system_destdir = '/'
default_system_prefix = '/usr/local'
default_system_zshshare = '/usr/share/zsh/site-functions'
parser = ArgumentParser(
description='Installs autojump globally for root users, otherwise \
installs in current user\'s home directory.')
parser.add_argument(
'-n', '--dryrun', action="store_true", default=False,
help='simulate installation')
parser.add_argument(
'-f', '--force', action="store_true", default=False,
help='skip root user, shell type, Python version checks')
parser.add_argument(
'-d', '--destdir', metavar='DIR', default=default_user_destdir,
help='set destination to DIR')
parser.add_argument(
'-p', '--prefix', metavar='DIR', default=default_user_prefix,
help='set prefix to DIR')
parser.add_argument(
'-z', '--zshshare', metavar='DIR', default=default_user_zshshare,
help='set zsh share destination to DIR')
parser.add_argument(
'-s', '--system', action="store_true", default=False,
help='install system wide for all users')
args = parser.parse_args()
if not args.force:
if sys.version_info[0] == 2 and sys.version_info[1] < 6:
print("Python v2.6+ or v3.0+ required.", file=sys.stderr)
sys.exit(1)
if get_shell() not in SUPPORTED_SHELLS:
print("Unsupported shell: %s" % os.getenv('SHELL'),
file=sys.stderr)
sys.exit(1)
if args.system and os.geteuid() != 0:
print("Please rerun as root for system-wide installation.",
file=sys.stderr)
sys.exit(1)
if args.destdir != default_user_destdir \
or args.prefix != default_user_prefix \
or args.zshshare != default_user_zshshare:
args.custom_install = True
else:
args.custom_install = False
if args.system:
if args.custom_install:
print("Custom paths incompatible with --system option.",
file=sys.stderr)
sys.exit(1)
args.destdir = default_system_destdir
args.prefix = default_system_prefix
args.zshshare = default_system_zshshare
return args
def print_post_installation_message(etc_dir):
if get_shell() == 'fish':
aj_shell = '%s/autojump.fish' % etc_dir
source_msg = "if test -f %s; . %s; end" % (aj_shell, aj_shell)
# TODO(ting|2013-12-31): check config.fish location on OSX
rcfile = '~/.config/fish/config.fish'
else:
aj_shell = '%s/autojump.sh' % etc_dir
source_msg = "[[ -s %s ]] && source %s" % (aj_shell, aj_shell)
if platform.system() == 'Darwin' and get_shell() == 'bash':
rcfile = '~/.profile'
else:
rcfile = '~/.%src' % get_shell()
print("\nPlease manually add the following line(s) to %s:" % rcfile)
print('\n\t' + source_msg)
if get_shell() == 'zsh':
print("\n\tautoload -U compinit && compinit -u")
print("\nPlease restart terminal(s) before running autojump.\n")
def main(args):
if args.dryrun:
print("Installing autojump to %s (DRYRUN)..." % args.destdir)
else:
print("Installing autojump to %s ..." % args.destdir)
bin_dir = os.path.join(args.destdir, args.prefix, 'bin')
etc_dir = os.path.join(args.destdir, 'etc/profile.d')
doc_dir = os.path.join(args.destdir, args.prefix, 'share/man/man1')
icon_dir = os.path.join(args.destdir, args.prefix, 'share/autojump')
zshshare_dir = os.path.join(args.destdir, args.zshshare)
mkdir(bin_dir, args.dryrun)
mkdir(etc_dir, args.dryrun)
mkdir(doc_dir, args.dryrun)
mkdir(icon_dir, args.dryrun)
mkdir(zshshare_dir, args.dryrun)
cp('./bin/autojump', bin_dir, args.dryrun)
cp('./bin/autojump_argparse.py', bin_dir, args.dryrun)
cp('./bin/autojump_data.py', bin_dir, args.dryrun)
cp('./bin/autojump_utils.py', bin_dir, args.dryrun)
cp('./bin/autojump.sh', etc_dir, args.dryrun)
cp('./bin/autojump.bash', etc_dir, args.dryrun)
cp('./bin/autojump.fish', etc_dir, args.dryrun)
cp('./bin/autojump.zsh', etc_dir, args.dryrun)
cp('./bin/_j', zshshare_dir, args.dryrun)
cp('./bin/icon.png', icon_dir, args.dryrun)
cp('./docs/autojump.1', doc_dir, args.dryrun)
if args.custom_install:
modify_autojump_sh(etc_dir, args.dryrun)
print_post_installation_message(etc_dir)
if __name__ == "__main__":
sys.exit(main(parse_arguments()))
EternityForest/KaithemAutomation | kaithem/data/modules/Beholder/main.py | Python | gpl-3.0 | 787 | 0.01906
## Code outside the data string, and the setup and action blocks is ignored
## If manually editing, you must reload the code. Delete the resource timestamp so kaithem knows it's new
__data__="""
{continual: false, enable: true, once: true, priority: interactive, rate-limit: 0.0,
resource-timestamp: 1645141613510257, resource-type: event}
"""
__trigger__='False'
if __name__=='__setup__':
#This code runs once when the event loads. It also runs when you save the event during the test compile
#and may run multiple times when kaithem boots due to dependancy resolution
__doc__=''
def nbr():
return(50, '<a href="/pages/Beholder/ui"><i class="icofont-castle"></i>Beholder</a>')
kaithem.web.navBarPlugins['Beholder']=nbr
def eventAction():
pass
clebergnu/autotest | client/common_lib/hosts/__init__.py | Python | gpl-2.0 | 435 | 0
# Copyright 2009 Google Inc. Released under the GPL v2
"""This is a convenience module to import all available types of hosts.
Implementation details:
You should 'import hosts' instead of importing every available host module.
"""
from autotest_lib.client.common_lib import utils
import base_classes
Host = utils.import_site_class(
__file__, "autotest_lib.client.common_lib.hosts.site_host", "SiteHost",
base_classes.Host)
zstackorg/zstack-woodpecker | integrationtest/vm/multihosts/volumes/test_snapshot_resize_vm.py | Python | apache-2.0 | 1,782 | 0.002806
'''
New Integration Test for resizing root volume.
@author: czhou25
'''
import zstackwoodpecker.test_util as test_util
import zstackwoodpecker.test_state as test_state
import zstackwoodpecker.test_lib as test_lib
import zstackwoodpecker.operations.resource_operations as res_ops
import zstackwoodpecker.operations.volume_operations as vol_ops
import zstackwoodpecker.zstack_test.zstack_test_vm as test_vm_header
import time
import os
vm = None
test_stub = test_lib.lib_get_test_stub()
test_obj_dict = test_state.TestStateDict()
def test():
global vm
vm_creation_option = test_util.VmOption()
image_name = os.environ.get('imageName_net')
l3_name = os.environ.get('l3VlanNetworkName1')
vm = test_stub.create_vm("test_resize_vm", image_name, l3_name)
test_obj_dict.add_vm(vm)
vm.check()
vm.stop()
vm.check()
vol_size = test_lib.lib_get_root_volume(vm.get_vm()).size
volume_uuid = test_lib.lib_get_root_volume(vm.get_vm()).uuid
set_size = 1024*1024*1024*5
snapshots = test_obj_dict.get_volume_snapshot(volume_uuid)
snapshots.set_utility_vm(vm)
snapshots.create_snapshot('create_snapshot1')
snapshots.check()
vol_ops.resize_volume(volume_uuid, set_size)
vm.update()
vol_size_after = test_lib.lib_get_root_volume(vm.get_vm()).size
if set_size != vol_size_after:
test_util.test_fail('Resize Root Volume failed, size = %s' % vol_size_after)
snapshots.delete()
test_obj_dict.rm_volume_snapshot(snapshots)
test_lib.lib_error_cleanup(test_obj_dict)
test_util.test_pass('Resize VM Snapshot Test Success')
#Will be called only if exception happens in test().
def error_cleanup():
test_lib.lib_error_cleanup(test_obj_dict)
mPowering/django-orb | orb/fixtures/__init__.py | Python | gpl-3.0 | 2,306 | 0.000869
# -*- coding: utf-8 -*-
"""
pytest fixtures
"""
import pytest
from django.contrib.auth.models import User
from orb.models import Category, Tag, UserProfile
from orb.peers.models import Peer
from orb.resources.tests.factory import resource_factory
pytestmark = pytest.mark.django_db
@pytest.fixture
def testing_user():
user, _ = User.objects.get_or_create(username="tester")
user.set_password("password")
user.save()
yield user
@pytest.fixture
def testing_profile(testing_user):
yield UserProfile.objects.create(user=testing_user)
@pytest.fixture()
def import_user():
user, _ = User.objects.get_or_create(username="importer")
user.set_password("password")
user.save()
yield user
@pytest.fixture
def importer_profile(import_user):
yield UserProfile.objects.create(user=import_user)
@pytest.fixture
def sample_category():
category, _ = Category.objects.get_or_create(name="test category")
yield category
@pytest.fixture
def sample_tag(sample_category, testing_user):
tag, _ = Tag.objects.get_or_create(name="test tag", defaults={
"category": sample_category,
"create_user": testing_user,
"update_user": testing_user,
})
yield tag
@pytest.fixture
def role_category():
category, _ = Category.objects.get_or_create(name="audience")
yield category
@pytest.fixture
def role_tag(role_category, testing_user):
tag, _ = Tag.objects.get_or_create(name="cadre", defaults={
"category": role_category
|
,
"create_user": testing_user,
"update_user": testing_user,
})
assert Tag.tags.roles()
yield tag
@pytest.fixture
def test_resource(testing_user):
yield resource_factory(
user=testing_user,
title=u"
|
Básica salud del recién nacido",
description=u"Básica salud del recién nacido",
)
@pytest.fixture(scope="session")
def test_peer():
peer = Peer.peers.create(name="Distant ORB", host="http://www.orb.org/")
yield peer
@pytest.fixture(scope="session")
def remote_resource(import_user, test_peer):
"""Fixture for a remotely created resource"""
yield resource_factory(
user=import_user,
title=u"A remote resource",
description=u"<p>A remote resource</p>",
source_peer=test_peer,
)
agrajaghh/duplicati | guiTests/guiTest.py | Python | lgpl-2.1 | 8,072 | 0.002602
import os
import sys
import shutil
import errno
import time
import hashlib
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions
if "TRAVIS_BUILD_NUMBER" in os.environ:
if "SAUCE_USERNAME" not in os.environ:
print "No sauce labs login credentials found. Stopping tests..."
sys.exit(0)
capabilities = {'browserName': "firefox"}
capabilities['platform'] = "Windows 7"
capabilities['version'] = "48.0"
capabilities['screenResolution'] = "1280x1024"
capabilities["build"] = os.environ["TRAVIS_BUILD_NUMBER"]
capabilities["tunnel-identifier"] = os.environ["TRAVIS_JOB_NUMBER"]
# connect to sauce labs
username = os.environ["SAUCE_USERNAME"]
access_key = os.environ["SAUCE_ACCESS_KEY"]
hub_url = "%s:%s@localhost:4445" % (username, access_key)
driver = webdriver.Remote(command_executor="http://%s/wd/hub" % hub_url, desired_capabilities=capabilities)
else:
# local
print "Using LOCAL webdriver"
profile = webdriver.FirefoxProfile()
profile.set_preference("intl.accept_languages", "en")
driver = webdriver.Firefox(profile)
driver.maximize_window()
def write_random_file(size, filename):
if not os.path.exists(os.path.dirname(filename)):
try:
os.makedirs(os.path.dirname(filename))
except OSError as exc: # Guard against race condition
if exc.errno != errno.EEXIST:
raise
with open(filename, 'wb') as fout:
fout.write(os.urandom(size))
def sha1_file(filename):
BLOCKSIZE = 65536
hasher = hashlib.sha1()
with open(filename, 'rb') as afile:
buf = afile.read(BLOCKSIZE)
while len(buf) > 0:
hasher.update(buf)
buf = afile.read(BLOCKSIZE)
return hasher.hexdigest()
def sha1_folder(folder):
sha1_dict = {}
for root, dirs, files in os.walk(folder):
for filename in files:
file_path = os.path.join(root, filename)
sha1 = sha1_file(file_path)
relative_file_path = os.path.relpath(file_path, folder)
sha1_dict.update({relative_file_path: sha1})
return sha1_dict
def wait_for_text(time, xpath, text):
WebDriverWait(driver, time).until(expected_conditions.text_to_be_present_in_element((By.XPATH, xpath), text))
BACKUP_NAME = "BackupName"
PASSWORD = "the_backup_password_is_really_long_and_safe"
SOURCE_FOLDER = os.path.abspath("duplicati_gui_test_source")
DESTINATION_FOLDER = os.path.abspath("duplicati_gui_test_destination")
DESTINATION_FOLDER_DIRECT_RESTORE = os.path.abspath("duplicati_gui_test_destination_direct_restore")
RESTORE_FOLDER = os.path.abspath("duplicati_gui_test_restore")
DIRECT_RESTORE_FOLDER = os.path.abspath("duplicati_gui_test_direct_restore")
# wait 5 seconds for duplicati server to start
time.sleep(5)
driver.implicitly_wait(10)
driver.get("http://localhost:8200/ngax/index.html")
if "Duplicati" not in driver.title:
raise Exception("Unable to load duplicati GUI!")
# Create and hash random files in the source folder
write_random_file(1024 * 1024, SOURCE_FOLDER + os.sep + "1MB.test")
write_random_file(100 * 1024, SOURCE_FOLDER + os.sep + "subfolder" + os.sep + "100KB.test")
sha1_source = sha1_folder(SOURCE_FOLDER)
# Dismiss the password request
driver.find_element_by_link_text("No, my machine has only a single account").click()
# Add new backup
driver.find_element_by_link_text("Add backup").click()
# Choose the "add new" option
driver.find_element_by_id("blank").click()
driver.find_element_by_xpath("//input[@class='submit next']").click()
# Add new backup - General page
time.sleep(1)
driver.find_element_by_id("name").send_keys(BACKUP_NAME)
driver.find_element_by_id("passphrase").send_keys(PASSWORD)
driver.find_element_by_id("repeat-passphrase").send_keys(PASSWORD)
driver.find_element_by_id("nextStep1").click()
# Add new backup - Destination page
driver.find_element_by_link_text("Manually type path").click()
driver.find_element_by_id("file_path").send_keys(DESTINATION_FOLDER)
driver.find_element_by_id("nextStep2").click()
# Add new backup - Source Data page
driver.find_element_by_id("sourcePath").send_keys(os.path.abspath(SOURCE_FOLDER) + os.sep)
driver.find_element_by_id("sourceFolderPathAdd").click()
driver.find_element_by_id("nextStep3").click()
# Add new backup - Schedule page
useScheduleRun = driver.find_element_by_id("useScheduleRun")
if useScheduleRun.is_selected():
useScheduleRun.click()
driver.find_element_by_id("nextStep4").click()
# Add new backup - Options page
driver.find_element_by_id("save").click()
# Run the backup job and wait for finish
driver.find_element_by_link_text(BACKUP_NAME).click()
[n for n in driver.find_elements_by_xpath("//dl[@class='taskmenu']/dd/p/span[contains(text(),'Run now')]") if n.is_displayed()][0].click()
wait_for_text(60, "//div[@class='task ng-scope']/dl[2]/dd[1]", "(took ")
# Restore
if len([n for n in driver.find_elements_by_xpath("//span[contains(text(),'Restore files ...')]") if n.is_displayed()]) == 0:
driver.find_element_by_link_text(BACKUP_NAME).click()
[n for n in driver.find_elements_by_xpath("//span[contains(text(),'Restore files ...')]") if n.is_displayed()][0].click()
driver.find_element_by_xpath("//span[contains(text(),'" + SOURCE_FOLDER + "')]") # wait for filelist
time.sleep(1)
driver.find_element_by_xpath("//restore-file-picker/ul/li/div/a[2]").click() # select root folder checkbox
driver.find_element_by_xpath("//form[@id='restore']/div[1]/div[@class='buttons']/a/span[contains(text(), 'Continue')]").click()
driver.find_element_by_id("restoretonewpath").click()
driver.find_element_by_id("restore_path").send_keys(RESTORE_FOLDER)
driver.find_element_by_xpath("//form[@id='restore']/div/div[@class='buttons']/a/span[contains(text(),'Restore')]").click()
# wait for restore to finish
wait_for_text(60, "//form[@id='restore']/div[3]/h3/div[1]", "Your files and folders have been restored successfully.")
# hash restored files
sha1_restore = sha1_folder(RESTORE_FOLDER)
# cleanup: delete source and restore folder and rename destination folder for direct restore
shutil.rmtree(SOURCE_FOLDER)
shutil.rmtree(RESTORE_FOLDER)
os.rename(DESTINATION_FOLDER, DESTINATION_FOLDER_DIRECT_RESTORE)
# direct restore
driver.find_element_by_link_text("Restore").click()
# Choose the "restore direct" option
driver.find_element_by_id("direct").click()
driver.find_element_by_xpath("//input[@class='submit next']").click()
time.sleep(1)
driver.find_element_by_link_text("Manually type path").click()
driver.find_element_by_id("file_path").send_keys(DESTINATION_FOLDER_DIRECT_RESTORE)
driver.find_element_by_id("nextStep1").click()
driver.find_element_by_id("password").send_keys(PASSWORD)
driver.find_element_by_id("connect").click()
driver.find_element_by_xpath("//span[contains(text(),'" + SOURCE_FOLDER + "')]") # wait for filelist
time.sleep(1)
driver.find_element_by_xpath("//restore-file-picker/ul/li/div/a[2]").click() # select root folder checkbox
time.sleep(1)
driver.find_element_by_xpath("//form[@id='restore']/div[1]/div[@class='buttons']/a/span[contains(text(), 'Continue')]").click()
driver.find_element_by_id("restoretonewpath").click()
driver.find_element_by_id("restore_path").send_keys(DIRECT_RESTORE_FOLDER)
driver.find_element_by_xpath("//form[@id='restore']/div/div[@class='buttons']/a/span[contains(text(),'Restore')]").click()
# wait for restore to finish
wait_for_text(60, "//form[@id='restore']/div[3]/h3/div[1]", "Your files and folders have been restored successfully.")
# hash direct restore files
sha1_direct_restore = sha1_folder(DIRECT_RESTORE_FOLDER)
print "Source hashes: " + str(sha1_source)
print "Restore hashes: " + str(sha1_restore)
print "Direct Restore hashes: " + str(sha1_direct_restore)
# Tell Sauce Labs to stop the test
driver.quit()
if not (sha1_source == sha1_restore and sha1_source == sha1_direct_restore):
sys.exit(1) # return with error
graphql-python/graphene | examples/starwars_relay/data.py | Python | mit | 1,593 | 0.001255
data = {}
def setup():
global data
from .schema import Ship, Faction
xwing = Ship(id="1", name="X-Wing")
ywing = Ship(id="2", name="Y-Wing")
awing = Ship(id="3", name="A-Wing")
# Yeah, technically it's Corellian. But it flew in the service of the rebels,
# so for the purposes of this demo it's a rebel ship.
falcon = Ship(id="4", name="Millennium Falcon")
homeOne = Ship(id="5", name="Home One")
tieFighter = Ship(id="6", name="TIE Fighter")
tieInterceptor = Ship(id="7", name="TIE Interceptor")
executor = Ship(id="8", name="Executor")
rebels = Faction(
id="1", name="Alliance to Restore the Republic", ships=["1", "2", "3", "4", "5"]
)
empire = Faction(id="2", name="Galactic Empire", ships=["6", "7", "8"])
data = {
"Faction": {"1": rebels, "2": empire},
"Ship": {
"1": xwing,
"2": ywing,
"3": awing,
"4": falcon,
"5": homeOne,
"6": tieFighter,
"7": tieInterceptor,
"8": executor,
},
}
def create_ship(ship_name, faction_id):
from .schema import Ship
next_ship = len(data["Ship"].keys()) + 1
new_ship = Ship(id=str(next_ship), name=ship_name)
data["Ship"][new_ship.id] = new_ship
data["Faction"][faction_id].ships.append(new_ship.id)
return new_ship
def get_ship(_id):
return data["Ship"][_id]
def get_faction(_id):
return data["Faction"][_id]
def get_rebels():
return get_faction("1")
def get_empire():
return get_faction("2")
jordanjoz1/flickr-views-counter | count_views.py | Python | mit | 4,590 | 0
import flickrapi
import csv
import sys
import datetime
import argparse
import os
# environment variable keys
ENV_KEY = 'FLICKR_API_KEY'
ENV_SECRET = 'FLICKR_API_SECRET'
MAX_PHOTOS_PER_PAGE = 500
# column headers for output file
columns = ['Title', 'Upload date', 'photo_id', 'url', 'Description',
'View count', 'Favs count', 'Comments count']
# setup flickr api
flickr = None
def main():
global flickr
# parse arguments
userId, fname, api_key, api_secret = parseArgs()
# check if user provided api key/secret
if not api_key or not api_secret:
# try to get key/secret from environment variables
api_key = os.getenv(ENV_KEY)
api_secret = os.getenv(ENV_SECRET)
# exit if we still dont' have key/secret
if not api_key or not api_secret:
sys.exit('No Flickr API key and secret. Either provide the key '
'and secret as options (--key and --secret) or set them '
'as environment variables.')
# initialize flickr api
flickr = flickrapi.FlickrAPI(api_key, api_secret)
# get number of photos for the user
userInfo = flickr.people.getInfo(user_id=userId)
count = int(userInfo[0].find('photos').find('count').text)
pages = count / MAX_PHOTOS_PER_PAGE + 1
print('Counting views for %d photos...' % (count))
# get list of photos
photo_pages = []
for page in range(1, pages + 1):
photo_pages.append(
flickr.photos.search(
user_id=userId, per_page=str(MAX_PHOTOS_PER_PAGE), page=page))
# get view count for each photo
data = []
for photo_page in photo_pages:
for photo in photo_page[0]:
data.append(get_photo_data(photo.get('id')))
# write counts to output
if (fname is not None):
rows = create_rows_from_data(data)
write_to_csv(fname, columns, rows)
print('Photo data successfully written to %s (this could take hours '
'if you have hundreds of photos)' % (fname))
# display view count for photos
print('Total photo views: %d' % (calc_total_views_from_data(data)))
def parseArgs():
# parse arguments and do error checking
parser = argparse.ArgumentParser()
parser.add_argument('user_id',
help='The id of the user whose total views will be '
'counted.',
default='.')
parser.add_argument('--output',
help='Name of the output file',
default=None)
parser.add_argument('--key',
help='Flickr API key (use once for setup)',
default=None)
parser.add_argument('--secret',
help='Flickr API secret (use once for setup)',
default=None)
args = parser.parse_args()
return args.user_id, args.output, args.key, args.secret
def calc_total_views_from_data(data):
total = 0
for photo in data:
total += int(photo['info'][0].attrib['views'])
return total
def create_rows_from_data(data):
rows = []
for photo in data:
title = photo['info'][0].find('title').text
upload_date = photo['info'][0].get('dateuploaded')
upload_date = datetime.datetime.fromtimestamp(
int(upload_date)).strftime('%Y-%m-%d %H:%M:%S')
photo_id = photo['info'][0].get('id')
url = photo['info'][0].find('urls')[0].text
description = photo['info'][0].find('description').text
if description is None:
description = ''
views = photo['info'][0].get('views')
favs = photo['favs'][0].get('total')
comments = photo['info'][0].find('comments').text
# output as delimited text
row = [title, upload_date, str(photo_id), url, description,
str(views), str(favs), str(comments)]
rows.append(row)
return rows
def get_photo_data(photo_id):
info = flickr.photos.getinfo(photo_id=photo_id)
favs = flickr.photos.getFavorites(photo_id=photo_id)
return {'info': info, 'favs': favs}
def write_to_csv(fname, header, rows):
with open(fname, 'wb') as csvfile:
csvwriter = csv.writer(csvfile, delimiter=',', quotechar='|',
quoting=csv.QUOTE_MINIMAL)
csvwriter.writerow(header)
for row in rows:
csvwriter.writerow(
[s.encode("utf-8").replace(',', '').replace('\n', '')
for s in row])
if __name__ == "__main__":
main()
ygol/odoo | addons/mrp_subcontracting/models/stock_move_line.py | Python | agpl-3.0 | 1,039 | 0.002887
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, models
class StockMoveLine(models.Model):
_inherit = 'stock.move.line'
@api.model_create_multi
def create(self, vals_list):
records = super(StockMoveLine, self).create(vals_list)
records.filtered(lambda ml: ml.move_id.is_subcontract).move_id._check_overprocessed_subcontract_qty()
return records
def write(self, values):
res = super(StockMoveLine, self).write(values)
self.filtered(lambda ml: ml.move_id.is_subcontract).move_id._check_overprocessed_subcontract_qty()
return res
def _should_bypass_reservation(self, location):
""" If the move line is subcontracted then ignore the reservation. """
should_bypass_reservation = super(StockMoveLine, self)._should_bypass_reservation(location)
if not should_bypass_r
|
eservation and self.move_id.is_subcontract:
re
|
turn True
return should_bypass_reservation
Blackclaws/client | src/downloadManager/__init__.py | Python | gpl-3.0 | 3,779 | 0.00688
from PyQt4.QtNetwork import QNetworkAccessManager, QNetworkRequest
from PyQt4 import QtGui, QtCore
import urllib2
import logging
import os
import util
import warnings
logger= logging.getLogger(__name__)
VAULT_PREVIEW_ROOT = "http://content.faforever.com/faf/vault/map_previews/small/"
class downloadManager(QtCore.QObject):
''' This class allows downloading stuff in the background'''
def __init__(self, parent = None):
self.client = parent
self.nam = QNetworkAccessManager()
self.nam.finished.connect(self.finishedDownload)
self.modRequests = {}
self.mapRequests = {}
self.mapRequestsItem = []
def finishedDownload(self,reply):
''' finishing downloads '''
urlstring = reply.url().toString()
reqlist = []
if urlstring in self.mapRequests: reqlist = self.mapRequests[urlstring]
if urlstring in self.modRequests: reqlist = self.modRequests[urlstring]
if reqlist:
#save the map from cache
name = os.path.basename(reply.url().toString())
pathimg = os.path.join(util.CACHE_DIR, name)
img = QtCore.QFile(pathimg)
img.open(QtCore.QIODevice.WriteOnly)
img.write(reply.readAll())
img.close()
if os.path.exists(pathimg):
#Create alpha-mapped preview image
try:
pass # the server already sends 100x100 pic
# img = QtGui.QImage(pathimg).scaled(100,100)
# img.save(pathimg)
except:
pathimg = "games/unknown_map.png"
logger.info("Failed to resize " + name)
else :
pathimg = "games/unknown_map.png"
logger.debug("Web Preview failed for: " + name)
logger.debug("Web Preview used for: " + name)
for requester in reqlist:
if requester:
if requester in self.mapRequestsItem:
requester.setIcon(0, util.icon(pathimg, False))
self.mapRequestsItem.remove(requester)
else:
requester.setIcon(util.icon(pathimg, False))
if urlstring in self.mapRequests: del self.mapRequests[urlstring]
if urlstring in self.modRequests: del self.modRequests[urlstring]
def downloadMap(self, name, requester, item=False):
'''
Downloads a preview image from the web for the given map name
'''
#This is done so generated previews always have a lower case name. This doesn't solve the underlying problem (case folding Windows vs. Unix vs. FAF)
name = name.lower()
if len(name) == 0:
return
url = QtCore.QUrl(VAULT_PREVIEW_ROOT + urllib2.quote(name) + ".png")
if not url.toString() in self.mapRequests:
logger.debug("Searching map preview for: " + name)
self.mapRequests[url.toString()] = []
request = QNetworkRequest(url)
self.nam.get(request)
self.mapRequests[url.toString()].append(requester)
else :
self.mapRequests[url.toString()].append(requester)
if item:
self.mapRequestsItem.append(requester)
def downloadModPreview(self, strurl, requester):
url = QtCore.QUrl(strurl)
if not url.toString() in self.modRequests:
logger.debug("Searching mod preview for: " + os.path.basename(strurl).rsplit('.',1)[0])
self.modRequests[url.toString()] = []
request = QNetworkRequest(url)
self.nam.get(request)
self.modRequests[url.toString()].append(requester)
oubiwann/txjsonrpc | examples/ssl/client.py | Python | mit | 1,418 | 0.009168
from __future__ import print_function
import logging
from twisted.internet import reactor, ssl
from txjsonrpc.web.jsonrpc import Proxy
from OpenSSL import SSL
from twisted.python import log
def printValue(value):
print("Result: %s" % str(value))
def printError(error):
print('error', error)
def shutDown(data):
print("Shutting down reactor...")
reactor.stop()
def verifyCallback(connection, x509, errnum, errdepth, ok):
log.msg(connection.__str__())
if not ok:
log.msg('invalid server cert: %s' % x509.get_subject(), logLevel=logging.ERROR)
return False
else:
log.msg('good server cert: %s' % x509.get_subject(), logLevel=logging.INFO)
return True
class AltCtxFactory(ssl.ClientContextFactory):
def getContext(self):
#self.method = SSL.SSLv23_METHOD
ctx = ssl.ClientContextFactory.getContext(self)
ctx.set_verify(SSL.VERIFY_PEER, verifyCallback)
ctx.load_verify_locations("cacert.pem")
#ctx.use_certificate_file('keys/client.crt')
#ctx.use_privatekey_file('keys/client.key')
return ctx
import sys
log.startLogging(sys.stdout)
#proxy = Proxy('https://127.0.0.1:7443/', ssl_ctx_factory=AltCtxFactory)
proxy = Proxy('https://127.0.0.2:7443/', ssl_ctx_factory=AltCtxFactory)
d = proxy.callRemote('add', 3, 5)
d.addCallback(printValue).addErrback(printError).addBoth(shutDown)
reactor.run()
MungoRae/home-assistant | homeassistant/components/scene/lifx_cloud.py | Python | apache-2.0 | 2,931 | 0
"""
Support for LIFX Cloud scenes.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/scene.lifx_cloud/
"""
import asyncio
import logging
import voluptuous as vol
import aiohttp
import async_timeout
from homeassistant.components.scene import Scene
from homeassistant.const import (CONF_PLATFORM, CONF_TOKEN, CONF_TIMEOUT)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.aiohttp_client import (async_get_clientsession)
_LOGGER = logging.getLogger(__name__)
LIFX_API_URL = 'https://api.lifx.com/v1/{0}'
DEFAULT_TIMEOUT = 10
PLATFORM_SCHEMA = vol.Schema({
vol.Required(CONF_PLATFORM): 'lifx_cloud',
vol.Required(CONF_TOKEN): cv.string,
vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int,
})
# pylint: disable=unused-argument
@asyncio.coroutine
def async_setup_platform(hass, config, async_add_devices, discovery_info=None):
"""Set up the scenes stored in the LIFX Cloud."""
token = config.get(CONF_TOKEN)
timeout = config.get(CONF_TIMEOUT)
headers = {
"Authorization": "Bearer %s" % token,
}
url = LIFX_API_URL.format('scenes')
try:
httpsession = async_get_clientsession(hass)
with async_timeout.timeout(timeout, loop=hass.loop):
scenes_resp = yield from httpsession.get(url, headers=headers)
except (asyncio.TimeoutError, aiohttp.ClientError):
_LOGGER.exception("Error on %s", url)
return False
status = scenes_resp.status
if status == 200:
data = yield from scenes_resp.json()
devices = []
for scene in data:
devices.append(LifxCloudScene(hass, headers, timeout, scene))
async_add_devices(devices)
return True
elif status == 401:
_LOGGER.error("Unauthorized (bad token?) on %s", url)
return False
_LOGGER.error("HTTP error %d on %s", scenes_resp.status, url)
return False
class LifxCloudScene(Scene):
"""Representation of a LIFX Cloud scene."""
def __init__(self, hass, headers, timeout, scene_data):
"""Initialize the scene."""
self.hass = hass
self._headers = headers
self._timeout = timeout
self._name = scene_data["name"]
self._uuid = scene_data["uuid"]
@property
def name(self):
"""Return the name of the scene."""
return self._name
@asyncio.coroutine
def async_activate(self):
"""Activate the scene."""
url = LIFX_API_URL.format('scenes/scene_id:%s/activate' % self._uuid)
try:
httpsession = async_get_clientsession(self.hass)
with async_timeout.timeout(self._timeout, loop=self.hass.loop):
yield from httpsession.put(url, headers=self._headers)
except (asyncio.TimeoutError, aiohttp.ClientError):
_LOGGER.exception("Error on %s", url)
sidnarayanan/IntelROCCS | Detox/python/siteProperties.py | Python | mit | 12,964 | 0.011571
#====================================================================================================
# C L A S S E S concerning the site description
#====================================================================================================
#---------------------------------------------------------------------------------------------------
"""
Class: SiteProperties(siteName='')
Each site will be fully described for our application in this class.
"""
#---------------------------------------------------------------------------------------------------
import time, statistics
class SiteProperties:
"A SiteProperties defines all needed site properties."
def __init__(self, siteName):
self.name = siteName
self.datasetRanks = {}
self.rankSum = 0
self.datasetSizes = {}
self.dsetIsValid = {}
self.dsetIsCustodial = {}
self.dsetLastCopy = {}
self.dsetIsPartial = {}
self.deprecated = {}
self.dsetReqTime = {}
self.dsetUpdTime = {}
self.dsetIsDone = {}
self.dsetNotUsedOnTape = {}
self.wishList = []
self.datasetsToDelete = []
self.protectedList = []
self.siteSizeGbV = 0
self.spaceTakenV = 0
self.spaceNotUsed = 0
self.spaceLCp = 0
self.space2free = 0
self.deleted = 0
self.protected = 0
self.globalDsetIndex = 0
self.epochTime = int(time.time())
def addDataset(self,dset,rank,size,valid,partial,custodial,depr,reqtime,updtime,wasused,isdone):
self.dsetIsValid[dset] = valid
self.dsetIsPartial[dset] = partial
self.dsetIsCustodial[dset] = custodial
self.datasetRanks[dset] = rank
self.datasetSizes[dset] = size
if depr:
self.deprecated[dset] = depr
self.spaceTakenV = self.spaceTakenV + size
self.dsetIsDone[dset] = isdone
self.dsetReqTime[dset] = reqtime
self.dsetUpdTime[dset] = updtime
self.rankSum = self.rankSum + rank*size
if wasused == 0:
self.spaceNotUsed = self.spaceNotUsed + size
def makeWishList(self, dataPropers, ncopyMin, banInvalid=True):
space = 0
self.wishList = []
space2free = self.space2free
addedExtra = 0
counter = 0
for datasetName in sorted(self.datasetRanks.keys(), cmp=self.compare):
counter = counter + 1
if counter < self.globalDsetIndex:
continue
if space > (space2free-self.deleted):
break
if datasetName in self.datasetsToDelete:
continue
if datasetName in self.protectedList:
continue
#custodial set can't be on deletion wish list
if self.dsetIsCustodial[datasetName] :
continue
#if dataPropers[datasetName].daysSinceUsed() > 540:
if dataPropers[datasetName].isFullOnTape():
#delta = (self.epochTime - self.dsetUpdTime[datasetName])/(60*60*24)
if dataPropers[datasetName].getGlobalRank() > 500:
#if delta > 500:
space = space + self.datasetSizes[datasetName]
self.wishList.append(datasetName)
dataPropers[datasetName].kickFromPool = True
print "exp at " + self.name + ": " + datasetName
#print datasetName
#addedExtra = addedExtra + 1
continue
if "/REC
|
O" in datasetName:
delta = (self.epochTime - self.dsetUpdTime[datasetName])/(60*60*24)
#if dataPropers[datasetName].daysSinceUsed() > 180 and delta>180:
if delta > 180:
space = space + self.datasetSizes[datasetName]
self.wishList.append(datasetName)
dataPropers[datasetName].kickFromPool = True
print "RECO " + self.name + ": " + datasetName
continue
else:
continue
#non-valid dataset can't be on deletion list
if banInvalid == True:
if not self.dsetIsValid[datasetName]:
continue
dataPr = dataPropers[datasetName]
if dataPr.nSites() > ncopyMin:
space = space + self.datasetSizes[datasetName]
self.wishList.append(datasetName)
self.globalDsetIndex = counter
def hasMoreToDelete(self, dataPropers, ncopyMin, banInvalid):
counter = 0
if self.globalDsetIndex >= len(self.datasetRanks.keys()):
return False
for datasetName in sorted(self.datasetRanks.keys(), cmp=self.compare):
counter = counter + 1
if counter < self.globalDsetIndex:
continue
if '/MINIAOD' in datasetName:
ncopyMinTemp = 3
else:
ncopyMinTemp = ncopyMin
if datasetName in self.datasetsToDelete:
continue
if datasetName in self.protectedList:
continue
#custodial set can't be on deletion wish list
if self.dsetIsCustodial[datasetName] :
continue
#non-valid dataset can't be on deletion list
if banInvalid == True:
if not self.dsetIsValid[datasetName]:
continue
if datasetName in self.wishList:
continue
dataPr = dataPropers[datasetName]
if dataPr.nSites() <= ncopyMinTemp:
continue
return True
return False
def onWishList(self,dset):
if dset in self.wishList:
return True
return False
def onProtectedList(self,dset):
if dset in self.protectedList:
return True
return False
def wantToDelete(self):
if self.deleted < self.space2free:
return True
else:
return False
def grantWish(self,dset):
if dset in self.protectedList:
return False
if dset in self.datasetsToDelete:
return False
#if self.deleted > self.space2free:
# return False
self.datasetsToDelete.append(dset)
self.deleted = self.deleted + self.datasetSizes[dset]
return True
def revokeWish(self,dset):
if dset in self.datasetsToDelete:
self.datasetsToDelete.remove(dset)
self.deleted = self.deleted - self.datasetSizes[dset]
def canBeLastCopy(self,dset,banInvalid):
if not banInvalid:
return True
#can't be partial dataset
if dset not in self.dsetIsPartial:
return False
if self.dsetIsPartial[dset] :
return False
#can't be non-valid dataset
if not self.dsetIsValid[dset]:
return False
return True
def pinDataset(self,dset):
if dset in self.datasetsToDelete:
return False
#can't pin partial dataset
if self.dsetIsPartial[dset] :
return False
#can't pin non-valid dataset
if not self.dsetIsValid[dset]:
return False
self.protectedList.append(dset)
self.protected = self.protected + self.datasetSizes[dset]
if dset in self.wishList:
self.wishList.remove(dset)
return True
def lastCopySpace(self,datasets,nCopyMin):
space = 0
self.dsetLastCopy = {}
for dset in self.datasetSizes.keys():
if dset in self.datasetsToDelete:
continue
dataset = datasets[dset]
remaining = dataset.nSites() - dataset.nBeDeleted()
if remaining <= nCopyMin:
self.dsetLastCopy[dset] = 1
space = space + self.datasetSizes[dset]
self.spaceLCp = space
def setSiteSize(self,size):
self.siteSizeGbV = size
def siteSizeGb(self):
return self.siteSizeGbV
def dsetRank(self,set):
return self.datasetRanks[set]
def dsetSize(self,set):
return self.datasetSizes[set]
def isPartial(self,set):
return self.dsetIsPartial[set]
def siteName(self):
return
TimurNurlygayanov/mistral | mistral/api/controllers/v1/workbook_definition.py | Python | apache-2.0 | 1,511 | 0
# -*- coding: utf-8 -*-
#
# Copyright 2013 - Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pecan import rest
from pecan import expose
from pecan import request
from mistral.openstack.common import log as logging
from mistral.db import api as db_api
from mistral.services import scheduler
LOG = logging.getLogger(__name__)
class WorkbookDefinitionController(rest.RestController):
@expose()
def get(self, workbook_name):
LOG.debug("Fetch workbook definition [workbook_name=%s]" %
workbook_name)
return db_api.workbook_definition_get(workbook_name)
@expose(content_type="text/plain")
def put(self, workbook_name):
text = request.text
LOG.debug("Update workbook definition [workbook_name=%s, text=%s]" %
(workbook_name, text))
wb = db_api.workbook_definition_put(workbook_name, text)
scheduler.create_associated_triggers(wb)
return wb['definition']
Arno-Nymous/pyload | module/lib/beaker/crypto/__init__.py | Python | gpl-3.0 | 1,233 | 0
from warnings import warn
from beaker.crypto.pbkdf2 import PBKDF2, strxor
from beaker.crypto.util import hmac, sha1, hmac_sha1, md5
from beaker import util
keyLength = None
if util.jython:
try:
from beaker.crypto.jcecrypto import getKeyLength, aesEncrypt
keyLength = getKeyLength()
except ImportError:
pass
else:
try:
from beaker.crypto.pycrypto import getKeyLength, aesEncrypt, aesDecrypt
keyLength = getKeyLength()
except ImportError:
pass
if not keyLength:
has_aes = False
else:
has_aes = True
if has_aes and keyLength < 32:
warn('Crypto implementation only supports key lengths up to %d bits. '
'Generated session cookies may be incompatible with other '
'environments' % (keyLength * 8))
def generateCryptoKeys(master_key, salt, iterations):
# NB: We XOR parts of the keystream into the randomly-generated parts, just
# in case os.urandom() isn't as random as it should be. Note that if
# os.urandom() returns truly random data, this will have no effect on the
# overall security.
keystream = PBKDF2(master_key, salt, iterations=iterations)
cipher_key = keystream.read(keyLength)
return cipher_key
The-Tasty-Jaffa/Tasty-Jaffa-cogs | say/say.py | Python | gpl-3.0 | 6,218 | 0.008363
import discord, os, logging
from discord.ext import commands
from .utils import checks
from .utils.dataIO import dataIO
from .utils.chat_formatting import pagify, box
#The Tasty Jaffa
#Requested by Freud
def get_role(ctx, role_id):
roles = set(ctx.message.server.roles)
for role in roles:
if role.id == role_id:
return role
return None
class say:
def __init__(self, bot):
self.bot = bot
self.settings = dataIO.load_json("data/Tasty/say/settings.json")
print("Testing values in data/Tasty/say")
for server in self.bot.servers:
try:
self.settings[server.id]["ROLE"]
self.settings[server.id]["USERS"]
except:
self.settings[server.id] = {}
self.settings[server.id]["ROLE"] = None
self.settings[server.id]["USERS"] = []
@commands.group(name="setsay", pass_context
|
=True, no_pm=True, invoke_without_command=True)
async def sayset(self, ctx):
"""The 'Say' command set
add - Adds a user to have the abillity to use the speak command
list - list users allowed and permited role
remove - Removes a user to have the abillity to use the speak command
role - Adds a permited role to use the speak command"""
if ctx.invoked_subcommand is None:
await self.bot.send_message(ctx.message.channel, "```Please use the speak command with: \n add - Adds a **user** to have the abillity to use the speak command \n remove - Removes a **user** to have the abillity to use the speak command \n role - Adds a role and those with it can use the speak command \n list - lists permited users and the permited role```")
@sayset.command(name="list", pass_context=True)
@checks.admin_or_permissions()
async def say_list(self,ctx):
"""Lists permited users and the permitted role"""
names = []
for user_id in self.settings[ctx.message.server.id]["USERS"]:
names.append(discord.utils.get(self.bot.get_all_members(), id=user_id).name)
msg = ("+ Permited\n"
"{}\n\n"
"".format(", ".join(sorted(names))))
for page in pagify(msg, [" "], shorten_by=16):
await self.bot.say(box(page.lstrip(" "), lang="diff"))
#gets the name of the role and displays it
if self.settings[ctx.message.server.id]["ROLE"] is not None:
await self.bot.send_message(ctx.message.channel, "Permited Role: **{}**".format(get_role(ctx, self.settings[ctx.message.server.id]["ROLE"]).name))
else:
await self.bot.send_message(ctx.message.channel, "No role has permission")
@sayset.command(name="add", pass_context=True, no_pm=True)
@checks.admin_or_permissions()
async def say_add (self, ctx, user: discord.Member):
"""Adds a [user] to have the abillity to use the speak command"""
self.settings[ctx.message.server.id]["USERS"].append(user.id)
self.save()
await self.bot.send_message(ctx.message.channel, "Done!")
@sayset.command(name="remove", pass_context=True, no_pm=True)
@checks.admin_or_permissions()
async def say_remove (self, ctx, user: discord.Member):
"""Removes a [user] to have the abillity to use the speak command"""
try:
self.settings[ctx.message.server.id]["USERS"].remove(user.id)
self.save()
await self.bot.send_message(ctx.message.channel, "Done!")
except:
await self.bot.send_message(ctx.message.channel, "Are you sure that {} had the permision in the first place?".format(user.mention))
@sayset.command(name="role", pass_context=True)
@checks.admin_or_permissions()
async def say_role(self, ctx, role_name:str):
"""Sets the permitted role"""
role = discord.utils.get(ctx.message.server.roles, name=role_name)
if role is not None:
self.settings[ctx.message.server.id]["ROLE"] = role.id
self.save()
await self.bot.send_message(ctx.message.channel, "Role added!")
else:
await self.bot.send_message(ctx.message.channel, "Role not found!")
@commands.command(name="speak", pass_context=True, no_pm =True)
async def bot_say(self, ctx, *, text):
"""The bot repeats what you tell it to"""
if '@everyone' in ctx.message.content and '@here' in ctx.message.content:
await self.bot.send_message(ctx.message.channel, "Woh! {}, please don't do that".format(ctx.message.author.mention))
return
#IF there are no mentions such as @everyone or @here must test useing a string
if ctx.message.channel.permissions_for(ctx.message.server.me).manage_messages is not True:
await self.bot.say("This command requires the **Manage Messages** permission.")
return
#checks if they are allowed (role or permitted)
if ctx.message.author.id in self.settings[ctx.message.server.id]["USERS"] or get_role(ctx, self.settings[ctx.message.server.id]["ROLE"]) in ctx.message.author.roles:
await self.bot.delete_message(ctx.message)
await self.bot.send_message(ctx.message.channel, text)
else:
await self.bot.say("You need to be given access to this command")
def save(self):
dataIO.save_json("data/Tasty/say/settings.json", self.settings)
async def server_join(self, server):
self.settings[server.id]={
"ROLE":None,
"USERS":[],
}
self.save()
def check_folders(): #Creates a folder
if not os.path.exists("data/Tasty/say"):
print("Creating data/Tasty/say folder...")
os.makedirs("data/Tasty/say")
def check_files(): #Creates json files in the folder
if not dataIO.is_valid_json("data/Tasty/say/settings.json"):
print("Creating empty settings.json...")
dataIO.save_json("data/Tasty/say/settings.json", {})
def setup(bot):
check_folders()
check_files()
n = say(bot)
bot.add_listener(n.server_join, "on_server_join")
bot.add_cog(n)
guilhermef/aws | tc_aws/loaders/s3_loader.py | Python | mit | 2,000 | 0.0025
# coding: utf-8
from boto.s3.bucket import Bucket
from thumbor.utils import logger
from tornado.concurrent import return_future
import urllib2
import thumbor.loaders.http_loader as http_loader
from tc_aws.aws.connection import get_connection
def _get_bucket(url, root_path=None):
"""
Returns a tuple containing bucket name and bucket path.
url: A string of the format /bucket.name/file/path/in/bucket
"""
url_by_piece = url.lstrip("/").split("/")
bucket_name = url_by_piece[0]
if root_path is not None:
url_by_piece[0] = root_path
else:
url_by_piece = url_by_piece[1:]
bucket_path = "/".join(url_by_piece)
return bucket_name, bucket_path
def _normalize_url(url):
"""
:param url:
:return: exactly the same url since we only use http loader if url stars with http prefix.
"""
return url
def _validate_bucket(context, bucket):
allowed_buckets = context.config.get('S3_ALLOWED_BUCKETS', default=None)
return not allowed_buckets or bucket in allowed_buckets
@return_future
def load(context, url, callback):
enable_http_loader = context.config.get('AWS_ENABLE_HTTP_LOADER', default=False)
if enable_http_loader and url.startswith('http'):
return http_loader.load_sync(context, url, callback, normalize_url_func=_normalize_url)
url = urllib2.unquote(url)
bucket = context.config.get('S3_LOADER_BUCKET', default=None)
if not bucket:
bucket, url = _get_bucket(url, root_path=context.config.S3_LOADER_ROOT_PATH)
if _validate_bucket(context, bucket):
bucket_loader = Bucket(
connection=get_connection(context),
name=bucket
)
file_key = None
try:
file_key = bucket_loader.get_key(url)
except Exception, e:
logger.warn("ERROR retrieving image from S3 {0}: {1}".format(url, str(e)))
if file_key:
callback(file_key.read())
return
callback(None)
dawran6/zulip | zerver/tests/test_push_notifications.py | Python | apache-2.0 | 17,956 | 0.001337
import mock
from mock import call
import time
from typing import Any, Dict, Union, SupportsInt, Text
import gcm
from django.test import TestCase
from django.conf import settings
from zerver.models import PushDeviceToken, UserProfile, Message
from zerver.models import get_user_profile_by_email, receives_online_notifications, \
receives_offline_notifications
from zerver.lib import push_notifications as apn
from zerver.lib.test_classes import (
ZulipTestCase,
)
class MockRedis(object):
data = {} # type: Dict[str, Any]
def hgetall(self, key):
# type: (str) -> Any
return self.data.get(key)
def exists(self, key):
# type: (str) -> bool
return key in self.data
def hmset(self, key, data):
# type: (str, Dict[Any, Any]) -> None
self.data[key] = data
def delete(self, key):
# type: (str) -> None
if self.exists(key):
del self.data[key]
def expire(self, *args, **kwargs):
# type: (*Any, **Any) -> None
pass
class PushNotificationTest(TestCase):
def setUp(self):
# type: () -> None
email = 'hamlet@zulip.com'
apn.connection = apn.get_connection('fake-cert', 'fake-key')
self.redis_client = apn.redis_client = MockRedis() # type: ignore
apn.dbx_connection = apn.get_connection('fake-cert', 'fake-key')
self.user_profile = get_user_profile_by_email(email)
self.tokens = [u'aaaa', u'bbbb']
for token in self.tokens:
PushDeviceToken.objects.create(
kind=PushDeviceToken.APNS,
token=apn.hex_to_b64(token),
user=self.user_profile,
ios_app_id=settings.ZULIP_IOS_APP_ID)
def tearDown(self):
# type: () -> None
for i in [100, 200]:
self.redis_client.delete(apn.get_apns_key(i))
class APNsMessageTest(PushNotificationTest):
@mock.patch('random.getrandbits', side_effect=[100, 200])
def test_apns_message(self, mock_getrandbits):
# type: (mock.MagicMock) -> None
apn.APNsMessage(self.user_profile.id, self.tokens, alert="test")
data = self.redis_client.hgetall(apn.get_apns_key(100))
self.assertEqual(data['token'], 'aaaa')
self.assertEqual(int(data['user_id']), self.user_profile.id)
data = self.redis_client.hgetall(apn.get_apns_key(200))
self.assertEqual(data['token'], 'bbbb')
self.assertEqual(int(data['user_id']), self.user_profile.id)
class ResponseListenerTest(PushNotificationTest):
def get_error_response(self, **kwargs):
# type: (**Any) -> Dict[str, SupportsInt]
er = {'identifier': 0, 'status': 0} # type: Dict[str, SupportsInt]
er.update({k: v for k, v in kwargs.items() if k in er})
return er
def get_cache_value(self):
# type: () -> Dict[str, Union[str, int]]
return {'token': 'aaaa', 'user_id': self.user_profile.id}
@mock.patch('logging.warn')
def test_cache_does_not_exist(self, mock_warn):
# type: (mock.MagicMock) -> None
err_rsp = self.get_error_response(identifier=100, status=1)
apn.response_listener(err_rsp)
msg = "APNs key, apns:100, doesn't not exist."
mock_warn.assert_called_once_with(msg)
@mock.patch('logging.warn')
def test_cache_exists(self, mock_warn):
# type: (mock.MagicMock) -> None
self.redis_client.hmset(apn.get_apns_key(100), self.get_cache_value())
err_rsp = self.get_error_response(identifier=100, status=1)
apn.response_listener(err_rsp)
b64_token = apn.hex_to_b64('aaaa')
errmsg = apn.ERROR_CODES[int(err_rsp['status'])]
msg = ("APNS: Failed to deliver APNS notification to %s, "
"reason: %s" % (b64_token, errmsg))
mock_warn.assert_called_once_with(msg)
@mock.patch('logging.warn')
def test_error_code_eight(self, mock_warn):
# type: (mock.MagicMock) -> None
self.redis_client.hmset(apn.get_apns_key(100), self.get_cache_value())
err_rsp = self.get_error_response(identifier=100, status=8)
b64_token = apn.hex_to_b64('aaaa')
self.assertEqual(PushDeviceToken.objects.filter(
user=self.user_profile, token=b64_token).count(), 1)
apn.response_listener(err_rsp)
self.assertEqual(mock_warn.call_count, 2)
self.assertEqual(PushDeviceToken.objects.filter(
user=self.user_profile, token=b64_token).count(), 0)
class TestPushApi(ZulipTestCase):
def test_push_api(self):
# type: () -> None
email = "cordelia@zulip.com"
user = get_user_profile_by_email(email)
self.login(email)
endpoints = [
('/json/users/me/apns_device_token', 'apple-token'),
('/json/users/me/android_gcm_reg_id', 'android-token'),
]
# Test error handling
for endpoint, _ in endpoints:
# Try adding/removing tokens that are too big...
broken_token = "x" * 5000 # too big
result = self.client_post(endpoint, {'token': broken_token})
self.assert_json_error(result, 'Empty or invalid length token')
result = self.client_delete(endpoint, {'token': broken_token})
self.assert_json_error(result, 'Empty or invalid length token')
# Try to remove a non-existent token...
result = self.client_delete(endpoint, {'token': 'non-existent token'})
self.assert_json_error(result, 'Token does not exist')
# Add tokens
for endpoint, token in endpoints:
# Test that we can push twice
result = self.client_post(endpoint, {'token': token})
self.assert_json_success(result)
result = self.client_post(endpoint, {'token': token})
self.assert_json_success(result)
tokens = list(PushDeviceToken.objects.filter(user=user, token=token))
self.assertEqual(len(tokens), 1)
self.assertEqual(tokens[0].token, token)
# User should have tokens for both devices now.
tokens = list(PushDeviceToken.objects.filter(user=user))
self.assertEqual(len(tokens), 2)
# Remove tokens
for endpoint, token in endpoints:
result = self.client_delete(endpoint, {'token': token})
self.assert_json_success(result)
tokens = list(PushDeviceToken.objects.filter(user=user, token=token))
self.assertEqual(len(tokens), 0)
class SendNotificationTest(PushNotificationTest):
@mock.patch('logging.warn')
@mock.patch('logging.info')
@mock.patch('zerver.lib.push_notifications._do_push_to_apns_service')
def test_send_apple_push_notifiction(self, mock_send, mock_info, mock_warn):
# type: (mock.MagicMock, mock.MagicMock, mock.MagicMock) -> None
def test_send(user_id, message, alert):
# type: (int, Message, str) -> None
self.assertEqual(user_id, self.user_profile.id)
self.assertEqual(set(message.tokens), set(self.tokens))
mock_send.side_effect = test_send
apn.send_apple_push_notification_to_user(self.user_profile, "test alert")
self.assertEqual(mock_send.call_count, 1)
@mock.patch('apns.GatewayConnection.send_notification_multiple')
def test_do_push_to_apns_service(self, mock_push):
# type: (mock.MagicMock) -> None
msg = apn.APNsMessage(self.user_profile.id, self.tokens, alert="test")
def test_push(message):
# type: (Message) -> None
self.assertIs(message, msg.get_frame())
mock_push.side_effect = test_push
apn._do_push_to_apns_service(self.user_profile.id, msg, apn.connection)
@mock.patch('logging.warn')
@mock.patch('logging.info')
@mock.patch('apns.GatewayConnection.send_notification_multiple')
def test_connection_single_none(self, mock_push, mock_info, mock_warn):
# type: (mock.MagicMock, mock.MagicMock, mock.MagicMock) -> None
apn.connection = None
apn.send_apple_push_notification_to_user(self.user_profile, "test alert")
@mock
RussellRiesJr/CoupleComeStatWithMe | ccswm/statApi/serializers.py | Python | mit | 1,581 | 0.001898
from ccswm.statApi.models import *
from rest_framework import serializers
class EpisodeSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Episode
fields = ('id', 'url', 'season', 'episodeNumber', 'location')
class StarterSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Starter
fields = ('id', 'url', 'protein', 'proteinStyle', 'side', 'sideStyle')
class EntreeSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Entree
fields = ('id', 'url', 'protein', 'proteinStyle', 'side', 'sideStyle')
class DessertSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Dessert
fields = ('id', 'url', 'main', 'secondary')
class EntertainmentSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Entertainment
fields = ('id', 'url', 'description')
class CoupleMealSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = CoupleMeal
fields = ('id', 'url', 'starter', 'entree', 'dessert', 'entertainment')
class CoupleSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Couple
fields = ('id', 'url', 'nightNumber', 'ageRange', 'sexPref', 'mrtlStat', 'mrtlLength', 'theme', 'foodEth', 'episode', 'coupleMeal')
class ResultsSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Results
fields = ('id', 'url', 'couple', 'oppAVote', 'oppBVote', 'totalScore', 'outcome')
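# Illustrative wiring sketch (added annotation, not part of the original file):
# a HyperlinkedModelSerializer resolves its 'url' field through a ViewSet
# registered on a router; the names below are assumptions, not project code.
#
#     from rest_framework import viewsets, routers
#
#     class EpisodeViewSet(viewsets.ModelViewSet):
#         queryset = Episode.objects.all()
#         serializer_class = EpisodeSerializer
#
#     router = routers.DefaultRouter()
#     router.register(r'episodes', EpisodeViewSet)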
tiangolo/fastapi | docs_src/extra_models/tutorial002.py | Python | mit | 824 | 0
from typing import Optional
from fastapi import FastAPI
from pydantic import BaseModel, EmailStr
app = FastAPI()
class UserBase(BaseModel):
username: str
email: EmailStr
    full_name: Optional[str] = None
class UserIn(UserBase):
password: str
class UserOut(UserBase):
pass
class UserInDB(UserBase):
hashed_password: str
def fake_password_hasher(raw_password: str):
return "supersecret" + raw_password
def fake_save_user(user_in: UserIn):
hashed_password = fake_password_hasher(user_in.password)
user_in_db = UserInDB(**user_in.dict(), hashed_password=hashed_password)
print("User saved! ..not really")
return user_in_db
@app.post("/user/", response_model=UserOut)
async def create_user(user_in: UserIn):
user_saved = fake_save_user(user_in)
return user_saved
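# Illustrative usage sketch (added annotation, not part of the tutorial file):
# because the endpoint declares response_model=UserOut, the plaintext password
# never appears in the response even though fake_save_user() returns a
# UserInDB. A quick check with the test client (payload values are made up):
#
#     from fastapi.testclient import TestClient
#
#     client = TestClient(app)
#     payload = {"username": "john", "password": "secret",
#                "email": "john.doe@example.com", "full_name": "John Doe"}
#     response = client.post("/user/", json=payload)
#     assert response.status_code == 200
#     assert "password" not in response.json()
#     assert "hashed_password" not in response.json()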
Psycojoker/geholparser | src/gehol/__init__.py | Python | mit | 76 | 0
__version__ = '0.1'
from geholproxy import *
from geholexceptions import *
tempbottle/restcommander | play-1.2.4/python/Lib/_abcoll.py | Python | apache-2.0 | 13,666 | 0.000951
# Copyright 2007 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
"""Abstract Base Classes (ABCs) for collections, according to PEP 3119.
DON'T USE THIS MODULE DIRECTLY! The classes here should be imported
via collections; they are defined here only to alleviate certain
bootstrapping issues. Unit tests are in test_collections.
"""
from abc import ABCMeta, abstractmethod
import sys
__all__ = ["Hashable", "Iterable", "Iterator",
"Sized", "Container", "Callable",
"Set", "MutableSet",
"Mapping", "MutableMapping",
"MappingView", "KeysView", "ItemsView", "ValuesView",
"Sequence", "MutableSequence",
]
### ONE-TRICK PONIES ###
class Hashable:
__metaclass__ = ABCMeta
@abstractmethod
def __hash__(self):
return 0
@classmethod
def __subclasshook__(cls, C):
if cls is Hashable:
for B in C.__mro__:
if "__hash__" in B.__dict__:
if B.__dict__["__hash__"]:
return True
break
return NotImplemented
class Iterable:
__metaclass__ = ABCMeta
@abstractmethod
def __iter__(self):
while False:
yield None
@classmethod
def __subclasshook__(cls, C):
if cls is Iterable:
if any("__iter__" in B.__dict__ for B in C.__mro__):
return True
return NotImplemented
Iterable.register(str)
class Iterator(Iterable):
@abstractmethod
def __next__(self):
raise StopIteration
def __iter__(self):
return self
@classmethod
def __subclasshook__(cls, C):
if cls is Iterator:
if any("next" in B.__dict__ for B in C.__mro__):
return True
return NotImplemented
class Sized:
__metaclass__ = ABCMeta
@abstractmethod
def __len__(self):
return 0
@classmethod
def __subclasshook__(cls, C):
if cls is Sized:
if any("__len__" in B.__dict__ for B in C.__mro__):
return True
return NotImplemented
class Container:
__metaclass__ = ABCMeta
@abstractmethod
def __contains__(self, x):
return False
@classmethod
def __subclasshook__(cls, C):
if cls is Container:
if any("__contains__" in B.__dict__ for B in C.__mro__):
return True
return NotImplemented
class Callable:
__metaclass__ = ABCMeta
@abstractmethod
def __call__(self, *args, **kwds):
return False
@classmethod
def __subclasshook__(cls, C):
if cls is Callable:
if any("__call__" in B.__dict__ for B in C.__mro__):
return True
return NotImplemented
### SETS ###
class Set(Sized, Iterable, Container):
"""A set is a finite, iterable container.
This class provides concrete generic implementations of all
methods except for __contains__, __iter__ and __len__.
To override the comparisons (presumably for speed, as the
semantics are fixed), all you have to do is redefine __le__ and
then the other operations will automatically follow suit.
"""
def __le__(self, other):
if not isinstance(other, Set):
return NotImplemented
if len(self) > len(other):
return False
for elem in self:
if elem not in other:
return False
return True
def __lt__(self, other):
if not isinstance(other, Set):
return NotImplemented
return len(self) < len(other) and self.__le__(other)
def __gt__(self, other):
if not isinstance(other, Set):
return NotImplemented
return other < self
def __ge__(self, other):
if not isinstance(other, Set):
return NotImplemented
return other <= self
def __eq__(self, other):
if not isinstance(other, Set):
return NotImplemented
return len(self) == len(other) and self.__le__(other)
def __ne__(self, other):
return not (self == other)
@classmethod
    def _from_iterable(cls, it):
        '''Construct an instance of the class from any iterable input.
Must override this method if the class constructor signature
does not accept an iterable for an input.
'''
return cls(it)
def __and__(self, other):
if not isinstance(other, Iterable):
return NotImplemented
return self._from_iterable(value for value in other if value in self)
def isdisjoint(self, other):
for value in other:
if value in self:
return False
return True
def __or__(self, other):
if not isinstance(other, Iterable):
return NotImplemented
chain = (e for s in (self, other) for e in s)
return self._from_iterable(chain)
def __sub__(self, other):
if not isinstance(other, Set):
if not isinstance(other, Iterable):
return NotImplemented
other = self._from_iterable(other)
return self._from_iterable(value for value in self
if value not in other)
def __xor__(self, other):
if not isinstance(other, Set):
if not isinstance(other, Iterable):
return NotImplemented
other = self._from_iterable(other)
return (self - other) | (other - self)
# Sets are not hashable by default, but subclasses can change this
__hash__ = None
def _hash(self):
"""Compute the hash value of a set.
Note that we don't define __hash__: not all sets are hashable.
But if you define a hashable set type, its __hash__ should
call this function.
This must be compatible __eq__.
All sets ought to compare equal if they contain the same
elements, regardless of how they are implemented, and
regardless of the order of the elements; so there's not much
freedom for __eq__ or __hash__. We match the algorithm used
by the built-in frozenset type.
"""
MAX = sys.maxint
MASK = 2 * MAX + 1
n = len(self)
h = 1927868237 * (n + 1)
h &= MASK
for x in self:
hx = hash(x)
h ^= (hx ^ (hx << 16) ^ 89869747) * 3644798167
h &= MASK
h = h * 69069 + 907133923
h &= MASK
if h > MAX:
h -= MASK + 1
if h == -1:
h = 590923713
return h
Set.register(frozenset)
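# Illustrative note (added annotation, not part of the original module): a
# minimal concrete subclass only needs __contains__, __iter__ and __len__;
# the Set mixin then supplies the comparison and algebra operators defined
# above. A sketch:
#
#     class ListBasedSet(Set):
#         def __init__(self, iterable):
#             self.elements = lst = []
#             for value in iterable:
#                 if value not in lst:
#                     lst.append(value)
#         def __iter__(self):
#             return iter(self.elements)
#         def __contains__(self, value):
#             return value in self.elements
#         def __len__(self):
#             return len(self.elements)
#
#     s1 = ListBasedSet('abcdef')
#     s2 = ListBasedSet('defghi')
#     overlap = s1 & s2    # uses Set.__and__ via _from_iterable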
class MutableSet(Set):
@abstractmethod
def add(self, value):
"""Return True if it was added, False if already there."""
raise NotImplementedError
@abstractmethod
def discard(self, value):
"""Return True if it was deleted, False if not there."""
raise NotImplementedError
def remove(self, value):
"""Remove an element. If not a member, raise a KeyError."""
if value not in self:
raise KeyError(value)
self.discard(value)
def pop(self):
"""Return the popped value. Raise KeyError if empty."""
it = iter(self)
try:
value = it.__next__()
except StopIteration:
raise KeyError
self.discard(value)
return value
def clear(self):
"""This is slow (creates N new iterators!) but effective."""
try:
while True:
self.pop()
except KeyError:
pass
def __ior__(self, it):
for value in it:
self.add(value)
return self
def __iand__(self, c):
for value in self:
if value not in c:
self.discard(value)
return self
def __ixor__(self, it):
if not isinstance(it, Set):
it = self._from_iterable(it)
for value in it:
if value in self:
self.discard(value)
else:
self.add(value)
return self
def __isub__(self, it):
encukou/freeipa | ipaclient/plugins/user.py | Python | gpl-3.0 | 2,966 | 0
# Authors:
# Jason Gerard DeRose <jderose@redhat.com>
# Pavel Zuna <pzuna@redhat.com>
#
# Copyright (C) 2008 Red Hat
# see file 'COPYING' for use and warranty information
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from ipaclient.frontend import MethodOverride
from ipalib import errors
from ipalib import Flag
from ipalib import util
from ipalib.plugable import Registry
from ipalib import _
from ipalib import x509
register = Registry()
@register(override=True, no_fail=True)
class user_del(MethodOverride):
def get_options(self):
for option in super(user_del, self).get_options():
yield option
yield Flag(
'preserve?',
include='cli',
doc=_('Delete a user, keeping the entry available for future use'),
)
yield Flag(
'no_preserve?',
include='cli',
doc=_('Delete a user'),
)
def forward(self, *keys, **options):
if self.api.env.context == 'cli':
no_preserve = options.pop('no_preserve', False)
preserve = options.pop('preserve', False)
if no_preserve and preserve:
raise errors.MutuallyExclusiveError(
reason=_("preserve and no-preserve cannot be both set"))
elif no_preserve:
options['preserve'] = False
elif preserve:
options['preserve'] = True
return super(user_del, self).forward(*keys, **options)
@register(override=True, no_fail=True)
class user_show(MethodOverride):
def forward(self, *keys, **options):
if 'out' in options:
util.check_writable_file(options['out'])
result = super(user_show, self).forward(*keys, **options)
if 'usercertificate' in result['result']:
certs = (x509.load_der_x509_certificate(c)
for c in result['result']['usercertificate'])
x509.write_certificate_list(certs, options['out'])
result['summary'] = (
_('Certificate(s) stored in file \'%(file)s\'')
% dict(file=options['out'])
)
return result
else:
raise errors.NoCertificateError(entry=keys[-1])
else:
return super(user_show, self).forward(*keys, **options)
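# Illustrative CLI behaviour (added annotation, not part of the original
# plugin): the overrides above back commands such as (username hypothetical):
#
#     ipa user-del jsmith --preserve        # sets options['preserve'] = True
#     ipa user-show jsmith --out=certs.pem  # writes the user's certificates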
davelab6/pyfontaine | fontaine/charsets/noto_chars/notosansolditalic_regular.py | Python | gpl-3.0 | 2,939 | 0.015652
# -*- coding: utf-8 -*-
class Charset(object):
common_name = 'NotoSansOldItalic-Regular'
native_name = ''
def glyphs(self):
chars = []
chars.append(0x0000) #uniFEFF ????
chars.append(0x0020) #uni00A0 SPACE
chars.append(0x00A0) #uni00A0 NO-BREAK SPACE
chars.append(0x000D) #uni000D ????
chars.append(0xFEFF) #uniFEFF ZERO WIDTH NO-BREAK SPACE
chars.append(0x0000) #uniFEFF ????
chars.append(0x10301) #glyph00005 OLD ITALIC LETTER BE
chars.append(0x10302) #glyph00006 OLD ITALIC LETTER KE
        chars.append(0x10303) #glyph00007 OLD ITALIC LETTER DE
chars.append(0x10304) #glyph00008 OLD ITALIC LETTER E
chars.append(0x10305) #glyph00009 OLD ITALIC LETTER VE
        chars.append(0x10306) #glyph00010 OLD ITALIC LETTER ZE
chars.append(0x10307) #glyph00011 OLD ITALIC LETTER HE
chars.append(0x10308) #glyph00012 OLD ITALIC LETTER THE
chars.append(0x10309) #glyph00013 OLD ITALIC LETTER I
chars.append(0x1030A) #glyph00014 OLD ITALIC LETTER KA
chars.append(0x1030B) #glyph00015 OLD ITALIC LETTER EL
chars.append(0x1030C) #glyph00016 OLD ITALIC LETTER EM
chars.append(0x000D) #uni000D ????
chars.append(0x1030E) #glyph00018 OLD ITALIC LETTER ESH
chars.append(0x1030F) #glyph00019 OLD ITALIC LETTER O
chars.append(0x10310) #glyph00020 OLD ITALIC LETTER PE
chars.append(0x10311) #glyph00021 OLD ITALIC LETTER SHE
chars.append(0x10312) #glyph00022 OLD ITALIC LETTER KU
chars.append(0x10313) #glyph00023 OLD ITALIC LETTER ER
chars.append(0x10314) #glyph00024 OLD ITALIC LETTER ES
chars.append(0x10315) #glyph00025 OLD ITALIC LETTER TE
chars.append(0x10316) #glyph00026 OLD ITALIC LETTER U
chars.append(0x10317) #glyph00027 OLD ITALIC LETTER EKS
chars.append(0x10318) #glyph00028 OLD ITALIC LETTER PHE
chars.append(0x10319) #glyph00029 OLD ITALIC LETTER KHE
chars.append(0x1031A) #glyph00030 OLD ITALIC LETTER EF
chars.append(0x1031B) #glyph00031 OLD ITALIC LETTER ERS
chars.append(0x1031C) #glyph00032 OLD ITALIC LETTER CHE
chars.append(0x1031D) #glyph00033 OLD ITALIC LETTER II
chars.append(0x10300) #glyph00004 OLD ITALIC LETTER A
chars.append(0x0020) #uni00A0 SPACE
chars.append(0x10321) #glyph00036 OLD ITALIC NUMERAL FIVE
chars.append(0x10322) #glyph00037 OLD ITALIC NUMERAL TEN
chars.append(0x10323) #glyph00038 OLD ITALIC NUMERAL FIFTY
chars.append(0x1031E) #glyph00034 OLD ITALIC LETTER UU
chars.append(0x00A0) #uni00A0 NO-BREAK SPACE
chars.append(0x1030D) #glyph00017 OLD ITALIC LETTER EN
chars.append(0x10320) #glyph00035 OLD ITALIC NUMERAL ONE
chars.append(0xFEFF) #uniFEFF ZERO WIDTH NO-BREAK SPACE
return chars
maniteja123/numpy | numpy/distutils/fcompiler/gnu.py | Python | bsd-3-clause | 14,957 | 0.001872
from __future__ import division, absolute_import, print_function
import re
import os
import sys
import warnings
import platform
import tempfile
from subprocess import Popen, PIPE, STDOUT
from numpy.distutils.fcompiler import FCompiler
from numpy.distutils.exec_command import exec_command
from numpy.distutils.misc_util import msvc_runtime_library
from numpy.distutils.compat import get_exception
compilers = ['GnuFCompiler', 'Gnu95FCompiler']
TARGET_R = re.compile("Target: ([a-zA-Z0-9_\-]*)")
# XXX: handle cross compilation
def is_win64():
return sys.platform == "win32" and platform.architecture()[0] == "64bit"
if is_win64():
#_EXTRAFLAGS = ["-fno-leading-underscore"]
_EXTRAFLAGS = []
else:
_EXTRAFLAGS = []
class GnuFCompiler(FCompiler):
compiler_type = 'gnu'
compiler_aliases = ('g77',)
description = 'GNU Fortran 77 compiler'
def gnu_version_match(self, version_string):
"""Handle the different versions of GNU fortran compilers"""
# Strip warning(s) that may be emitted by gfortran
while version_string.startswith('gfortran: warning'):
version_string = version_string[version_string.find('\n')+1:]
# Gfortran versions from after 2010 will output a simple string
# (usually "x.y", "x.y.z" or "x.y.z-q") for ``-dumpversion``; older
# gfortrans may still return long version strings (``-dumpversion`` was
# an alias for ``--version``)
if len(version_string) <= 20:
# Try to find a valid version string
m = re.search(r'([0-9.]+)', version_string)
if m:
# g77 provides a longer version string that starts with GNU
# Fortran
if version_string.startswith('GNU Fortran'):
return ('g77', m.group(1))
# gfortran only outputs a version string such as #.#.#, so check
# if the match is at the start of the string
elif m.start() == 0:
return ('gfortran', m.group(1))
else:
# Output probably from --version, try harder:
m = re.search(r'GNU Fortran\s+95.*?([0-9-.]+)', version_string)
if m:
return ('gfortran', m.group(1))
m = re.search(r'GNU Fortran.*?\-?([0-9-.]+)', version_string)
if m:
v = m.group(1)
if v.startswith('0') or v.startswith('2') or v.startswith('3'):
# the '0' is for early g77's
return ('g77', v)
else:
# at some point in the 4.x series, the ' 95' was dropped
# from the version string
return ('gfortran', v)
# If still nothing, raise an error to make the problem easy to find.
err = 'A valid Fortran version was not found in this string:\n'
raise ValueError(err + version_string)
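    # Illustrative examples (added annotation, not in the original source),
    # following the branches above; the version strings are assumptions:
    #   gnu_version_match('4.8.5')                   -> ('gfortran', '4.8.5')
    #   gnu_version_match('GNU Fortran (GCC) 3.4.6') -> ('g77', '3.4.6')
    # i.e. short -dumpversion output is treated as gfortran, while the long
    # "GNU Fortran" banner printed by g77 maps to the g77 compiler.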
def version_match(self, version_string):
v = self.gnu_version_match(version_string)
if not v or v[0] != 'g77':
return None
return v[1]
possible_executables = ['g77', 'f77']
executables = {
'version_cmd' : [None, "-dumpversion"],
'compiler_f77' : [None, "-g", "-Wall", "-fno-second-underscore"],
'compiler_f90' : None, # Use --fcompiler=gnu95 for f90 codes
'compiler_fix' : None,
'linker_so' : [None, "-g", "-Wall"],
'archiver' : ["ar", "-cr"],
'ranlib' : ["ranlib"],
'linker_exe' : [None, "-g", "-Wall"]
}
module_dir_switch = None
module_include_switch = None
# Cygwin: f771: warning: -fPIC ignored for target (all code is
# position independent)
if os.name != 'nt' and sys.platform != 'cygwin':
pic_flags = ['-fPIC']
# use -mno-cygwin for g77 when Python is not Cygwin-Python
if sys.platform == 'win32':
for key in ['version_cmd', 'compiler_f77', 'linker_so', 'linker_exe']:
executables[key].append('-mno-cygwin')
g2c = 'g2c'
suggested_f90_compiler = 'gnu95'
def get_flags_linker_so(self):
opt = self.linker_so[1:]
if sys.platform == 'darwin':
target = os.environ.get('MACOSX_DEPLOYMENT_TARGET', None)
# If MACOSX_DEPLOYMENT_TARGET is set, we simply trust the value
# and leave it alone. But, distutils will complain if the
# environment's value is different from the one in the Python
# Makefile used to build Python. We let disutils handle this
# error checking.
if not target:
# If MACOSX_DEPLOYMENT_TARGET is not set in the environment,
# we try to get it first from the Python Makefile and then we
# fall back to setting it to 10.3 to maximize the set of
# versions we can work with. This is a reasonable default
# even when using the official Python dist and those derived
# from it.
import distutils.sysconfig as sc
g = {}
try:
get_makefile_filename = sc.get_makefile_filename
except AttributeError:
pass # i.e. PyPy
else:
filename = get_makefile_filename()
sc.parse_makefile(filename, g)
target = g.get('MACOSX_DEPLOYMENT_TARGET', '10.3')
os.environ['MACOSX_DEPLOYMENT_TARGET'] = target
if target == '10.3':
s = 'Env. variable MACOSX_DEPLOYMENT_TARGET set to 10.3'
warnings.warn(s, stacklevel=2)
opt.extend(['-undefined', 'dynamic_lookup', '-bundle'])
else:
opt.append("-shared")
if sys.platform.startswith('sunos'):
# SunOS often has dynamically loaded symbols defined in the
# static library libg2c.a The linker doesn't like this. To
# ignore the problem, use the -mimpure-text flag. It isn't
# the safest thing, but seems to work. 'man gcc' says:
# ".. Instead of using -mimpure-text, you should compile all
# source code with -fpic or -fPIC."
opt.append('-mimpure-text')
return opt
def get_libgcc_dir(self):
status, output = exec_command(self.compiler_f77 +
['-print-libgcc-file-name'],
use_tee=0)
if not status:
return os.path.dirname(output)
return None
def get_library_dirs(self):
opt = []
if sys.platform[:5] != 'linux':
d = self.get_libgcc_dir()
if d:
# if windows and not cygwin, libg2c lies in a different folder
if sys.platform == 'win32' and not d.startswith('/usr/lib'):
d = os.path.normpath(d)
path = os.path.join(d, "lib%s.a" % self.g2c)
if not os.path.exists(path):
root = os.path.join(d, *((os.pardir,)*4))
d2 = os.path.abspath(os.path.join(root, 'lib'))
path = os.path.join(d2, "lib%s.a" % self.g2c)
if os.path.exists(path):
opt.append(d2)
opt.append(d)
return opt
def get_libraries(self):
opt = []
d = self.get_libgcc_dir()
        if d is not None:
g2c = self.g2c + '-pic'
f = self.static_lib_format % (g2c, self.static_lib_extension)
if not os.path.isfile(os.path.join(d, f)):
g2c = self.g2c
else:
g2c = self.g2c
if g2c is not None:
            opt.append(g2c)
c_compiler = self.c_compiler
if sys.platform == 'win32' and c_compiler and \
c_compiler.compiler_type == 'msvc':
# the following code is not needed (read: breaks) when using MinGW
# in case want to link F77 compiled code with MSVC
opt.append('gcc')
runtime_lib = msv
ge0rgi/cinder | cinder/tests/unit/api/v3/test_volume_manage.py | Python | apache-2.0 | 8,439 | 0
# Copyright (c) 2016 Stratoscale, Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import ddt
import mock
from oslo_config import cfg
from oslo_serialization import jsonutils
try:
from urllib import urlencode
except ImportError:
from urllib.parse import urlencode
import webob
from cinder.api.v3 import router as router_v3
from cinder import context
from cinder import objects
from cinder import test
from cinder.tests.unit.api.contrib import test_volume_manage as test_contrib
from cinder.tests.unit.api import fakes
from cinder.tests.unit import fake_constants as fake
CONF = cfg.CONF
def app():
# no auth, just let environ['cinder.context'] pass through
api = router_v3.APIRouter()
mapper = fakes.urlmap.URLMap()
mapper['/v3'] = api
return mapper
@ddt.ddt
@mock.patch('cinder.objects.service.Service.get_by_host_and_topic',
test_contrib.service_get)
@mock.patch('cinder.volume.volume_types.get_volume_type_by_name',
test_contrib.vt_get_volume_type_by_name)
@mock.patch('cinder.volume.volume_types.get_volume_type',
test_contrib.vt_get_volume_type)
class VolumeManageTest(test.TestCase):
"""Test cases for cinder/api/v3/volume_manage.py"""
def setUp(self):
super(VolumeManageTest, self).setUp()
self._admin_ctxt = context.RequestContext(fake.USER_ID,
fake.PROJECT_ID,
True)
def _get_resp_post(self, body, version="3.8"):
"""Helper to execute a POST manageable_volumes API call."""
req = webob.Request.blank('/v3/%s/manageable_volumes' %
fake.PROJECT_ID)
req.method = 'POST'
req.headers['Content-Type'] = 'application/json'
        req.headers['OpenStack-API-Version'] = 'volume ' + version
req.environ['cinder.context'] = self._admin_ctxt
        req.body = jsonutils.dump_as_bytes(body)
res = req.get_response(app())
return res
@mock.patch('cinder.volume.api.API.manage_existing',
wraps=test_contrib.api_manage)
@mock.patch(
'cinder.api.openstack.wsgi.Controller.validate_name_and_description')
def test_manage_volume_route(self, mock_validate, mock_api_manage):
"""Test call to manage volume.
There is currently no change between the API in contrib and the API in
v3, so here we simply check that the call is routed properly, rather
than copying all the tests.
"""
body = {'volume': {'host': 'host_ok', 'ref': 'fake_ref'}}
res = self._get_resp_post(body)
self.assertEqual(202, res.status_int, res)
def test_manage_volume_previous_version(self):
body = {'volume': {'host': 'host_ok', 'ref': 'fake_ref'}}
        res = self._get_resp_post(body, version="3.7")
self.assertEqual(400, res.status_int, res)
def _get_resp_get(self, host, detailed, paging, version="3.8", **kwargs):
"""Helper to execute a GET os-volume-manage API call."""
params = {'host': host} if host else {}
params.update(kwargs)
if paging:
params.update({'marker': '1234', 'limit': 10,
'offset': 4, 'sort': 'reference:asc'})
query_string = "?%s" % urlencode(params)
detail = ""
if detailed:
detail = "/detail"
req = webob.Request.blank('/v3/%s/manageable_volumes%s%s' %
(fake.PROJECT_ID, detail, query_string))
req.method = 'GET'
req.headers['Content-Type'] = 'application/json'
req.headers['OpenStack-API-Version'] = 'volume ' + version
req.environ['cinder.context'] = self._admin_ctxt
res = req.get_response(app())
return res
@mock.patch('cinder.volume.api.API.get_manageable_volumes',
wraps=test_contrib.api_get_manageable_volumes)
def test_get_manageable_volumes_route(self, mock_api_manageable):
"""Test call to get manageable volumes.
There is currently no change between the API in contrib and the API in
v3, so here we simply check that the call is routed properly, rather
than copying all the tests.
"""
res = self._get_resp_get('fakehost', False, True)
self.assertEqual(200, res.status_int)
def test_get_manageable_volumes_previous_version(self):
res = self._get_resp_get('fakehost', False, True, version="3.7")
self.assertEqual(404, res.status_int)
@mock.patch('cinder.volume.api.API.get_manageable_volumes',
wraps=test_contrib.api_get_manageable_volumes)
def test_get_manageable_volumes_detail_route(self, mock_api_manageable):
"""Test call to get manageable volumes (detailed).
There is currently no change between the API in contrib and the API in
v3, so here we simply check that the call is routed properly, rather
than copying all the tests.
"""
res = self._get_resp_get('fakehost', True, False)
self.assertEqual(200, res.status_int)
def test_get_manageable_volumes_detail_previous_version(self):
res = self._get_resp_get('fakehost', True, False, version="3.7")
self.assertEqual(404, res.status_int)
@ddt.data((True, True, 'detail_list'), (True, False, 'summary_list'),
(False, True, 'detail_list'), (False, False, 'summary_list'))
@ddt.unpack
@mock.patch('cinder.objects.Service.is_up', True)
@mock.patch('cinder.volume.rpcapi.VolumeAPI._get_cctxt')
@mock.patch('cinder.objects.Service.get_by_id')
def test_get_manageable_detail(self, clustered, is_detail, view_method,
get_service_mock, get_cctxt_mock):
if clustered:
host = None
cluster_name = 'mycluster'
version = '3.17'
kwargs = {'cluster': cluster_name}
else:
host = 'fakehost'
cluster_name = None
version = '3.8'
kwargs = {}
service = objects.Service(disabled=False, host='fakehost',
cluster_name=cluster_name)
get_service_mock.return_value = service
volumes = [mock.sentinel.volume1, mock.sentinel.volume2]
get_cctxt_mock.return_value.call.return_value = volumes
view_data = {'manageable-volumes': [{'vol': str(v)} for v in volumes]}
view_path = ('cinder.api.views.manageable_volumes.ViewBuilder.' +
view_method)
with mock.patch(view_path, return_value=view_data) as detail_view_mock:
res = self._get_resp_get(host, is_detail, False, version=version,
**kwargs)
self.assertEqual(200, res.status_int)
get_cctxt_mock.assert_called_once_with(service.service_topic_queue,
version=('3.10', '3.0'))
get_cctxt_mock.return_value.call.assert_called_once_with(
mock.ANY, 'get_manageable_volumes', marker=None,
limit=CONF.osapi_max_limit, offset=0, sort_keys=['reference'],
sort_dirs=['desc'], want_objects=True)
detail_view_mock.assert_called_once_with(mock.ANY, volumes,
len(volumes))
get_service_mock.assert_called_once_with(
mock.ANY, None, host=host, binary='cinder-volume',
cluster_name=cluster_name)
@ddt.data('3.8', '3.17')
def test_get_manageable_missing_host(self, version):
res = self._get_resp_get(None, True, False, version=version)
s
TheMOOCAgency/edx-platform | openedx/core/djangoapps/programs/tasks/v1/tests/test_tasks.py | Python | agpl-3.0 | 14,729 | 0.00224
"""
Tests for programs celery tasks.
"""
import json
import unittest
from celery.exceptions import MaxRetriesExceededError
import ddt
from django.conf import settings
from django.core.cache import cache
from django.test import override_settings, TestCase
from edx_rest_api_client.client import EdxRestApiClient
from edx_oauth2_provider.tests.factories import ClientFactory
import httpretty
import mock
from provider.constants import CONFIDENTIAL
from lms.djangoapps.certificates.api import MODES
from openedx.core.djangoapps.credentials.tests.mixins import CredentialsApiConfigMixin
from openedx.core.djangoapps.programs.tests import factories
from openedx.core.djangoapps.programs.tests.mixins import ProgramsApiConfigMixin
from openedx.core.djangoapps.programs.tasks.v1 import tasks
from openedx.core.djangolib.testing.utils import CacheIsolationTestCase
from student.tests.factories import UserFactory
TASKS_MODULE = 'openedx.core.djangoapps.programs.tasks.v1.tasks'
UTILS_MODULE = 'openedx.core.djangoapps.programs.utils'
@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms')
class GetApiClientTestCase(TestCase, ProgramsApiConfigMixin):
"""
Test the get_api_client function
"""
@mock.patch(TASKS_MODULE + '.JwtBuilder.build_token')
def test_get_api_client(self, mock_build_token):
"""
Ensure the function is making the right API calls based on inputs
"""
student = UserFactory()
ClientFactory.create(name='programs')
api_config = self.create_programs_config(
internal_service_url='http://foo',
api_version_number=99,
)
mock_build_token.return_value = 'test-token'
api_client = tasks.get_api_client(api_config, student)
self.assertEqual(api_client._store['base_url'], 'http://foo/api/v99/') # pylint: disable=protected-access
self.assertEqual(api_client._store['session'].auth.token, 'test-token') # pylint: disable=protected-access
@httpretty.activate
@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms')
class GetCompletedProgramsTestCase(ProgramsApiConfigMixin, CacheIsolationTestCase):
"""
Test the get_completed_programs function
"""
ENABLED_CACHES = ['default']
def setUp(self):
super(GetCompletedProgramsTestCase, self).setUp()
self.user = UserFactory()
self.programs_config = self.create_programs_config(cache_ttl=5)
ClientFactory(name=self.programs_config.OAUTH2_CLIENT_NAME, client_type=CONFIDENTIAL)
cache.clear()
def _mock_programs_api(self, data):
"""Helper for mocking out Programs API URLs."""
self.assertTrue(httpretty.is_enabled(), msg='httpretty must be enabled to mock Programs API calls.')
url = self.programs_config.internal_api_url.strip('/') + '/programs/'
body = json.dumps({'results': data})
httpretty.register_uri(httpretty.GET, url, body=body, content_type='application/json')
d
|
ef _assert_num_requests(self, count):
"""DRY helper for verifying request counts."""
self.assertEqual(len(httpretty.httpretty.latest_requests), count)
@mock.patch(UTILS_MODULE + '.get_completed_courses')
    def test_get_completed_programs(self, mock_get_completed_courses):
"""
Verify that completed programs are found, using the cache when possible.
"""
course_id = 'org/course/run'
data = [
factories.Program(
organizations=[factories.Organization()],
course_codes=[
factories.CourseCode(run_modes=[
factories.RunMode(course_key=course_id),
]),
]
),
]
self._mock_programs_api(data)
mock_get_completed_courses.return_value = [
{'course_id': course_id, 'mode': MODES.verified}
]
for _ in range(2):
result = tasks.get_completed_programs(self.user)
self.assertEqual(result, [data[0]['id']])
# Verify that only one request to programs was made (i.e., the cache was hit).
self._assert_num_requests(1)
@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms')
class GetAwardedCertificateProgramsTestCase(TestCase):
"""
Test the get_awarded_certificate_programs function
"""
def make_credential_result(self, **kwargs):
"""
Helper to make dummy results from the credentials API
"""
result = {
'id': 1,
'username': 'dummy-username',
'credential': {
'credential_id': None,
'program_id': None,
},
'status': 'dummy-status',
'uuid': 'dummy-uuid',
'certificate_url': 'http://credentials.edx.org/credentials/dummy-uuid/'
}
result.update(**kwargs)
return result
@mock.patch(TASKS_MODULE + '.get_user_credentials')
def test_get_awarded_certificate_programs(self, mock_get_user_credentials):
"""
Ensure the API is called and results handled correctly.
"""
student = UserFactory(username='test-username')
mock_get_user_credentials.return_value = [
self.make_credential_result(status='awarded', credential={'program_id': 1}),
self.make_credential_result(status='awarded', credential={'course_id': 2}),
self.make_credential_result(status='revoked', credential={'program_id': 3}),
]
result = tasks.get_awarded_certificate_programs(student)
self.assertEqual(mock_get_user_credentials.call_args[0], (student, ))
self.assertEqual(result, [1])
@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms')
class AwardProgramCertificateTestCase(TestCase):
"""
Test the award_program_certificate function
"""
@httpretty.activate
def test_award_program_certificate(self):
"""
Ensure the correct API call gets made
"""
test_username = 'test-username'
test_client = EdxRestApiClient('http://test-server', jwt='test-token')
httpretty.register_uri(
httpretty.POST,
'http://test-server/user_credentials/',
)
tasks.award_program_certificate(test_client, test_username, 123)
expected_body = {
'username': test_username,
'credential': {'program_id': 123},
'attributes': []
}
self.assertEqual(json.loads(httpretty.last_request().body), expected_body)
@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms')
@ddt.ddt
@mock.patch(TASKS_MODULE + '.award_program_certificate')
@mock.patch(TASKS_MODULE + '.get_awarded_certificate_programs')
@mock.patch(TASKS_MODULE + '.get_completed_programs')
@override_settings(CREDENTIALS_SERVICE_USERNAME='test-service-username')
class AwardProgramCertificatesTestCase(TestCase, ProgramsApiConfigMixin, CredentialsApiConfigMixin):
"""
Tests for the 'award_program_certificates' celery task.
"""
def setUp(self):
super(AwardProgramCertificatesTestCase, self).setUp()
self.create_programs_config()
self.create_credentials_config()
self.student = UserFactory.create(username='test-student')
ClientFactory.create(name='programs')
ClientFactory.create(name='credentials')
UserFactory.create(username=settings.CREDENTIALS_SERVICE_USERNAME) # pylint: disable=no-member
def test_completion_check(
self,
mock_get_completed_programs,
mock_get_awarded_certificate_programs, # pylint: disable=unused-argument
mock_award_program_certificate, # pylint: disable=unused-argument
):
"""
Checks that the Programs API is used correctly to determine completed
programs.
"""
tasks.award_program_certificates.delay(self.student.username).get()
mock_get_completed_programs.assert_called_once_with(self.student)
@ddt.data(
([1],
kumar303/rockit | vendor-local/boto/s3/bucket.py | Python | bsd-3-clause | 62,883 | 0.001749
# Copyright (c) 2006-2010 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2010, Eucalyptus Systems, Inc.
# All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import boto
from boto import handler
from boto.resultset import ResultSet
from boto.exception import BotoClientError
from boto.s3.acl import Policy, CannedACLStrings, Grant
from boto.s3.key import Key
from boto.s3.prefix import Prefix
from boto.s3.deletemarker import DeleteMarker
from boto.s3.multipart import MultiPartUpload
from boto.s3.multipart import CompleteMultiPartUpload
from boto.s3.multidelete import MultiDeleteResult
from boto.s3.multidelete import Error
from boto.s3.bucketlistresultset import BucketListResultSet
from boto.s3.bucketlistresultset import VersionedBucketListResultSet
from boto.s3.bucketlistresultset import MultiPartUploadListResultSet
from boto.s3.lifecycle import Lifecycle
from boto.s3.bucketlogging import BucketLogging
import boto.jsonresponse
import boto.utils
import xml.sax
import xml.sax.saxutils
import StringIO
import urllib
import re
import base64
from collections import defaultdict
# as per http://goo.gl/BDuud (02/19/2011)
class S3WebsiteEndpointTranslate:
trans_region = defaultdict(lambda :'s3-website-us-east-1')
trans_region['eu-west-1'] = 's3-website-eu-west-1'
trans_region['us-west-1'] = 's3-website-us-west-1'
trans_region['us-west-2'] = 's3-website-us-west-2'
trans_region['sa-east-1'] = 's3-website-sa-east-1'
trans_region['ap-northeast-1'] = 's3-website-ap-northeast-1'
trans_region['ap-southeast-1'] = 's3-website-ap-southeast-1'
@classmethod
def translate_region(self, reg):
return self.trans_region[reg]
S3Permissions = ['READ', 'WRITE', 'READ_ACP', 'WRITE_ACP', 'FULL_CONTROL']
class Bucket(object):
LoggingGroup = 'http://acs.amazonaws.com/groups/s3/LogDelivery'
BucketPaymentBody = """<?xml version="1.0" encoding="UTF-8"?>
<RequestPaymentConfiguration xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
<Payer>%s</Payer>
</RequestPaymentConfiguration>"""
VersioningBody = """<?xml version="1.0" encoding="UTF-8"?>
<VersioningConfiguration xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
<Status>%s</Status>
<MfaDelete>%s</MfaDelete>
</VersioningConfiguration>"""
WebsiteBody = """<?xml version="1.0" encoding="UTF-8"?>
<WebsiteConfiguration xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
<IndexDocument><Suffix>%s</Suffix></IndexDocument>
%s
</WebsiteConfiguration>"""
WebsiteErrorFragment = """<ErrorDocument><Key>%s</Key></ErrorDocument>"""
VersionRE = '<Status>([A-Za-z]+)</Status>'
MFADeleteRE = '<MfaDelete>([A-Za-z]+)</MfaDelete>'
def __init__(self, connection=None, name=None, key_class=Key):
self.name = name
self.connection = connection
self.key_class = key_class
def __repr__(self):
return '<Bucket: %s>' % self.name
def __iter__(self):
return iter(BucketListResultSet(self))
def __contains__(self, key_name):
return not (self.get_key(key_name) is None)
def startElement(self, name, attrs, connection):
return None
def endElement(self, name, value, connection):
if name == 'Name':
self.name = value
elif name == 'CreationDate':
self.creation_date = value
else:
setattr(self, name, value)
def set_key_class(self, key_class):
"""
Set the Key class associated with this bucket. By default, this
would be the boto.s3.key.Key class but if you want to subclass that
for some reason this allows you to associate your new class with a
bucket so that when you call bucket.new_key() or when you get a listing
of keys in the bucket you will get an instances of your key class
rather than the default.
:type key_class: class
:param key_class: A subclass of Key that can be more specific
"""
self.key_class = key_class
def lookup(self, key_name, headers=None):
"""
Deprecated: Please use get_key method.
:type key_name: string
:param key_name: The name of the key to retrieve
:rtype: :class:`boto.s3.key.Key`
:returns: A Key object from this bucket.
"""
return self.get_key(key_name, headers=headers)
def get_key(self, key_name, headers=None, version_id=None):
"""
Check to see if a particular key exists within the bucket. This
method uses a HEAD request to check for the existance of the key.
Returns: An instance of a Key object or None
:type key_name: string
:param key_name: The name of the key to retrieve
:rtype: :class:`boto.s3.key.Key`
:returns: A Key object from this bucket.
"""
if version_id:
query_args = 'versionId=%s' % version_id
else:
query_args = None
response = self.connection.make_request('HEAD', self.name, key_name,
headers=headers,
query_args=query_args)
# Allow any success status (2xx) - for example this lets us
# support Range gets, which return status 206:
if response.status/100 == 2:
response.read()
k = self.key_class(self)
provider = self.connection.provider
k.metadata = boto.utils.get_aws_metadata(response.msg, provider)
k.etag = response.getheader('etag')
k.content_type = response.getheader('content-type')
k.content_encoding = response.getheader('content-encoding')
k.last_modified = response.getheader('last-modified')
# the following machinations are a workaround to the fact that
# apache/fastcgi omits the content-length header on HEAD
# requests when the content-length is zero.
# See http://goo.gl/0Tdax for more details.
clen = response.getheader('content-length')
if clen:
k.size = int(response.getheader('content-length'))
else:
k.size = 0
k.cache_control = response.getheader('cache-control')
k.name = key_name
k.handle_version_headers(response)
k.handle_encryption_headers(response)
return k
else:
if response.status == 404:
response.read()
return None
else:
raise self.connection.provider.storage_response_error(
response.status, response.reason, '')
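    # Illustrative usage sketch (added annotation, not part of the original
    # file); bucket and key names below are hypothetical:
    #
    #     conn = boto.connect_s3()
    #     bucket = conn.get_bucket('example-bucket')
    #     key = bucket.get_key('path/to/object.txt')
    #     if key is not None:
    #         print key.size, key.etag, key.last_modified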
def list(self, prefix='', delimiter='', marker='', headers=None):
"""
List key objects within a bucket. This returns an instance of an
BucketListResultSet that automatically handles all of the result
paging, etc. from S3. You just need to keep iterating until
there are no more results.
Called with no arguments, this will return a
rossant/galry | examples/modern_art.py | Python | bsd-3-clause | 365 | 0.00274
"""Modern art."""
from galry import *
import numpy.random as rdn
figure(constrain_ratio=True, antialiasing=True)
# random positions
positions = .25 * rdn.randn(1000, 2)
# random colors
colors = rdn.rand(len(positions),4)
# TRIANGLES: three consecutive points = o
|
ne triangle, no overlap
plot(primitive_type='TRIANGLES', position=positions, color=colors)
show()
volnt/22board | app/config.py | Python | mit | 240 | 0
from os import environ
AWS_ACCESS_KEY_ID = environ["AWS_ACCESS_KEY_ID"]
AWS_SECRET_ACCESS_KEY = environ["AWS_SECRET_ACCESS_KEY"]
BUCKET_NAME = "22board-captchas"
AWS_ROOT_URL = "https://s3-eu-west-1.amazonaws.com/{}/".format(BUCKET_NAME)
cubiks/rpi_thermo_py | test/test_logging.py | Python | mit | 147 | 0
import logging.config
logging.config.fileConfig("config/logging.conf")
logger = logging.getLogger("temp")
logger.info("Using temperature logger")
danielhers/ucca | scripts/set_external_id_offline.py | Python | gpl-3.0 | 1,091 | 0.003666
#!/usr/bin/env python3
import argparse
import os
import sys
from ucca.ioutil import get_passages_with_progress_bar, write_passage
desc = """Rename passages by a given mapping of IDs"""
def main(filename, input_filenames, outdir):
    os.makedirs(outdir, exist_ok=True)
with open(filename, encoding="utf-8") as f:
pairs = [line.strip().split() for line in f]
old_to_new_id = {old_id: new_id for new_id, old_id in pairs}
for passage in get_passages_with_progress_bar(input_filenames, desc="Renaming"):
passage._ID = old_to_new_id[passage.ID]
        write_passage(passage, outdir=outdir, verbose=False)
if __name__ == "__main__":
argument_parser = argparse.ArgumentParser(description=desc)
argument_parser.add_argument("filename", help="file with lines of the form <NEW ID> <OLD ID>")
argument_parser.add_argument("input_filenames", help="filename pattern or directory with input passages")
argument_parser.add_argument("-o", "--outdir", default=".", help="output directory")
main(**vars(argument_parser.parse_args()))
sys.exit(0)
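# Illustrative note (added annotation, not part of the original script): the
# mapping file passed as ``filename`` is expected to contain whitespace-
# separated pairs, one per line, in the order <NEW ID> <OLD ID>, e.g.
# (hypothetical IDs):
#
#     504001 wiki_0001
#     504002 wiki_0002
#
# Each input passage whose ID appears in the second column is rewritten to the
# corresponding new ID and saved into --outdir.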
fniephaus/alfred-homebrew | src/brew.py | Python | mit | 12,427 | 0.000644
# encoding: utf-8
import os
import subprocess
import sys
from workflow import Workflow3 as Workflow, MATCH_SUBSTRING
from workflow.background import run_in_background
import brew_actions
import helpers
GITHUB_SLUG = 'fniephaus/alfred-homebrew'
def execute(wf, cmd_list):
brew_arch = helpers.get_brew_arch(wf)
new_env = helpers.initialise_path(brew_arch)
cmd, err = subprocess.Popen(cmd_list,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
env=new_env).communicate()
if err:
return err
return cmd
def get_all_formulae():
return execute(wf, ['brew', 'formulae']).splitlines()
def get_installed_formulae():
return execute(wf, ['brew', 'list', '--versions']).splitlines()
def get_pinned_formulae():
return execute(wf, ['brew', 'list', '--pinned', '--versions']).splitlines()
def get_outdated_formulae():
return execute(wf, ['brew', 'outdated', '--formula']).splitlines()
def get_info():
return execute(wf, ['brew', 'info'])
def get_commands(wf, query):
result = execute(wf, ['brew', 'commands']).splitlines()
commands = [x for x in result if ' ' not in x]
query_filter = query.split()
if len(query_filter) > 1:
return wf.filter(query_filter[1], commands, match_on=MATCH_SUBSTRING)
return commands
def get_all_services():
services_response = execute(wf, ['brew', 'services', 'list']).splitlines()
services_response.pop(0)
services = []
for serviceLine in services_response:
services.append({'name': serviceLine.split()[0], 'status': serviceLine.split()[1]})
return services
def filter_all_formulae(wf, query):
formulae = wf.cached_data('brew_all_formulae',
get_all_formulae,
max_age=3600)
query_filter = query.split()
if len(query_filter) > 1:
return wf.filter(query_filter[1], formulae, match_on=MATCH_SUBSTRING)
return formulae
def filter_installed_formulae(wf, query):
formulae = wf.cached_data('brew_installed_formulae',
get_installed_formulae,
max_age=3600)
query_filter = query.split()
if len(query_filter) > 1:
return wf.filter(query_filter[1], formulae, match_on=MATCH_SUBSTRING)
return formulae
def filter_pinned_formulae(wf, query):
formulae = wf.cached_data('brew_pinned_formulae',
get_pinned_formulae,
max_age=3600)
query_filter = query.split()
if len(query_filter) > 1:
return wf.filter(query_filter[1], formulae, match_on=MATCH_SUBSTRING)
return formulae
def filter_outdated_formulae(wf, query):
formulae = wf.cached_data('brew_outdated_formulae',
get_outdated_formulae,
max_age=3600)
query_filter = query.split()
if len(query_filter) > 1:
return wf.filter(query_filter[1], formulae, match_on=MATCH_SUBSTRING)
return formulae
def filter_all_services(wf, query):
services = wf.cached_data('brew_all_services',
get_all_services,
session=True)
query_filter = query.split()
if len(query_filter) > 1:
return wf.filter(query_filter[1], services, key=lambda x: x['name'], match_on=MATCH_SUBSTRING)
return services
def add_service_actions(wf, service_name):
wf.add_item('Run Service',
'Run the service formula without registering to launch at login (or boot).',
autocomplete='services %s run' % service_name,
arg='brew services run %s' % service_name,
valid=True,
icon=helpers.get_icon(wf, 'chevron-right'))
wf.add_item('Stop Service',
'Stop the service formula immediately and unregister it from launching at login (or boot).',
autocomplete='services %s stop' % service_name,
arg='brew services stop %s' % service_name,
valid=True,
icon=helpers.get_icon(wf, 'chevron-right'))
wf.add_item('Start Service',
'Start the service formula immediately and register it to launch at login (or boot).',
autocomplete='services %s start' % service_name,
arg='brew services start %s' % service_name,
valid=True,
icon=helpers.get_icon(wf, 'chevron-right'))
wf.add_item('Restart Service',
'Stop (if necessary) and start the service formula immediately and register it to launch '
'at login (or boot).',
autocomplete='services %s restart' % service_name,
arg='brew services restart %s' % service_name,
valid=True,
icon=helpers.get_icon(wf, 'chevron-right'))
def main(wf):
if wf.update_available:
wf.add_item('An update is available!',
autocomplete='workflow:update',
valid=False,
icon=helpers.get_icon(wf, 'cloud-download'))
# Check for brew installation
find_brew = helpers.brew_installed()
if not (find_brew['INTEL'] or find_brew['ARM']):
helpers.brew_installation_instructions(wf)
else:
# extract query
query = wf.args[0] if len(wf.args) else None
if (not query and
len(wf.cached_data('brew_outdated_formulae',
get_outdated_formulae,
max_age=3600)) > 0):
wf.add_item('Some of your formulae are outdated!',
autocomplete='outdated ',
valid=False,
icon=helpers.get_icon(wf, 'cloud-download'))
if query and query.startswith('install'):
for formula in filter_all_formulae(wf, query):
wf.add_item(formula, 'Install formula.',
arg='brew install %s' % formu
|
la,
valid=True,
icon=helpers.get_icon(wf, 'package'))
elif query and query.startswith('services'):
query_filter = query.split()
if len(query_filter) == 2 and query.endswith(' '):
|
service_name = query_filter[1]
add_service_actions(wf, service_name)
else:
services = filter_all_services(wf, query)
for service in services:
wf.add_item(service['name'], 'Select for action. Status: %s' % service['status'],
autocomplete='services %s ' % service['name'],
arg='',
valid=False,
icon=helpers.get_icon(wf, 'package'))
elif query and query.startswith('search'):
for formula in filter_all_formulae(wf, query):
wf.add_item(formula, 'Open formula on GitHub.',
arg='brew info --github %s' % formula,
valid=True,
icon=helpers.get_icon(wf, 'package'))
elif query and query.startswith('uninstall'):
for formula in filter_installed_formulae(wf, query):
name = formula.rsplit()[0]
wf.add_item(formula, 'Uninstall formula.',
arg='brew uninstall %s' % name,
valid=True,
icon=helpers.get_icon(wf, 'package'))
elif query and query.startswith('list'):
for formula in filter_installed_formulae(wf, query):
name = formula.rsplit()[0]
wf.add_item(formula, 'Open formula on GitHub.',
arg='brew info --github %s' % name,
valid=True,
icon=helpers.get_icon(wf, 'package'))
elif query and query.startswith('pin'):
for formula in filter_installed_formulae(wf, query):
name
pythonchelle/opencomparison | apps/pypi/slurper.py | Python | mit | 2,191 | 0.010041
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
PyPI interface (see http://wiki.python.org/moin/PyPiXmlRpc)
"""
from datetime import datetime
import itertools
import re
import xmlrpclib
from django.template.defaultfilters import slugify
from package.models import Category, Package, Version
from pypi.versioning import highest_version
from celery.decorators import task
base_url = "http://pypi.python.org/pypi/"
PYPI = xmlrpclib.Server(base_url)
class Slurper(object):
""" Fetches data from PyPI """
def __init__(self, package):
self.package_name = package
        self.dumb_category, created = Category.objects.get_or_create(
            title='Python', slug='python')
self.dumb_category.save()
def get_latest_version_number(self, package_name, versions=None):
""" Returns the latest version number for a package """
if versions:
return highest_version(versions)
else:
return highest_version(PYPI.package_releases(package_name))
def get_or_create_package(self, package_name, version):
        data = PYPI.release_data(package_name, version)
pypi_url = base_url + package_name
package, created = Package.objects.get_or_create(
title = data['name'],
slug = slugify(package_name),
category = self.dumb_category,
pypi_url = base_url + data['name']
)
package.repo_description = data['summary'] or data['description']
if not package.repo_url:
url = data.get("home_page", None) or data.get('project_url',"") or pypi_url
repo_pattern = '((?:http|https|git)://github.com/[^/]*/[^/]*)/{0,1}'
match = re.match(repo_pattern, url)
if match and match.group(1):
package.repo_url = match.group(1)
else:
# TODO do we want to assume this is a repo url?
# should there be more checking for repo patterns?
package.repo_url = url
package.save()
package.fetch_metadata()
return (package, created)
kfcpaladin/sze-the-game | renpy/gl/glblacklist.py | Python | mit | 1,846 | 0
# Copyright 2004-2017 Tom Rothamel <pytom@bishoujo.us>
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# The blacklist of OpenGL cards. Fields are:
# - A substring of the Renderer.
# - A substring of the Version.
# - True to allow shader rendering.
# - True to allow fixed-function rendering.
# If both of the last two entries are false, GL refuses to
# start.
BLACKLIST = [
# Crashes for Mugenjohncel.
("S3 Graphics DeltaChrome", "1.4 20.00", False, False),
    # A bug in Mesa 7.9 and 7.10 (before 7.10.3) causes the system to
# fail to initialize the GLSL compiler.
# https://bugs.freedesktop.org/show_bug.cgi?id=35603
("Mesa", "Mesa 7.9", False, True),
("Mesa", "Mesa 7.10.3", True, True),
("Mesa", "Mesa 7.10", False, True),
# Default to allowing everything.
("", "", True, True),
]
rizzatti/luigi | examples/elasticsearch_index.py | Python | apache-2.0 | 3,410 | 0.00176
# -*- coding: utf-8 -*-
#
# Copyright 2012-2015 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import datetime
import json
import luigi
from luigi.contrib.esindex import CopyToIndex
class FakeDocuments(luigi.Task):
"""
Generates a local file containing 5 elements of data in JSON format.
"""
#: the date parameter.
date = luigi.DateParameter(default=datetime.date.today())
def run(self):
"""
Writes data in JSON format into the task's output target.
The data objects have the following attributes:
* `_id` is the default Elasticsearch id field,
* `text`: the text,
* `date`: the day when the data was created.
"""
today = datetime.date.today()
with self.output().open('w') as output:
for i in range(5):
output.write(json.dumps({'_id': i, 'text': 'Hi %s' % i,
'date': str(today)}))
output.write('\n')
def output(self):
"""
Returns the target output for this task.
In this case, a successful execution of this task will create a file on the local filesystem.
:return: the target output for this task.
:rtype: object (:py:class:`luigi.target.Target`)
"""
return luigi.LocalTarget(path='/tmp/_docs-%s.ldj' % self.date)
class IndexDocuments(CopyToIndex):
"""
This task loads JSON data contained in a :py:class:`luigi.target.Target` into an ElasticSearch index.
This task's input will the target returned by :py:meth:`~.FakeDocuments.output`.
    This class uses :py:meth:`luigi.contrib.esindex.CopyToIndex.run`.
After running this task you can run:
.. code-block:: console
$ curl "localhost:9200/example_index/_search?pretty"
to see the indexed documents.
To see the update log, run
.. code-block:: console
$ curl "localhost:9200/update_log/_search?q=target_index:example_index&pretty"
To cleanup both indexes run:
.. code-block:: console
$ curl -XDELETE "localhost:9200/example_index"
        $ curl -XDELETE "localhost:9200/update_log/_query?q=target_index:example_index"
"""
#: date task parameter (default = today)
date = luigi.DateParameter(default=datetime.date.today())
#: the name of the index in ElasticSearch to be updated.
index = 'example_index'
#: the name of the document type.
doc_type = 'greetings'
#: the host running the ElasticSearch service.
host = 'localhost'
#: the port used by the ElasticSearch service.
port = 9200
def requires(self):
"""
This task's dependencies:
* :py:class:`~.FakeDocuments`
:return: object (:py:class:`luigi.task.Task`)
"""
return FakeDocuments()
if __name__ == "__main__":
luigi.run(['IndexDocuments', '--local-scheduler'])
TalatCikikci/heritago | heritago/heritago/urls.py | Python | mit | 1,796 | 0.000557
""" heritago URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
from heritages import views
from rest_framework.routers import DefaultRouter
router = DefaultRouter()
router.register(r"api/users", views.UserDetail)
router.register(r"api/users", views.Users)
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r"api/v1/heritages/", include("heritages.urls")),
url(r"^api/users/me$", views.UserDetail.as_view({"get": "get_me"})),
# url(r"api/v1/annotations/", views.AnnotationListView.as_view()),
# url(r"api/v1/annotations/(?P<pk>\d+)$", views.AnnotationView.as_view()),
# user auth urls
# url(r'^$', views.diary, name='home'),
# url(r'^login/$', views.login, name='login'),
# url(r'^auth/$', views.auth_view, name='auth_view'),
    # url(r'^logout/$', views.logout, name='logout'),
    # url(r'^invalid/$', views.invalid_login, name='invalid_login'),
    # url(r'^register/$', views.register_user, name='register_user'),
# url(r'^profile/$', views.user_profile, name='user_profile'),
# url(r'^change_password/$', views.change_password , name='password-change'),
]
urlpatterns += router.urls
|
Jumpers/MysoftAutoTest
|
Step1-PythonBasic/Practices/yuxq/1-5/ex5.py
|
Python
|
apache-2.0
| 570 | 0.033333 |
my_name='Zed A. Shaw'
my_age=35 # not a lie
my_height=74 #inches
my_weight=180 #lbs
my_eyes='Blue'
my_teeth='White'
my_hair='Brown'
print "Let's talk about %s." % my_name
print "He's %d inches tall." % my_height
print "He's %d pounds heavy." % my_weight
print "Actually that's not too heavy."
print "He's got %s eyes and %s hair." % (my_eyes,my_hair)
print "His teeth are usually %s depending on the coffee." % my_teeth
#this line is tricky,try to get it exactly right
print "If I add %d,%d,and %d I get %d." % (
my_age,my_height,my_weight,my_age+my_height+my_weight)
|
blurstudio/cross3d
|
cross3d/classes/__init__.py
|
Python
|
mit
| 434 | 0.016129 |
##
# \namespace cross3d.classes
#
# \remarks [desc::commented]
#
# \author Mikeh
# \author Blur Studio
# \date 06/08/11
#
from fcurve import FCurve
from exceptions import Exceptions
from dispatch import Dispatch
from clipboard import Clipboard
from valuerange import ValueRange
from framerange import FrameRange
from filesequence import FileSequence
from timecode import Timecode
from flipbook import FlipBook
|
exleym/IWBT
|
alembic/versions/97cd7f996752_first_commit.py
|
Python
|
apache-2.0
| 552 | 0 |
"""first commit
Revision ID: 97cd7f996752
Revises: 084658cb0aab
Create Date: 2017-05-20 06:49:09.431920
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '97cd7f996752'
down_revision = '084658cb0aab'
branch_labels = None
depends_on = None
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
|
netsuileo/sfu-cluster-dashboard
|
dashboard/app/modules/monitoring_nodes/urls.py
|
Python
|
lgpl-3.0
| 780 | 0.007692 |
# -*- coding: utf8 -*-
from __future__ import absolute_import
from django.conf.urls import url, include
from .views import MonitoringView
urlpatterns = [
url(r'^$', MonitoringView.as_view(), name="index"),
url(r'^info/', include('app.modules.monitoring_nodes.info.urls', namespace='info')),
url(r'^plugins/', include('app.modules.monitoring_nodes.plugins.urls', namespace="plugins")),
    url(r'^nodes/', include('app.modules.monitoring_nodes.nodes.urls', namespace="nodes")),
url(r'^groups/', include('app.modules.monitoring_nodes.groups.urls', namespace="groups")),
url(r'^graphs/', include('app.modules.monitoring_nodes.graphs.urls', namespace="graphs")),
    url(r'^configs/', include('app.modules.monitoring_nodes.configs.urls', namespace="configs")),
]
|
selectnull/serverscope-benchmark
|
serverscope_benchmark/utils.py
|
Python
|
mit
| 2,357 | 0.001273 |
# -*- coding: utf-8 -*-
import sys
import subprocess
import signal
import locale
from six import print_
from six.moves import urllib
import requests
class Color:
PURPLE = '\033[95m'
BLUE = '\033[94m'
GREEN = '\033[92m'
ORANGE = '\033[93m'
RED = '\033[91m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
RESET = '\033[0m'
c = Color
# from http://hg.python.org/cpython/rev/768722b2ae0a/
def restore_signals():
signals = ('SIGPIPE', 'SIGXFZ', 'SIGXFSZ')
for sig in signals:
if hasattr(signal, sig):
            signal.signal(getattr(signal, sig), signal.SIG_DFL)
def run_and_print(command, cwd=None):
p = subprocess.Popen(command,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
bufsize=-1,
cwd=cwd,
preexec_fn=restore_signals,
universal_newlines=True)
chunks = []
encoding = locale.getdefaultlocale()[1] or 'ascii'
try:
while True:
chunk = p.stdout.readline()
if chunk != '':
try:
getattr(sys.stdout, 'buffer', sys.stdout).write(chunk.encode(encoding))
sys.stdout.flush()
except UnicodeDecodeError:
pass
chunks.append(chunk)
else:
break
finally:
p.stdout.close()
p.wait()
return ''.join(chunks)
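# Minimal usage sketch for run_and_print (the command is illustrative):
#
#     output = run_and_print(['uname', '-a'])
#
# The command's stdout is streamed to the terminal as it is produced and the
# full text is also returned as a single string.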
def post_results(data, devnull):
url = 'https://serverscope.io/api/trials.txt'
headers = {
'Content-Type': 'application/x-www-form-urlencoded',
'Accept': 'text/plain',
'User-Agent': 'serverscope.io benchmark tool'
}
response = requests.post(url, data=urllib.parse.urlencode(data), headers=headers)
print_(response.text)
def get_geo_info():
"""Return geo location information."""
print_(c.GREEN + 'Retrieving server location... ' + c.RESET)
try:
cmd = ['curl', '-s', 'http://geoip.nekudo.com/api/']
geo = subprocess.Popen(cmd,
stdout=subprocess.PIPE,
universal_newlines=True).communicate()[0]
except ValueError:
print_(c.RED + "geoip API error. Terminating..." + c.RESET)
sys.exit(1)
return geo
|
MiltosD/CEF-ELRC
|
misc/tools/statserver/urls.py
|
Python
|
bsd-3-clause
| 427 | 0.032787 |
from django.conf.urls.defaults import patterns
from django.conf import settings
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('',
( r'^$', 'statserver.stats.views.browse' ),
( r'^stats/addnode$', 'statserver.stats.views.addnode' ),
    ( r'^media/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.MEDIA_ROOT}),
)
|
louisLouL/pair_trading
|
capstone_env/lib/python3.6/site-packages/kafka/protocol/produce.py
|
Python
|
mit
| 3,232 | 0.001238 |
from __future__ import absolute_import
from .api import Request, Response
from .message import MessageSet
from .types import Int16, Int32, Int64, String, Array, Schema
class ProduceResponse_v0(Response):
API_KEY = 0
API_VERSION = 0
SCHEMA = Schema(
('topics', Array(
('topic', String('utf-8')),
('partitions', Array(
('partition', Int32),
('error_code', Int16),
('offset', Int64)))))
)
class ProduceResponse_v1(Response):
API_KEY = 0
API_VERSION = 1
SCHEMA = Schema(
('topics', Array(
('topic', String('utf-8')),
('partitions', Array(
('partition', Int32),
('error_code', Int16),
('offset', Int64))))),
('throttle_time_ms', Int32)
)
class ProduceResponse_v2(Response):
API_KEY = 0
API_VERSION = 2
SCHEMA = Schema(
('topics', Array(
('topic', String('utf-8')),
('partitions', Array(
('partition', Int32),
('error_code', Int16),
('offset', Int64),
('timestamp', Int64))))),
('throttle_time_ms', Int32)
)
class ProduceResponse_v3(Response):
API_KEY = 0
API_VERSION = 3
SCHEMA = ProduceResponse_v2.SCHEMA
class ProduceRequest_v0(Request):
API_KEY = 0
API_VERSION = 0
RESPONSE_TYPE = ProduceResponse_v0
SCHEMA = Schema(
('required_acks', Int16),
('timeout', Int32),
('topics', Array(
            ('topic', String('utf-8')),
('partitions', Array(
('partition', Int32),
('messages', MessageSet)))))
)
def expect_response(self):
if self.required_acks == 0: # pylint: disable=no-member
return False
return True
class ProduceRequest_v1(Request):
API_KEY = 0
API_VERSION = 1
RESPONSE_TYPE = ProduceResponse_v1
SCHEMA = ProduceRequest_v0.SCHEMA
def expect_response(self):
if self.required_acks == 0: # pylint: disable=no-member
return False
return True
class ProduceRequest_v2(Request):
API_KEY = 0
API_VERSION = 2
RESPONSE_TYPE = ProduceResponse_v2
SCHEMA = ProduceRequest_v1.SCHEMA
def expect_response(self):
if self.required_acks == 0: # pylint: disable=no-member
return False
return True
class ProduceRequest_v3(Request):
API_KEY = 0
API_VERSION = 3
RESPONSE_TYPE = ProduceResponse_v3
SCHEMA = Schema(
('transactional_id', String('utf-8')),
('required_acks', Int16),
('timeout', Int32),
('topics', Array(
('topic', String('utf-8')),
('partitions', Array(
('partition', Int32),
('messages', MessageSet)))))
)
def expect_response(self):
if self.required_acks == 0: # pylint: disable=no-member
return False
return True
ProduceRequest = [
ProduceRequest_v0, ProduceRequest_v1, ProduceRequest_v2,
ProduceRequest_v3
]
ProduceResponse = [
ProduceResponse_v0, ProduceResponse_v1, ProduceResponse_v2,
ProduceResponse_v2
]
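# Usage sketch: the two lists above are indexed by API version, so a client
# that negotiated produce API version 1 would build a request roughly like
# (topic/partition payloads omitted for brevity; field names follow SCHEMA):
#
#     request = ProduceRequest[1](required_acks=1, timeout=30000, topics=[])
#     request.expect_response()  # True, because required_acks != 0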
|
nerzhul/Z-Eye
|
service/WebApp/Z_Eye/engine/Switches/API/__init__.py
|
Python
|
gpl-2.0
| 3,146 | 0.020979 |
# -*- coding: utf-8 -*-
"""
* Copyright (C) 2010-2014 Loic BLOT <http://www.unix-experience.fr/>
*
* This program is free software you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""
import json
from django.shortcuts import render
from django.http import HttpResponse
from django.utils.translation import ugettext_lazy as _
import Cisco
def getSNMPMib(request):
if request.method == "GET" and "mib" in request.GET and "vendor" in request.GET:
# Only Cisco is supported at this time
if request.GET["vendor"] == "cisco" and request.GET["mib"] in Cisco.Mibs:
return HttpResponse(json.dumps(Cisco.Mibs[request.GET["mib"]]), content_type="application/json")
return HttpResponse(_('Err-Wrong-Request'))
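# Illustrative request handled by getSNMPMib (the URL prefix depends on the
# project's URLconf, and the MIB name must exist in Cisco.Mibs):
#
#     GET ...?vendor=cisco&mib=<mib-name>
#
# returns the matching Cisco.Mibs entry as JSON; anything else falls through
# to the translated 'Err-Wrong-Request' message.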
def getPortMibValue(request):
if request.method == "GET" and "vendor" in request.GET and "device" in request.GET and "mib" in request.GET:
if request.GET["vendor"] == "cisco":
SwitchObj = Cisco.CiscoSwitch()
mib = request.GET["mib"]
if SwitchObj.setDevice(request.GET["device"]) and mib in Cisco.Mibs:
if "pid" in request.GET and SwitchObj.setPortId(request.GET["pid"]):
# We don't call methods here, it's faster to use the dictionnary
return HttpResponse(SwitchObj.snmpget(Cisco.Mibs[mib]))
else:
# Invalid the port ID
SwitchObj.setPortId("")
return HttpResponse(SwitchObj.snmpget(Cisco.Mibs[mib]))
return HttpResponse(_('Err-Wrong-Request'))
def setPortMibValue(request):
if request.method == "GET" and "vendor" in request.GET and "device" in request.GET and "mib" in request.GET and "value" in request.GET:
		if request.GET["vendor"] == "cisco":
SwitchObj = Cisco.CiscoSwitch()
mib = request.GET["mib"]
if SwitchObj.setDevice(request.GET["device"]) and mib in Cisco.Mibs:
if "pid" in request.GET and SwitchObj.setPortId(request.GET["pid"]):
# We don't call methods here, it's faster to use the dictionnary
					return HttpResponse(SwitchObj.snmpset(Cisco.Mibs[mib],request.GET["value"]))
else:
# Invalid the port ID
SwitchObj.setPortId("")
return HttpResponse(SwitchObj.snmpset(Cisco.Mibs[mib],request.GET["value"]))
return HttpResponse(_('Err-Wrong-Request'))
def saveDeviceConfig(request):
if request.method == "GET" and "vendor" in request.GET and "device" in request.GET:
if request.GET["vendor"] == "cisco":
SwitchObj = Cisco.CiscoSwitch()
if SwitchObj.setDevice(request.GET["device"]):
return HttpResponse(SwitchObj.saveDeviceConfig())
return HttpResponse(_('Err-Wrong-Request'))
|
nycholas/ask-undrgz
|
src/ask-undrgz/ask_undrgz/settings.py
|
Python
|
bsd-3-clause
| 5,653 | 0.002653 |
# -*- coding: utf-8 -*-
#
# ask-undrgz system of questions uses data from underguiz.
# Copyright (c) 2010, Nycholas de Oliveira e Oliveira <nycholas@gmail.com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# # Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# # Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# # Neither the name of the Nycholas de Oliveira e Oliveira nor the names of
# its contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# Django settings for ask_undrgz project.
import os
ROOT_PATH = os.path.realpath(os.path.dirname(__file__))
DEBUG = False
TEMPLATE_DEBUG = DEBUG
ADMINS = (
('Nycholas de Oliveira e Oliveira', 'nycholas@gmail.com'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': '', # Or path to database file if using sqlite3.
'USER': '', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# Twitter
TWITTER_USERNAME = 'ask_undrgz'
TWITTER_PASSWORD = 'XXX'
TWITTER_CONSUMER_KEY = 'XXX'
TWITTER_CONSUMER_SECRET = 'XXX'
TWITTER_OAUTH_TOKEN = 'XXX'
TWITTER_OAUTH_TOKEN_SECRET = 'XXX'
TWITTER_CALLBACK = 'http://ask-undrgz.appspot.com/_oauth/twitter/callback/'
if DEBUG:
TWITTER_CALLBACK = 'http://localhost:8080/_oauth/twitter/callback/'
ugettext = lambda s: s
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
LANGUAGES = (
('en', ugettext('English')),
('pt-BR', ugettext('Portuguese Brazil')),
)
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True
# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"
MEDIA_URL = ''
# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
ADMIN_MEDIA_PREFIX = '/media/'
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'ti*(j(^fvi!&1cu7#sw7mkhb=dgl5v_$1&v5=wom_l4y!x9j*@'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
# 'django.contrib.sessions.middleware.SessionMiddleware',
# 'django.middleware.csrf.CsrfViewMiddleware',
'django.middleware.locale.LocaleMiddleware',
# 'django.contrib.auth.middleware.AuthenticationMiddleware',
# 'django.contrib.messages.middleware.MessageMiddleware',
)
ROOT_URLCONF = 'ask_undrgz.urls'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
ROOT_PATH + '/templates',
)
INSTALLED_APPS = (
# 'django.contrib.auth',
'django.contrib.contenttypes',
# 'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
# Uncomment the next line to enable the admin:
# 'django.contrib.admin',
'ask_undrgz.question',
)
|
de-vri-es/qtile
|
setup.py
|
Python
|
mit
| 5,466 | 0.000732 |
#!/usr/bin/env python
# Copyright (c) 2008 Aldo Cortesi
# Copyright (c) 2011 Mounier Florian
# Copyright (c) 2012 dmpayton
# Copyright (c) 2014 Sean Vig
# Copyright (c) 2014 roger
# Copyright (c) 2014 Pedro Algarvio
# Copyright (c) 2014-2015 Tycho Andersen
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import sys
import textwrap
from setuptools import setup
from setuptools.command.install import install
class CheckCairoXcb(install):
def cairo_xcb_check(self):
try:
from cairocffi import cairo
cairo.cairo_xcb_surface_create
return True
except AttributeError:
return False
def finalize_options(self):
if not self.cairo_xcb_check():
print(textwrap.dedent("""
It looks like your cairocffi was not built with xcffib support. To fix this:
- Ensure a recent xcffib is installed (pip install 'xcffib>=0.3.2')
- The pip cache is cleared (remove ~/.cache/pip, if it exists)
- Reinstall cairocffi, either:
pip install --no-deps --ignore-installed cairocffi
or
pip uninstall cairocffi && pip install cairocffi
"""))
sys.exit(1)
install.finalize_options(self)
long_description = """
A pure-Python tiling window manager.
Features
========
* Simple, small and extensible. It's easy to write your own layouts,
widgets and commands.
* Configured in Python.
* Command shell that allows all aspects of
Qtile to be managed and inspected.
* Complete remote scriptability - write scripts to set up workspaces,
manipulate windows, update status bar widgets and more.
* Qtile's remote scriptability makes it one of the most thoroughly
  unit-tested window managers around.
"""
if '_cffi_backend' in sys.builtin_module_names:
import _cffi_backend
requires_cffi = "cffi==" + _cffi_backend.__version__
else:
requires_cffi = "cffi>=1.1.0"
# PyPy < 2.6 compatibility
if requires_cffi.startswith("cffi==0."):
cffi_args = dict(
zip_safe=False
)
else:
cffi_args = dict(cffi_modules=[
'libqtile/ffi_build.py:pango_ffi',
'libqtile/ffi_build.py:xcursors_ffi'
])
dependencies = ['xcffib>=0.3.2', 'cairocffi>=0.7', 'six>=1.4.1', requires_cffi]
if sys.version_info >= (3, 4):
pass
elif sys.version_info >= (3, 3):
dependencies.append('asyncio')
else:
dependencies.append('trollius')
setup(
name="qtile",
version="0.10.6",
description="A pure-Python tiling window manager.",
long_description=long_description,
    classifiers=[
        "Intended Audience :: End Users/Desktop",
        "License :: OSI Approved :: MIT License",
        "Development Status :: 3 - Alpha",
        "Programming Language :: Python",
        "Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
"Operating System :: Unix",
"Topic :: Desktop Environment :: Window Managers",
],
keywords="qtile tiling window manager",
author="Aldo Cortesi",
author_email="aldo@nullcube.com",
maintainer="Tycho Andersen",
maintainer_email="tycho@tycho.ws",
url="http://qtile.org",
license="MIT",
install_requires=dependencies,
setup_requires=dependencies,
extras_require={
'ipython': ["ipykernel", "jupyter_console"],
},
packages=['libqtile',
'libqtile.interactive',
'libqtile.layout',
'libqtile.scripts',
'libqtile.widget',
'libqtile.resources'
],
package_data={'libqtile.resources': ['battery-icons/*.png']},
entry_points={
'console_scripts': [
'qtile = libqtile.scripts.qtile:main',
'qtile-run = libqtile.scripts.qtile_run:main',
'qtile-top = libqtile.scripts.qtile_top:main',
'qshell = libqtile.scripts.qshell:main',
]
},
scripts=[
'bin/iqshell',
],
data_files=[
('share/man/man1', ['resources/qtile.1',
'resources/qshell.1'])],
cmdclass={'install': CheckCairoXcb},
**cffi_args
)
|
yoms/sentinel-banner-generator
|
banner_generator.py
|
Python
|
apache-2.0
| 6,246 | 0.013289 |
from osgeo import gdal, osr, ogr
import numpy as np
import scipy.misc
import logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("banner-generator")
def create_raster_from_band( red, green, blue, output_file):
logger.debug("Create big raster in output_file : %s"%output_file)
red_ds = gdal.Open(red)
nx = red_ds.GetRasterBand(1).XSize
ny = red_ds.GetRasterBand(1).YSize
dst_ds = gdal.GetDriverByName('GTiff').Create(output_file, ny, nx, 3, gdal.GDT_UInt16)
dst_ds.SetGeoTransform(red_ds.GetGeoTransform())
dst_ds.SetProjection(red_ds.GetProjection())
def write_band(band, index_band):
logger.debug("Write band : %s"%index_band)
band_ds = gdal.Open(band)
array = band_ds.GetRasterBand(1).ReadAsArray()
dst_ds.GetRasterBand(index_band).WriteArray(array)
write_band(red, 1)
write_band(blue, 2)
write_band(green, 3)
dst_ds.FlushCache()
dst_ds = None
logger.debug("Big raster is write in output_file : %s"%output_file)
def create_png_from_raster(raster_file, output_file, blue_clip=(0.,2500.), red_clip=(0.,2500.), green_clip=(0.,2500.)):
logger.debug("Create big png in output_file : %s"%output_file)
raster_ds = gdal.Open(raster_file)
bytes_max = 255.
if blue_clip[0] > blue_clip[1] :
logger.error("Maximum clip value should be higther than the Minimum clip value")
return False
if red_clip[0] > red_clip[1] :
logger.error("Maximum clip value should be higther than the Minimum clip value")
return False
if green_clip[0] > green_clip[1] :
logger.error("Maximum clip value should be higther than the Minimum clip value")
return False
def clip_array(band_index, clip):
array = np.array(raster_ds.GetRasterBand(band_index).ReadAsArray())
array = np.clip(array, clip[0], clip[1])
array = array - clip[0]
array = (np.float32(array)*bytes_max)/(clip[1]-clip[0])
array = array.astype(int)
return array
logger.debug("Prepare red color, clip raw value at %s, %s"%red_clip)
red_array = clip_array(1, red_clip)
logger.debug("Prepare green color, clip raw value at %s, %s"%green_clip)
    green_array = clip_array(2, green_clip)
logger.debug("Prepare blue color, clip raw value at %s, %s"%blue_clip)
blue_array = clip_array(3, blue_clip)
rgb = np.zeros((len(red_array), len(red_array[0]), 3), dtype=np.uint8)
rgb[..., 0] = red_array
rgb[..., 1] = green_array
rgb[..., 2] = blue_array
logger.debug("Writing png file in %s"%output_file)
scipy.misc.imsave(output_file, rgb)
return True
def get_x_y_for_lon_lat(raster_file, lon, lat):
logger.debug("Compute x and y from lon lat")
logger.debug("Longitude : %s"%lon)
logger.debug("Latitude : %s"%lat)
sref = osr.SpatialReference()
sref.ImportFromEPSG(4326)
# create a geometry from coordinates
point = ogr.Geometry(ogr.wkbPoint)
point.AddPoint(lon, lat)
raster_ds = gdal.Open(raster_file)
dref = osr.SpatialReference()
dref.ImportFromWkt(raster_ds.GetProjection())
ct = osr.CoordinateTransformation(sref,dref)
point.Transform(ct)
point_x = point.GetX()
point_y = point.GetY()
logger.debug("Point value in raster proj")
logger.debug("Point x : %s"%point_x)
logger.debug("Point y : %s"%point_y)
ulx, xres, xskew, uly, yskew, yres = raster_ds.GetGeoTransform()
logger.debug("Upper left coordinate in proj")
logger.debug("Point x : %s"%ulx)
logger.debug("Point x : %s"%uly)
lrx = ulx + (raster_ds.RasterXSize * xres)
lry = uly + (raster_ds.RasterYSize * yres)
logger.debug("Lower rigth coordinate in proj")
logger.debug("Point x : %s"%lrx)
logger.debug("Point x : %s"%lry)
logger.debug("Raster resolution")
logger.debug("Res on X : %s"%xres)
logger.debug("Res on Y : %s"%yres)
point_x = (point_x- ulx)/xres
point_y = (point_y- uly)/yres
return (int(point_x), int(point_y) )
def extract_banner(img_path, x, y, size_x, size_y, out_path):
logger.debug("Extract banner")
y_min = int(y-size_y/2)
y_max = y_min+size_y
x_min = int(x-size_x/2)
x_max = x_min+size_x
logger.debug("Extract data from table")
logger.debug("Min x : %s"%x_min)
logger.debug("Max x : %s"%x_max)
logger.debug("Min y : %s"%y_min)
logger.debug("Max y : %s"%y_max)
img = scipy.misc.imread(img_path)
y_min = max(0, min(y_min, len(img)))
y_max = max(0, min(y_max, len(img)))
x_min = max(0, min(x_min, len(img[0])))
x_max = max(0, min(x_max, len(img[0])))
logger.debug("After clamp")
logger.debug("Min x : %s"%x_min)
logger.debug("Max x : %s"%x_max)
logger.debug("Min y : %s"%y_min)
logger.debug("Max y : %s"%y_max)
logger.debug("Image y: %s"%len(img))
logger.debug("Image x: %s"%len(img[0]))
if y_max == y_min:
logger.error("After clamp, image size is Null")
return False
if x_max == x_min:
logger.error("After clamp, image size is Null")
return False
rgb = np.zeros((y_max-y_min, x_max-x_min, 3), dtype=np.uint8)
rgb[..., 0] = img[y_min:y_max,x_min:x_max, 0]
rgb[..., 1] = img[y_min:y_max,x_min:x_max, 1]
rgb[..., 2] = img[y_min:y_max,x_min:x_max, 2]
logger.debug("Write banner in output file %s", out_path)
scipy.misc.imsave(out_path, rgb)
return True
if __name__ == '__main__':
logger.setLevel(logging.DEBUG)
tiff_file = "/tmp/out.tiff"
big_png_file = "/tmp/out_big.png"
banner_file = "/tmp/out.png"
# create_raster_from_band( '/tmp/tmp0_if50g9','/tmp/tmpz61ja8cq','/tmp/tmp7dl287r9', tiff_file)
# x, y = get_x_y_for_lon_lat(tiff_file, 1.433333, 43.6)
# create_png_from_raster(tiff_file, big_png_file, red_clip=(250., 2500.), blue_clip=(250., 2500.), green_clip=(250., 2500.))
# extract_banner(big_png_file, x, y,1400, 800, banner_file)
extract_banner(big_png_file, 0, 0,1400, 800, banner_file)
extract_banner(big_png_file, 10980, 10980,1400, 800, banner_file)
extract_banner(big_png_file, 20980, 20980,1400, 800, banner_file)
|
T-R0D/JustForFun
|
aoc2018/day02/test/test_solution.py
|
Python
|
gpl-2.0
| 1,946 | 0.001542 |
import unittest
import day02.solution as solution
class TestDay02(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_part_one(self):
test_input = [
'abcdef',
'bababc',
'abbcde',
'abcccd',
'aabcdd',
'abcdee',
'ababab',
]
self.assertEqual('12', solution.part_one(test_input))
def test_part_two(self):
test_input = [
'abcde',
'fghij',
'klmno',
'pqrst',
'fguij',
'axcye',
'wvxyz',
]
self.assertEqual('fgij', solution.part_two(test_input))
def test_has_n_letters(self):
self.assertFalse(solution.has_n_letters('abcdef', 2))
|
self.assertTrue(solution.has_n_letters('bababc', 2))
self.assertTrue(solution.has_n_letters('abbcde', 2))
self.assertFalse(solution.has_n_letters('abcccd', 2))
self.assertTrue(solution.has_n_letters('aabcdd', 2))
self.assertTrue(solution.has_n_letters('abcdee', 2))
self.assertFalse(solution.has_n_letters('ababab', 2))
self.assertFalse(solution.has_n_letters('abcdef', 3))
self.assertTrue(solution.has_n_letters('bababc', 3))
self.assertFalse(solution.has_n_letters('abbcde', 3))
self.assertTrue(solution.has_n_letters('abcccd', 3))
self.assertFalse(solution.has_n_letters('aabcdd', 3))
self.assertFalse(solution.has_n_letters('abcdee', 3))
self.assertTrue(solution.has_n_letters('ababab', 3))
def test_find_difference_in_ids(self):
n_different, differing_letters, same_letters = solution.find_difference_in_ids('abcde', 'axcye')
self.assertEqual(2, n_different)
n_different, differing_letters, same_letters = solution.find_difference_in_ids('fghij', 'fguij')
self.assertEqual(1, n_different)
|
Gjum/agarnet
|
setup.py
|
Python
|
gpl-3.0
| 1,095 | 0.001826 |
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(name='agarnet',
packages=['agarnet'],
py_modules=['agarnet'],
version='0.2.4',
description='agar.io client and connection toolkit',
install_requires=['websocket-client>=0.32.0'],
author='Gjum',
author_email='code.gjum@gmail.com',
url='https://github.com/Gjum/agarnet',
license='GPLv3',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Education',
'Topic :: Games/Entertainment',
],
)
|
bderembl/mitgcm_configs
|
test_pg_hr/input/mygendata.py
|
Python
|
mit
| 8,360 | 0.035167 |
#!/usr/bin/env python
from __future__ import division
import numpy as np
import matplotlib.pyplot as plt
import scipy.io.netcdf as netcdf
import spoisson
import def_radius
from scipy import interpolate
from scipy.interpolate import interp1d
import glob
#plt.ion()
binprec = '>f4'
flag_conf = 2 # 0: samelson, 1: grooms 2: basilisk
#% ================== GRID =====================================
rSphere = 6370.e3
deg2m = 2*np.pi*rSphere/360.0
gg = 9.8
alphaT = 2e-4
si_x = 100
si_y = 100
#si_x = 720
#si_y = 720
if flag_conf == 0:
si_z = 33
elif flag_conf == 1:
si_z = 31
elif flag_conf == 2:
si_z = 30
si_x1 = si_x + 1
si_y1 = si_y + 1
# in m
if flag_conf == 0 :
Lx = 5000.0e3
Ly = 5000.0e3
elif flag_conf == 1:
Lx = 3000.0e3
Ly = 3000.0e3
elif flag_conf == 2:
Lx = 5000.0e3
Ly = 5000.0e3
dx = Lx/si_x;
dy = Ly/si_y;
xx = Lx*(np.arange(0,si_x) + 0.5)/(1.0*si_x)
yy = Ly*(np.arange(0,si_y) + 0.5)/(1.0*si_y)
xx1 = Lx*(np.arange(0,si_x+1) )/(1.0*si_x)
yy1 = Ly*(np.arange(0,si_y+1) )/(1.0*si_y)
xg,yg = np.meshgrid(xx,yy)
xu,yu = np.meshgrid(xx1[:-1],yy)
xv,yv = np.meshgrid(xx,yy1[:-1])
xc,yc = np.meshgrid(xx1,yy1)
dx1 = dx*np.ones((si_x))
dy1 = dy*np.ones((si_y))
if flag_conf == 0:
dz1 = np.array([ 37.96964884, 64.27943707, 53.47713828, 55.25052547,
57.14580417, 59.17549133, 61.35478616, 63.70082498,
66.23372436, 68.97643209, 71.95606828, 75.20511746,
78.76157761, 82.67134428, 86.99014783, 91.7853415 ,
97.14066982, 103.16058993, 109.97712612, 117.75970459,
126.72990561, 137.18292117, 149.52003956, 164.30348158,
182.34416842, 204.85766232, 233.75503719, 272.22827077,
326.05469227, 406.94121271, 543.09982806, 532.52164274,
217.48963743])
elif flag_conf == 1:
dz1 = np.zeros((si_z))
dz1[0:4] = 25.
dz1[4:8] = 50.
dz1[8:12] = 75.
dz1[12:16] = 100.
dz1[16:21] = 150.
dz1[21:26] = 200.
dz1[26:] = 250.
elif flag_conf == 2:
dz1 = 5000/si_z*np.ones((si_z))
# # 1 layer configuration
# si_z = 1
# dz1 = np.zeros((si_z))
# dz1[0] = 4000.0
zz = np.reshape(dz1.cumsum(),(si_z,1,1))
dz_fi2 = dz1/2.0
dz2 = np.array([dz_fi2[i] + dz_fi2[i+1] for i in range(len(dz_fi2)-1)])
dz2 = np.reshape(dz2[0:si_z-1],(si_z-1,1,1))
dx1.astype(binprec).tofile('dx.box')
dy1.astype(binprec).tofile('dy.box')
dz1.astype(binprec).tofile('dz.box')
# ==== physical parameters
if flag_conf == 0:
fMin = 3.78e-05
fMax = 1.3e-4
elif flag_conf == 1:
fMin = 4.5e-5
fMax = 1.0e-4
if flag_conf == 2:
fMin = 3.e-05
fMax = 1.3e-4
fmid = 0.5*(fMin + fMax)
beta = (fMax-fMin)/Ly
ff = np.linspace(fMin,fMax,si_y)
print('f_south = {0}; beta = {1}'.format(fMin,beta) )
#%==================== LAND ===================================
landh = np.zeros((si_y,si_x));
H = zz[-1].squeeze()
landh = -H + landh
# walls
landh[:,0] = 0.0
landh[-1,:] = 0.0
landh.astype(binprec).tofile('topog.box')
#%=============== Surface forcing ===================================
# -- temperature --
sst = np.zeros((si_y,si_x));
if flag_conf == 0:
  TS = 40.0 # should be 50, but I chose 40 because the flux is at the top of the Ekman layer
TN = 0.0
elif flag_conf == 1:
TS = 22.0
TN = 2.0
elif flag_conf == 2:
TS = 22.0
TN = 2.0
sst = (TN-TS)*yg/Ly + TS
#thetaClimFile
sst.astype(binprec).tofile('sstclim.box')
# relax time scale (grooms)
rho0 = 1023.0
Cp = 4000.0
tauThetaClimRelax = rho0*Cp*dz1[0]/35. # 35 Watts per square meter per degree Kelvin
# relax time scale (samelson 97)
#(gamma*U*D/L/dz[1]) = 5*6e-6/37 ~ 15 days
#tauThetaClimRelax = 1233333.0
# I (arbitrarily..) set it to 50 days
tauThetaClimRelax = 4320000
# -- wind --
windx = np.zeros((si_y,si_x));
if flag_conf == 0:
tauW = 0.4
elif flag_conf == 1:
tauW = 0.2
elif flag_conf == 2:
tauW = 0.4
windx = -tauW*np.sin(2*np.pi*yg/Ly )
windx = windx*ff.reshape(si_y,1)/fMin
windx.astype(binprec).tofile('windx.box')
#% ============== background density profile ===================
# linear stratification
dep_l = np.linspace(0,H,si_z)
temp_f = (TN-TS)*(dep_l/H) + TS
if si_z > 1:
# interpolate on the new vertical grid
func2 = interp1d(dep_l, temp_f)
temp_i = func2(zz)
else:
temp_i = 1.0*temp_f
temp_i = temp_i.reshape((si_z,1,1))
temp_i.astype(binprec).tofile('tref.box')
#sref.astype(binprec).tofile('sref.box')
#%=============== initial conditions ===================================
# ### ideal ###
# uvel = np.zeros((si_z,si_y,si_x));
# vvel = np.zeros((si_z,si_y,si_x));
# theta = np.zeros((si_z,si_y,si_x));
# eta = np.zeros((si_y,si_x));
# theta = theta + 4.0
# #theta = theta + temp_i - TN
# #theta = theta*(1-yg/Ly) + TN
# uvel.astype(binprec).tofile('uinit.box')
# vvel.astype(binprec).tofile('vinit.box')
# theta.astype(binprec).tofile('tinit.box')
# eta.astype(binprec).tofile('einit.box')
#### from PG ###
dir0 = './data_input/'
if flag_conf == 0:
file1 = 'var_proj_s.nc'
f1 = netcdf.netcdf_file(dir0 + file1,'r')
uvel = f1.variables['u' ][:,:,:].copy()
vvel = f1.variables['v' ][:,:,:].copy()
theta = f1.variables['ti'][:,:,:].copy()
elif flag_conf == 2:
# PG scales
#L = 5000e3 # m
H = 5000 # m
beta = 2.0e-11 # 1/m/s
N2 = 1e-6 # (1/s**2)
Bs = N2*H
Thetas = Bs/gg/alphaT # 1/g alpha
Us = N2*H**2/(beta*Lx**2)
fnot = 3e-5
gg = 9.80665 # nemo value
ff = fnot + beta*yg # should be at u and v points
fmid = fnot + 0.5*Ly*beta
fileb = 'b*'
fileu = 'u*'
allfilesb = sorted(glob.glob(dir0 + fileb));
allfilesu = sorted(glob.glob(dir0 + fileu));
# dimensions
b = np.fromfile(allfilesb[0],'f4')
N = int(b[0])
N1 = N + 1
nl2 = int(len(b)/N1**2)
nl = nl2 - 2
b = np.fromfile(allfilesb[-1],'f4').reshape(nl2,N1,N1).transpose(0,2,1)
uv = np.fromfile(allfilesu[-1],'f4').reshape(2*nl2,N1,N1).transpose(0,2,1)
theta = Thetas*(b[1:-1,1:,1:] - b.min()) + 2.0
uvel = Us*uv[2:-2:2,1:,1:]
vvel = Us*uv[3:-2:2,1:,1:]
si_zpg,si_ypg,si_xpg = theta.shape
dxpg = dx*si_x/si_xpg
# compute pressure for SSH
dudy = np.diff(uvel,1,1)/dxpg
dvdx = np.diff(vvel,1,2)/dxpg
vort = dvdx[0,:-1,:] - dudy[0,:,:-1]
psi = spoisson.sol(vort[:])
psi = psi.reshape((si_xpg-1,si_ypg-1))
psi = psi*dxpg*dxpg*fmid/gg
eta = np.zeros((si_ypg,si_xpg))
eta[:-1,:-1] = psi
# old grid
xx = np.linspace(0,1,si_xpg)
yy = np.linspace(0,1,si_ypg)
xog,yog = np.meshgrid(xx,yy)
xn = np.linspace(0,1,si_x)
yn = np.linspace(0,1,si_y)
xng,yng = np.meshgrid(xn,yn)
uvel_n = np.zeros((si_z,si_y,si_x))
vvel_n = np.zeros((si_z,si_y,si_x))
theta_n = np.zeros((si_z,si_y,si_x))
eta_n = np.zeros((si_y,si_x))
for nz in range(0,si_z):
fint = interpolate.interp2d(xx, yy,uvel[nz,:,:], kind='cubic')
uvel_n[nz,:,:] = fint(xn,yn)
fint = interpolate.interp2d(xx, yy,vvel[nz,:,:], kind='cubic')
vvel_n[nz,:,:] = fint(xn,yn)
fint = interpolate.interp2d(xx, yy,theta[nz,:,:], kind='cubic')
theta_n[nz,:,:] = fint(xn,yn)
  fint = interpolate.interp2d(xx, yy,eta, kind='cubic')
eta_n = fint(xn,yn)
#np.savetxt('upg.dat',uvel_n[0,:,:])
#np.savetxt('sstpg.dat',theta_n[0,:,:])
uvel_n.astype(binprec).tofile('uinit.box')
vvel_n.astype(binprec).tofile('vinit.box')
theta_n.astype(binprec).tofile('tinit.box')
eta_n.astype(binprec).tofile('einit.box')
#---------------------
# ------ RBCS --------
#---------------------
tmask = np.ones((si_z,si_y,si_x))
tmask.astype(binprec).tofile('tmask.box')
# relax to initial conditions
theta_n.astype(binprec).tofile('trelax.box')
# compute relaxation length scale
N2 = -gg*alphaT*np.diff(theta_n,axis=0)/dz2
N2_min = 1e-7
N2 = np.where(N2<N2_min, N2_min, N2)
gp = N2*dz2
lmax = 500e3
filt_len = np.zeros((si_y,si_x))
for nx in range(0,si_x):
for ny in range(0,si_y):
rd = def_radius.cal_rad(dz1,gp[:,ny,nx],ff[ny,nx])
filt_len[ny,nx] = np.min([10*rd[1],lmax])
# relaxation near the boundaries
def shape(x,sigma):
return (1-np.exp(-x**2/(2*sigma**2)))
dist = 500e3
filt_bdy = lmax*shape(xg,dist)*shape(xg-Lx,dist)*shape(yg,dist)*shape(yg-Lx,dist)
filt_len = np.where(filt_len<filt_bdy, filt_len, filt_bdy)
filt_len.astype(binprec).tofile('filter_length.box')
# # t
|
IllusionRom-deprecated/android_platform_tools_idea
|
python/testData/quickFixes/PyMakeMethodStaticQuickFixTest/decoWithParams_after.py
|
Python
|
apache-2.0
| 148 | 0.040541 |
__author__ = 'ktisha'
def foo(x):
return x
class A():
@staticmethod
    @accepts(int, int)
def my_<caret>method():
print "Smth"
| |
aelkikhia/pyduel_engine
|
pyduel_gui/widgets/game_status_widget.py
|
Python
|
apache-2.0
| 560 | 0.003571 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
from PyQt4.QtGui import QLabel
class StatusBar(QLabel):
def __init__(self, status, parent=None):
super(StatusBar, self).__init__(parent)
self.setToolTip("game status bar")
self.setText(status)
def setStatus(self, status):
self.setText(status)
if __name__ == '__main__':
import sys
from PyQt4.QtGui import QApplication
app = QApplication(sys.argv)
    widget = StatusBar("R: {} T: {}\tPhase: {}".format(1, 2, 'Move'))
widget.show()
sys.exit(app.exec_())
|
bigswitch/tempest
|
tempest/scenario/test_security_groups_basic_ops.py
|
Python
|
apache-2.0
| 26,295 | 0 |
# Copyright 2013 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log
from tempest import clients
from tempest.common.utils import data_utils
from tempest import config
from tempest.scenario import manager
from tempest import test
CONF = config.CONF
LOG = log.getLogger(__name__)
class TestSecurityGroupsBasicOps(manager.NetworkScenarioTest):
"""The test suite for security groups
This test suite assumes that Nova has been configured to
boot VM's with Neutron-managed networking, and attempts to
verify cross tenant connectivity as follows
ssh:
in order to overcome "ip namespace", each tenant has an "access point"
VM with floating-ip open to incoming ssh connection allowing network
commands (ping/ssh) to be executed from within the
tenant-network-namespace
Tempest host performs key-based authentication to the ssh server via
floating IP address
connectivity test is done by pinging destination server via source server
ssh connection.
success - ping returns
failure - ping_timeout reached
multi-node:
Multi-Node mode is enabled when CONF.compute.min_compute_nodes > 1.
Tests connectivity between servers on different compute nodes.
When enabled, test will boot each new server to different
compute nodes.
setup:
for primary tenant:
1. create a network&subnet
2. create a router (if public router isn't configured)
3. connect tenant network to public network via router
4. create an access point:
a. a security group open to incoming ssh connection
b. a VM with a floating ip
5. create a general empty security group (same as "default", but
without rules allowing in-tenant traffic)
tests:
1. _verify_network_details
2. _verify_mac_addr: for each access point verify that
(subnet, fix_ip, mac address) are as defined in the port list
3. _test_in_tenant_block: test that in-tenant traffic is disabled
without rules allowing it
4. _test_in_tenant_allow: test that in-tenant traffic is enabled
once an appropriate rule has been created
5. _test_cross_tenant_block: test that cross-tenant traffic is disabled
without a rule allowing it on destination tenant
    6. _test_cross_tenant_allow:
        * test that cross-tenant traffic is enabled once an appropriate
rule has been created on destination tenant.
* test that reverse traffic is still blocked
* test than reverse traffic is enabled once an appropriate rule has
been created on source tenant
7._test_port_update_new_security_group:
* test that traffic is blocked with default security group
* test that traffic is enabled after updating port with new security
group having appropriate rule
8. _test_multiple_security_groups: test multiple security groups can be
associated with the vm
assumptions:
1. alt_tenant/user existed and is different from primary_tenant/user
2. Public network is defined and reachable from the Tempest host
3. Public router can either be:
* defined, in which case all tenants networks can connect directly
to it, and cross tenant check will be done on the private IP of the
destination tenant
or
* not defined (empty string), in which case each tenant will have
its own router connected to the public network
"""
credentials = ['primary', 'alt', 'admin']
class TenantProperties(object):
"""helper class to save tenant details
id
credentials
network
subnet
security groups
servers
access point
"""
def __init__(self, credentials):
self.manager = clients.Manager(credentials)
# Credentials from manager are filled with both names and IDs
self.creds = self.manager.credentials
self.network = None
self.subnet = None
self.router = None
self.security_groups = {}
self.servers = list()
def set_network(self, network, subnet, router):
self.network = network
self.subnet = subnet
self.router = router
@classmethod
def skip_checks(cls):
super(TestSecurityGroupsBasicOps, cls).skip_checks()
if CONF.baremetal.driver_enabled:
msg = ('Not currently supported by baremetal.')
raise cls.skipException(msg)
if CONF.network.port_vnic_type in ['direct', 'macvtap']:
msg = ('Not currently supported when using vnic_type'
' direct or macvtap')
raise cls.skipException(msg)
if not (CONF.network.project_networks_reachable or
CONF.network.public_network_id):
msg = ('Either project_networks_reachable must be "true", or '
'public_network_id must be defined.')
raise cls.skipException(msg)
if not test.is_extension_enabled('security-group', 'network'):
msg = "security-group extension not enabled."
raise cls.skipException(msg)
@classmethod
def setup_credentials(cls):
# Create no network resources for these tests.
cls.set_network_resources()
super(TestSecurityGroupsBasicOps, cls).setup_credentials()
# TODO(mnewby) Consider looking up entities as needed instead
# of storing them as collections on the class.
# Credentials from the manager are filled with both IDs and Names
cls.alt_creds = cls.alt_manager.credentials
@classmethod
def resource_setup(cls):
super(TestSecurityGroupsBasicOps, cls).resource_setup()
cls.multi_node = CONF.compute.min_compute_nodes > 1 and \
test.is_scheduler_filter_enabled("DifferentHostFilter")
if cls.multi_node:
LOG.info("Working in Multi Node mode")
else:
LOG.info("Working in Single Node mode")
cls.floating_ips = {}
cls.tenants = {}
creds = cls.manager.credentials
cls.primary_tenant = cls.TenantProperties(creds)
cls.alt_tenant = cls.TenantProperties(cls.alt_creds)
for tenant in [cls.primary_tenant, cls.alt_tenant]:
cls.tenants[tenant.creds.tenant_id] = tenant
cls.floating_ip_access = not CONF.network.public_router_id
# work-around for broken probe port
cls.floating_ip_access = False
def setUp(self):
"""Set up a single tenant with an accessible server.
If multi-host is enabled, save created server uuids.
"""
self.servers = []
super(TestSecurityGroupsBasicOps, self).setUp()
self._deploy_tenant(self.primary_tenant)
self._verify_network_details(self.primary_tenant)
self._verify_mac_addr(self.primary_tenant)
def _create_tenant_keypairs(self, tenant):
keypair = self.create_keypair(tenant.manager.keypairs_client)
tenant.keypair = keypair
def _create_tenant_security_groups(self, tenant):
access_sg = self._create_empty_security_group(
namestart='secgroup_access-',
tenant_id=tenant.creds.tenant_id,
client=tenant.manager.security_groups_client
)
# don't use default secgroup since it allows in
|
leiforion/t1-python
|
terminalone/utils/credentials.py
|
Python
|
bsd-3-clause
| 1,971 | 0.000507 |
# -*- coding: utf-8 -*-
"""Get credentials from file or environment variables"""
from functools import reduce
def dpath(dict_, path):
"""Dig into dictionary by string path. e.g.
dpath({'a': {'b': {'c': 'd'}}}, 'a.b') -> {'c': 'd'}
"""
from operator import getitem
paths = path.split('.')
return reduce(
getitem,
paths,
dict_
)
def credentials(filename=None, root=None):
"""Get credentials from JSON file or environment variables.
JSON file should have credentials in the form of:
{
"username": "myusername",
"password": "supersecret",
"api_key": "myapikey"
}
If filename not provided, fall back on environment variables:
- T1_API_USERNAME
- T1_API_PASSWORD
    - T1_API_KEY
:param filename: str filename of JSON file containing credentials.
    :param root: str path to get to credentials object. For instance, in object:
    {
        "credentials": {
"api": {
"username": "myusername",
"password": "supersecret",
"api_key": "myapikey"
}
}
}
"root" is "credentials.api"
:return: dict[str]str
:raise: TypeError: no JSON file or envvars
"""
if filename is not None:
import json
with open(filename, 'rb') as f:
conf = json.load(f)
if root is not None:
conf = dpath(conf, root)
else:
import os
try:
conf = {
'username': os.environ['T1_API_USERNAME'],
'password': os.environ['T1_API_PASSWORD'],
'api_key': os.environ['T1_API_KEY'],
}
except KeyError:
raise TypeError('Must either supply JSON file of credentials'
' or set environment variables '
'T1_API_{USERNAME,PASSWORD,KEY}')
return conf
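# Minimal usage sketch (the file name and JSON layout are hypothetical):
#
#     creds = credentials('t1_credentials.json', root='credentials.api')
#     # or, with no file, fall back on the T1_API_* environment variables:
#     creds = credentials()
#     print(creds['username'])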
|
nilsonmorales/Badass
|
Packages_POU/pre-paquetes/20140430/20140430.pet/usr/share/local/apps/VideoThumbnail/extopts.py
|
Python
|
gpl-3.0
| 1,009 | 0.038652 |
"""Grab the tips from Options.xml
$Id: extopts.py,v 1.1 2007/01/14 14:07:31 stephen Exp $
Originally ROX-Filer/src/po/tips.py by Thomas Leonard.
"""
from xml.sax import *
from xml.sax.handler import ContentHandler
import os, sys
class Handler(ContentHandler):
data = ""
def startElement(self, tag, attrs):
        for x in ['title', 'label', 'end', 'unit']:
if attrs.has_key(x):
self.trans(attrs[x])
self.data = ""
def characters(self, data):
self.data = self.data + data
def endElement(self, tag):
data = self.data.strip()
if data:
self.trans(data)
self.data = ""
    def trans(self, data):
data = '\\n'.join(data.split('\n'))
if data:
out.write('_("%s")\n' % data.replace('"', '\\"'))
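# Illustrative behaviour (the input snippet is hypothetical): an element such as
#
#     <entry label="Icon size" unit="pixels">Some tip text</entry>
#
# would emit the lines _("Icon size"), _("pixels") and _("Some tip text")
# into the output file, ready for gettext extraction.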
ifname='Options.xml'
ofname='Options_strings'
if len(sys.argv)>2:
ifname=sys.argv[1]
ofname=sys.argv[2]
elif len(sys.argv)==2:
ifname=sys.argv[1]
print "Extracting translatable bits from %s..." % os.path.basename(ifname)
out = open(ofname, 'wb')
parse(ifname, Handler())
out.close()
|
SanPen/GridCal
|
src/research/power_flow/helm/old/helm_z_pq.py
|
Python
|
lgpl-3.0
| 6,030 | 0.002819 |
import numpy as np
np.set_printoptions(precision=6, suppress=True, linewidth=320)
from numpy import where, zeros, ones, mod, conj, array, dot, angle, arange, r_, linalg, Inf, complex128  #, complex256
from numpy.linalg import solve
from scipy.sparse.linalg import inv  # the reduced Ybus slice below is sparse, so the sparse inverse is used
# Set the complex precision to use
complex_type = complex128
def calc_W(n, npqpv, C, W):
"""
Calculation of the inverse coefficients W.
@param n: Order of the coefficients
@param npqpv: number of pq and pv nodes
@param C: Structure of voltage coefficients (Ncoeff x nbus elements)
@param W: Structure of inverse voltage coefficients (Ncoeff x nbus elements)
@return: Array of inverse voltage coefficients for the order n
"""
if n == 0:
res = ones(npqpv, dtype=complex_type)
else:
l = arange(n)
res = -(W[l, :] * C[n - l, :]).sum(axis=0)
res /= conj(C[0, :])
return res
def pade_approximation(n, an, s=1):
"""
Computes the n/2 pade approximant of the series an at the approximation
point s
Arguments:
an: coefficient matrix, (number of coefficients, number of series)
n: order of the series
s: point of approximation
Returns:
pade approximation at s
"""
nn = int(n / 2)
if mod(nn, 2) == 0:
nn
|
-= 1
L = nn
M = nn
an = np.ndarray.flatten(an)
rhs = an[L + 1:L + M + 1]
C = zeros((L, M), dtype=complex_type)
for i in range(L):
k = i + 1
C[i, :] = an[L - M + k:L + k]
try:
b = solve(C, -rhs) # bn to b1
except:
return 0, zeros(L + 1, dtype=complex_type), zeros(L + 1, dtype=complex_type)
b = r_[1, b[::-1]] # b0 = 1
    a = zeros(L + 1, dtype=complex_type)
a[0] = an[0]
for i in range(L):
val = complex_type(0)
k = i + 1
for j in range(k + 1):
val += an[k - j] * b[j]
a[i + 1] = val
p = complex_type(0)
q = complex_type(0)
for i in range(L + 1):
p += a[i] * s ** i
q += b[i] * s ** i
return p / q, a, b
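# Usage sketch for pade_approximation (values are illustrative): approximate
# exp(s) at s=1 from the first Maclaurin coefficients 1, 1, 1/2!, 1/3!, ...
#
#     an = array([1, 1, 1/2, 1/6, 1/24, 1/120, 1/720, 1/5040], dtype=complex_type)
#     val, a, b = pade_approximation(7, an, s=1)
#     # val should be close to exp(1) ~= 2.71828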
# @jit(cache=True)
def helmz(Vbus, Sbus, Ibus, Ybus, pq, pv, ref, pqpv, tol=1e-9, max_ter=5):
"""
Args:
admittances: Circuit complete admittance matrix
slackIndices: Indices of the slack buses (although most likely only one works)
coefficientCount: Number of voltage coefficients to evaluate (Must be an odd number)
powerInjections: Array of power injections matching the admittance matrix size
voltageSetPoints: Array of voltage set points matching the admittance matrix size
types: Array of bus types matching the admittance matrix size. types: {1-> PQ, 2-> PV, 3-> Slack}
Output:
Voltages vector
"""
# reduced impedance matrix
Zred = inv(Ybus[pqpv, :][:, pqpv]).toarray()
# slack currents
Ivd = -Ybus[pqpv, :][:, ref].dot(Vbus[ref])
# slack voltages influence
Ck = Zred.dot(Ivd)
npqpv = len(pqpv)
Vcoeff = zeros((0, npqpv), dtype=complex_type)
Wcoeff = zeros((0, npqpv), dtype=complex_type)
row = zeros((1, npqpv), dtype=complex_type)
for n in range(max_ter):
# reserve memory
Vcoeff = r_[Vcoeff, row.copy()]
Wcoeff = r_[Wcoeff, row.copy()]
if n == 0:
I = Ivd
else:
I = conj(Sbus[pqpv]) * Wcoeff[n-1, :]
# solve the voltage coefficients
Vcoeff[n, :] = Zred.dot(I)
# compute the inverse voltage coefficients
Wcoeff[n, :] = calc_W(n=n, npqpv=npqpv, C=Vcoeff, W=Wcoeff)
# compose the final voltage
voltage = Vbus.copy()
for i, ii in enumerate(pqpv):
voltage[ii], _, _ = pade_approximation(n, Vcoeff[:, i])
# evaluate F(x)
Scalc = voltage * conj(Ybus * voltage - Ibus)
mis = Scalc - Sbus # complex power mismatch
normF = linalg.norm(r_[mis[pv].real, mis[pq].real, mis[pq].imag], Inf)
print('Vcoeff:\n', Vcoeff)
print('V:\n', abs(Vcoeff.sum(axis=0)))
return voltage, normF
if __name__ == "__main__":
from GridCal.Engine.calculation_engine import *
grid = MultiCircuit()
grid.load_file('lynn5buspq.xlsx')
grid.compile()
circuit = grid.circuits[0]
print('\nYbus:\n', circuit.power_flow_input.Ybus.todense())
print('\nYseries:\n', circuit.power_flow_input.Yseries.todense())
print('\nYshunt:\n', circuit.power_flow_input.Yshunt)
print('\nSbus:\n', circuit.power_flow_input.Sbus)
print('\nIbus:\n', circuit.power_flow_input.Ibus)
print('\nVbus:\n', circuit.power_flow_input.Vbus)
print('\ntypes:\n', circuit.power_flow_input.types)
print('\npq:\n', circuit.power_flow_input.pq)
print('\npv:\n', circuit.power_flow_input.pv)
print('\nvd:\n', circuit.power_flow_input.ref)
import time
print('HELM-Z')
start_time = time.time()
cmax = 40
V1, err = helmz(Vbus=circuit.power_flow_input.Vbus,
Sbus=circuit.power_flow_input.Sbus,
Ibus=circuit.power_flow_input.Ibus,
Ybus=circuit.power_flow_input.Yseries,
pq=circuit.power_flow_input.pq,
pv=circuit.power_flow_input.pv,
ref=circuit.power_flow_input.ref,
pqpv=circuit.power_flow_input.pqpv,
max_ter=cmax)
print("--- %s seconds ---" % (time.time() - start_time))
# print_coeffs(C, W, R, X, H)
print('V module:\t', abs(V1))
print('V angle: \t', angle(V1))
print('error: \t', err)
# check the HELM solution: v against the NR power flow
print('\nNR')
options = PowerFlowOptions(SolverType.NR, verbose=False, robust=False, tolerance=1e-9)
power_flow = PowerFlow(grid, options)
start_time = time.time()
power_flow.run()
print("--- %s seconds ---" % (time.time() - start_time))
vnr = circuit.power_flow_results.voltage
print('V module:\t', abs(vnr))
print('V angle: \t', angle(vnr))
print('error: \t', circuit.power_flow_results.error)
# check
print('\ndiff:\t', V1 - vnr)
|
schreiberx/sweet
|
mule/platforms/50_himmuc/JobPlatform.py
|
Python
|
mit
| 4,801 | 0.016038 |
import platform
import socket
import sys
import os
from mule_local.JobGeneration import *
from mule.JobPlatformResources import *
from . import JobPlatformAutodetect
# Underscore defines symbols to be private
_job_id = None
def get_platform_autodetect():
"""
Returns
-------
bool
True if current platform matches, otherwise False
"""
return JobPlatformAutodetect.autodetect()
def get_platform_id():
"""
Return platform ID
Returns
-------
string
unique ID of platform
"""
return "himmuc"
def get_platform_resources():
"""
Return information about hardware
"""
h = JobPlatformResources()
h.num_cores_per_node = 4
# Number of nodes per job are limited
h.num_nodes = 40
h.num_cores_per_socket = 4
h.max_wallclock_seconds = 8*60*60
return h
def jobscript_setup(jg : JobGeneration):
"""
Setup data to generate job script
"""
global _job_id
_job_id = jg.runtime.getUniqueID(jg.compile, jg.unique_id_filter)
return
def jobscript_get_header(jg : JobGeneration):
"""
These headers typically contain the information on e.g. Job exection, number of compute nodes, etc.
Returns
-------
string
multiline text for scripts
"""
global _job_id
p = jg.parallelization
time_str = p.get_max_wallclock_seconds_hh_mm_ss()
#
# See https://www.lrz.de/services/compute/linux-cluster/batch_parallel/example_jobs/
#
content = """#! /bin/bash
#SBATCH -o """+jg.p_job_stdout_filepath+"""
#SBATCH -e """+jg.p_job_stderr_filepath+"""
#SBATCH -D """+jg.p_job_dirpath+"""
#SBATCH -J """+_job_id+"""
#SBATCH --get-user-env
#SBATCH --nodes="""+str(p.num_nodes)+"""
#SBATCH --ntasks-per-node="""+str(p.num_ranks_per_node)+"""
# the above is a good match for the
# CooLMUC2 architecture.
#SBATCH --mail-type=end
#SBATCH --mail-user=schreiberx@gmail.com
#SBATCH --export=NONE
#SBATCH --time="""+time_str+"""
#SBATCH --partition=odr
"""
content += "\n"
content += "module load mpi\n"
if False:
if p.force_turbo_off:
content += """# Try to avoid slowing down CPUs
#SBATCH --cpu-freq=Performance
"""
content += """
source /etc/profile.d/modules.sh
"""
if jg.compile.threading != 'off':
content += """
export OMP_NUM_THREADS="""+str(p.num_threads_per_rank)+"""
"""
if p.core_oversubscription:
raise Exception("Not supported with this script!")
if p.core_affinity != None:
        content += "\necho \"Affinity: "+str(p.core_affinity)+"\"\n"
if p.core_affinity == 'compact':
content += "\nexport OMP_PROC_BIND=close\n"
elif p.core_affinity == 'scatter':
content += "\nexport OMP_PROC_BIND=spread\n"
else:
raise Exception("Affinity '"+str(p.core_affinity)+"' not supported")
return content
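# For illustration, with 2 nodes x 4 ranks per node and a one-hour wallclock
# the generated header contains, among others, the SBATCH directives below
# (output paths and the job name are elided):
#
#     #SBATCH --nodes=2
#     #SBATCH --ntasks-per-node=4
#     #SBATCH --time=01:00:00
#     #SBATCH --partition=odr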
def jobscript_get_exec_prefix(jg : JobGeneration):
"""
Prefix before executable
Returns
-------
string
multiline text for scripts
"""
content = ""
content += jg.runtime.get_jobscript_plan_exec_prefix(jg.compile, jg.runtime)
return content
def jobscript_get_exec_command(jg : JobGeneration):
"""
Prefix to executable command
Returns
-------
string
multiline text for scripts
"""
p = jg.parallelization
mpiexec = ''
#
# Only use MPI exec if we are allowed to do so
# We shouldn't use mpiexec for validation scripts
#
|
if not p.mpiexec_disabled:
mpiexec = "mpiexec -n "+str(p.num_ranks)
content = """
# mpiexec ... would be here without a line break
EXEC=\""""+jg.compile.getProgramPath()+"""\"
PARAMS=\""""+jg.runtime.getRuntimeOptions()+"""\"
echo \"${EXEC} ${PARAMS}\"
"""+mpiexec+""" $EXEC $PARAMS || exit 1
"""
return content
def jobscript_get_exec_suffix(jg : JobGeneration):
"""
    Suffix after the executable
Returns
-------
stri
|
ng
multiline text for scripts
"""
content = ""
content += jg.runtime.get_jobscript_plan_exec_suffix(jg.compile, jg.runtime)
return content
def jobscript_get_footer(jg : JobGeneration):
"""
Footer at very end of job script
Returns
-------
string
multiline text for scripts
"""
content = ""
return content
def jobscript_get_compile_command(jg : JobGeneration):
"""
Compile command(s)
This is separated here to put it either
* into the job script (handy for workstations)
or
* into a separate compile file (handy for clusters)
Returns
-------
string
multiline text with compile command to generate executable
"""
content = """
SCONS="scons """+jg.compile.getSConsParams()+' -j 4"'+"""
echo "$SCONS"
$SCONS || exit 1
"""
return content
|
fnp/pylucene
|
samples/LuceneInAction/lia/analysis/keyword/KeywordAnalyzerTest.py
|
Python
|
apache-2.0
| 2,858 | 0.00035 |
# ====================================================================
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ====================================================================
from unittest import Tes
|
tCase
from lucene import \
IndexWriter, Term, SimpleAnalyzer, PerFieldAnalyzerWrapper, \
RAMDirectory, Document, Field, Ind
|
exSearcher, TermQuery, \
QueryParser, Analyzer, StringReader, Token, JavaError, \
Version
from lia.analysis.keyword.KeywordAnalyzer import KeywordAnalyzer
from lia.analysis.keyword.SimpleKeywordAnalyzer import SimpleKeywordAnalyzer
class KeywordAnalyzerTest(TestCase):
def setUp(self):
self.directory = RAMDirectory()
writer = IndexWriter(self.directory, SimpleAnalyzer(), True,
IndexWriter.MaxFieldLength.UNLIMITED)
doc = Document()
doc.add(Field("partnum", "Q36",
Field.Store.YES, Field.Index.NOT_ANALYZED))
doc.add(Field("description", "Illidium Space Modulator",
Field.Store.YES, Field.Index.ANALYZED))
writer.addDocument(doc)
writer.close()
self.searcher = IndexSearcher(self.directory, True)
def testTermQuery(self):
query = TermQuery(Term("partnum", "Q36"))
scoreDocs = self.searcher.search(query, 50).scoreDocs
self.assertEqual(1, len(scoreDocs))
def testBasicQueryParser(self):
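        # SimpleAnalyzer keeps only letters and lowercases them, so the query
        # term "Q36" becomes "q" and can never match the NOT_ANALYZED keyword
        # value "Q36"; this test documents that failure mode.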
analyzer = SimpleAnalyzer()
query = QueryParser(Version.LUCENE_CURRENT, "description",
analyzer).parse("partnum:Q36 AND SPACE")
scoreDocs = self.searcher.search(query, 50).scoreDocs
self.assertEqual("+partnum:q +space", query.toString("description"),
"note Q36 -> q")
self.assertEqual(0, len(scoreDocs), "doc not found :(")
def testPerFieldAnalyzer(self):
analyzer = PerFieldAnalyzerWrapper(SimpleAnalyzer())
analyzer.addAnalyzer("partnum", KeywordAnalyzer())
query = QueryParser(Version.LUCENE_CURRENT, "description",
analyzer).parse("partnum:Q36 AND SPACE")
scoreDocs = self.searcher.search(query, 50).scoreDocs
#self.assertEqual("+partnum:Q36 +space", query.toString("description"))
self.assertEqual(1, len(scoreDocs), "doc found!")
|
Soya93/Extract-Refactoring
|
python/helpers/pydev/tests_pydevd_python/test_pydev_monkey.py
|
Python
|
apache-2.0
| 5,544 | 0.00487 |
import sys
import os
import unittest
try:
from _pydev_bundle import pydev_monkey
except:
sys.path.append(os.path.dirname(os.path.dirname(__file__)))
from _pydev_bundle import pydev_monkey
from pydevd import SetupHolder
from _pydev_bundle.pydev_monkey import pydev_src_dir
class TestCase(unittest.TestCase):
def test_monkey(self):
original = SetupHolder.setup
try:
SetupHolder.setup = {'client':'127.0.0.1', 'port': '0'}
check='''C:\\bin\\python.exe -u -c "
connect(\\"127.0.0.1\\")
"'''
sys.original_argv = []
self.assertEqual(
'"C:\\bin\\python.exe" "-u" "-c" "import sys; '
'sys.path.append(r\'%s\'); '
'import pydevd; pydevd.settrace(host=\'127.0.0.1\', port=0, suspend=False, '
'trace_only_current_thread=False, patch_multiprocessing=True); '
'\nconnect(\\"127.0.0.1\\")\n"' % pydev_src_dir
|
,
pydev_monkey.patch_arg_str_win(check)
)
finally:
SetupHolder.setup = original
def test_str_to_args_windows(self):
self.assertEqual(['a', 'b'], pydev_monkey.str_to_args_windows('a "b"'))
def test_monkey_patch_args_indc(self):
original = Se
|
tupHolder.setup
try:
SetupHolder.setup = {'client':'127.0.0.1', 'port': '0'}
check=['C:\\bin\\python.exe', '-u', '-c', 'connect(\\"127.0.0.1\\")']
sys.original_argv = []
self.assertEqual(pydev_monkey.patch_args(check), [
'C:\\bin\\python.exe',
'-u',
'-c',
(
'import sys; sys.path.append(r\'%s\'); import pydevd; '
'pydevd.settrace(host=\'127.0.0.1\', port=0, suspend=False, trace_only_current_thread=False, patch_multiprocessing=True); '
'connect(\\"127.0.0.1\\")'
) % pydev_src_dir
])
finally:
SetupHolder.setup = original
def test_monkey_patch_args_module(self):
original = SetupHolder.setup
try:
SetupHolder.setup = {'client':'127.0.0.1', 'port': '0'}
check=['C:\\bin\\python.exe', '-m', 'test']
sys.original_argv = ['pydevd', '--multiprocess']
if sys.platform == 'win32':
self.assertEqual(pydev_monkey.patch_args(check), [
'C:\\bin\\python.exe',
'"pydevd"',
'"--module"',
'"--multiprocess"',
'test',
])
else:
self.assertEqual(pydev_monkey.patch_args(check), [
'C:\\bin\\python.exe',
'pydevd',
'--module',
'--multiprocess',
'test',
])
finally:
SetupHolder.setup = original
def test_monkey_patch_args_no_indc(self):
original = SetupHolder.setup
try:
SetupHolder.setup = {'client':'127.0.0.1', 'port': '0'}
check=['C:\\bin\\python.exe', 'connect(\\"127.0.0.1\\")']
sys.original_argv = ['my', 'original', 'argv']
if sys.platform == 'win32':
self.assertEqual(pydev_monkey.patch_args(check), [
'C:\\bin\\python.exe', '"my"', '"original"', '"argv"', 'connect(\\"127.0.0.1\\")'])
else:
self.assertEqual(pydev_monkey.patch_args(check), [
'C:\\bin\\python.exe', 'my', 'original', 'argv', 'connect(\\"127.0.0.1\\")'])
finally:
SetupHolder.setup = original
def test_monkey_patch_args_no_indc_with_pydevd(self):
original = SetupHolder.setup
try:
SetupHolder.setup = {'client':'127.0.0.1', 'port': '0'}
check=['C:\\bin\\python.exe', 'pydevd.py', 'connect(\\"127.0.0.1\\")', 'bar']
sys.original_argv = ['my', 'original', 'argv']
self.assertEqual(pydev_monkey.patch_args(check), [
'C:\\bin\\python.exe', 'pydevd.py', 'connect(\\"127.0.0.1\\")', 'bar'])
finally:
SetupHolder.setup = original
def test_monkey_patch_args_no_indc_without_pydevd(self):
original = SetupHolder.setup
try:
SetupHolder.setup = {'client':'127.0.0.1', 'port': '0'}
check=['C:\\bin\\python.exe', 'target.py', 'connect(\\"127.0.0.1\\")', 'bar']
sys.original_argv = ['pydevd.py', '--a=1', 'b', '--c=2', '--file', 'ignore_this.py']
if sys.platform == 'win32':
self.assertEqual(pydev_monkey.patch_args(check), [
'C:\\bin\\python.exe',
'"pydevd.py"',
'"--a=1"',
'"b"',
'"--c=2"',
'"--file"',
'target.py',
'connect(\\"127.0.0.1\\")',
'bar',
])
else:
self.assertEqual(pydev_monkey.patch_args(check), [
'C:\\bin\\python.exe',
'pydevd.py',
'--a=1',
'b',
'--c=2',
'--file',
'target.py',
'connect(\\"127.0.0.1\\")',
'bar',
])
finally:
SetupHolder.setup = original
if __name__ == '__main__':
unittest.main()
|
mardiros/apium
|
apium/__init__.py
|
Python
|
bsd-3-clause
| 122 | 0 |
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
__version__ = '0.1'
|
from .proxy import
|
Proxy
|
wegamekinglc/Finance-Python
|
PyFin/Analysis/TechnicalAnalysis/__init__.py
|
Python
|
mit
| 6,089 | 0.007226 |
# -*- coding: utf-8 -*-
u"""
Created on 2015-8-8
@author: cheng.li
"""
from PyFin.Analysis.TechnicalAnalysis.StatelessTechnicalAnalysers import SecuritySignValueHolder
from PyFin.Analysis.TechnicalAnalysis.StatelessTechnicalAnalysers import SecurityAverageValueHolder
from PyFin.Analysis.TechnicalAnalysis.StatelessTechnicalAnalysers import SecurityXAverageValueHolder
from PyFin.Analysis.TechnicalAnalysis.StatelessTechnicalAnalysers import SecurityMACDValueHolder
from PyFin.Analysis.TechnicalAnalysis.StatelessTechnicalAnalysers import SecurityExpValueHolder
from PyFin.Analysis.TechnicalAnalysis.StatelessTechnicalAnalysers import SecurityLogValueHolder
from PyFin.Analysis.TechnicalAnalysis.StatelessTechnicalAnalysers import SecuritySqrtValueHolder
from PyFin.Analysis.TechnicalAnalysis.StatelessTechnicalAnalysers import SecurityPowValueHolder
from PyFin.Analysis.TechnicalAnalysis.StatelessTechnicalAnalysers import SecurityAbsValueHolder
from PyFin.Analysis.TechnicalAnalysis.StatelessTechnicalAnalysers import SecurityAcosValueHolder
from PyFin.Analysis.TechnicalAnalysis.StatelessTechnicalAnalysers import SecurityAcoshValueHolder
from PyFin.Analysis.TechnicalAnalysis.StatelessTechnicalAnalysers import SecurityAsinValueHolder
from PyFin.Analysis.TechnicalAnalysis.StatelessTechnicalAnalysers import SecurityAsinhValueHolder
from PyFin.Analysis.TechnicalAnalysis.StatelessTechnicalAnalysers import SecurityNormInvValueHolder
from PyFin.Analysis.TechnicalAnalysis.StatelessTechnicalAnalysers import SecurityCeilValueHolder
from PyFin.Analysis.TechnicalAnalysis.StatelessTechnicalAnalysers import SecurityFloorValueHolder
from PyFin.Analysis.TechnicalAnalysis.StatelessTechnicalAnalysers import SecurityRoundValueHolder
from PyFin.Analysis.TechnicalAnalysis.StatelessTechnicalAnalysers import SecurityDiffValueHolder
from PyFin.Analysis.TechnicalAnalysis.StatelessTechnicalAnalysers import SecuritySimpleReturnValueHolder
from PyFin.Analysis.TechnicalAnalysis.StatelessTechnicalAnalysers import SecurityLogReturnValueHolder
from PyFin.Analysis.TechnicalAnalysis.StatelessTechnicalAnalysers import SecurityMaximumValueHolder
from PyFin.Analysis.TechnicalAnalysis.StatelessTechnicalAnalysers import SecurityMinimumValueHolder
from PyFin.Analysis.TechnicalAnalysis.StatefulTechnicalAnalysers import SecurityMovingAverage
from PyFin.Analysis.TechnicalAnalysis.StatefulTechnicalAnalysers import SecurityMovingDecay
from PyFin.Analysis.TechnicalAnalysis.StatefulTechnicalAnalysers import SecurityMovingMax
from PyFin.Analysis.TechnicalAnalysis.StatefulTechnicalAnalysers import SecurityMovingArgMax
from PyFin.Analysis.TechnicalAnalysis.StatefulTechnicalAnalysers import SecurityMovingMin
from PyFin.Analysis.TechnicalAnalysis.StatefulTechnicalAnalysers import SecurityMovingArgMin
from PyFin.Analysis.TechnicalAnalysis.StatefulTechnicalAnalysers import SecurityMovingRank
from PyFin.Analysis.TechnicalAnalysis.StatefulTechnicalAnalysers import SecurityMovingQuantile
from PyFin.Analysis.TechnicalAnalysis.StatefulTechnicalAnalysers import SecurityMovingAllTrue
from PyFin.Analysis.TechnicalAnalysis.StatefulTechnicalAnalysers import SecurityMovingAnyTrue
from PyFin.Analysis.TechnicalAnalysis.StatefulTechnicalAnalysers import Securit
|
yMovingSum
from PyFin.Analysis.TechnicalAnalysis.StatefulTechnicalAnalysers import SecurityMovingVariance
from PyFin.Analysis.TechnicalAnalysis.StatefulTechnicalAnalysers import SecurityMovingStandardDeviation
from PyFin.Analysis.TechnicalAnalysis.StatefulTechnicalAnalysers import SecurityMovi
|
ngCountedPositive
from PyFin.Analysis.TechnicalAnalysis.StatefulTechnicalAnalysers import SecurityMovingPositiveAverage
from PyFin.Analysis.TechnicalAnalysis.StatefulTechnicalAnalysers import SecurityMovingCountedNegative
from PyFin.Analysis.TechnicalAnalysis.StatefulTechnicalAnalysers import SecurityMovingNegativeAverage
from PyFin.Analysis.TechnicalAnalysis.StatefulTechnicalAnalysers import SecurityMovingPositiveDifferenceAverage
from PyFin.Analysis.TechnicalAnalysis.StatefulTechnicalAnalysers import SecurityMovingNegativeDifferenceAverage
from PyFin.Analysis.TechnicalAnalysis.StatefulTechnicalAnalysers import SecurityMovingRSI
from PyFin.Analysis.TechnicalAnalysis.StatefulTechnicalAnalysers import SecurityMovingLogReturn
from PyFin.Analysis.TechnicalAnalysis.StatefulTechnicalAnalysers import SecurityMovingCorrelation
__all__ = ['SecuritySignValueHolder',
'SecurityAverageValueHolder',
'SecurityXAverageValueHolder',
'SecurityMACDValueHolder',
'SecurityExpValueHolder',
'SecurityLogValueHolder',
'SecuritySqrtValueHolder',
'SecurityPowValueHolder',
'SecurityAbsValueHolder',
'SecurityAcosValueHolder',
'SecurityAcoshValueHolder',
'SecurityAsinValueHolder',
'SecurityAsinhValueHolder',
'SecurityNormInvValueHolder',
'SecurityCeilValueHolder',
'SecurityFloorValueHolder',
'SecurityRoundValueHolder',
'SecurityDiffValueHolder',
'SecuritySimpleReturnValueHolder',
'SecurityLogReturnValueHolder',
'SecurityMaximumValueHolder',
'SecurityMinimumValueHolder',
'SecurityMovingAverage',
'SecurityMovingDecay',
'SecurityMovingMax',
'SecurityMovingArgMax',
'SecurityMovingMin',
'SecurityMovingArgMin',
'SecurityMovingRank',
'SecurityMovingQuantile',
'SecurityMovingAllTrue',
'SecurityMovingAnyTrue',
'SecurityMovingSum',
'SecurityMovingVariance',
'SecurityMovingStandardDeviation',
'SecurityMovingCountedPositive',
'SecurityMovingPositiveAverage',
'SecurityMovingCountedNegative',
'SecurityMovingNegativeAverage',
'SecurityMovingPositiveDifferenceAverage',
'SecurityMovingNegativeDifferenceAverage',
'SecurityMovingRSI',
'SecurityMovingLogReturn',
'SecurityMovingCorrelation']
|
bswartz/manila
|
manila/api/middleware/fault.py
|
Python
|
apache-2.0
| 3,089 | 0 |
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# L
|
icense for the specific language governing permissions and limitations
# under the Lice
|
nse.
from oslo_log import log
import six
import webob.dec
import webob.exc
from manila.api.openstack import wsgi
from manila.i18n import _
from manila import utils
from manila.wsgi import common as base_wsgi
LOG = log.getLogger(__name__)
class FaultWrapper(base_wsgi.Middleware):
"""Calls down the middleware stack, making exceptions into faults."""
_status_to_type = {}
@staticmethod
def status_to_type(status):
if not FaultWrapper._status_to_type:
for clazz in utils.walk_class_hierarchy(webob.exc.HTTPError):
FaultWrapper._status_to_type[clazz.code] = clazz
return FaultWrapper._status_to_type.get(
status, webob.exc.HTTPInternalServerError)()
def _error(self, inner, req):
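        # Turn an arbitrary exception into a webob HTTP fault; the exception
        # text is only exposed to the client when the exception is marked safe.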
if isinstance(inner, UnicodeDecodeError):
msg = _("Error decoding your request. Either the URL or the "
"request body contained characters that could not be "
"decoded by Manila.")
return wsgi.Fault(webob.exc.HTTPBadRequest(explanation=msg))
LOG.exception("Caught error: %s", inner)
safe = getattr(inner, 'safe', False)
headers = getattr(inner, 'headers', None)
status = getattr(inner, 'code', 500)
if status is None:
status = 500
msg_dict = dict(url=req.url, status=status)
LOG.info("%(url)s returned with HTTP %(status)d", msg_dict)
outer = self.status_to_type(status)
if headers:
outer.headers = headers
# NOTE(johannes): We leave the explanation empty here on
# purpose. It could possibly have sensitive information
# that should not be returned back to the user. See
# bugs 868360 and 874472
# NOTE(eglynn): However, it would be over-conservative and
# inconsistent with the EC2 API to hide every exception,
# including those that are safe to expose, see bug 1021373
if safe:
outer.explanation = '%s: %s' % (inner.__class__.__name__,
six.text_type(inner))
return wsgi.Fault(outer)
@webob.dec.wsgify(RequestClass=wsgi.Request)
def __call__(self, req):
try:
return req.get_response(self.application)
except Exception as ex:
return self._error(ex, req)
|
aguinane/qld-tariffs
|
qldtariffs/monthanalysis.py
|
Python
|
mit
| 2,897 | 0.000345 |
from statistics import mean
from datetime import datetime, timedelta
import calendar
from typing import NamedTuple
from typing import Iterable, Tuple, Dict
from energy_shaper import group_into_profiled_intervals
from .dayanalysis import Usage, get_daily_charges
class MonthUsage(NamedTuple):
""" Represents a usage period """
days: int
peak: float
shoulder: float
offpeak: float
total: float
demand: float
def __repr__(self) -> str:
return f"<MonthUsage {self.days} days {self.total}>"
def get_monthly_charges(
records: Iterable[Tuple[datetime, datetime, float]],
retailer: str = "ergon",
tariff: str = "T14",
fy: str = "2016",
) -> Dict[Tuple[int, int], MonthUsage]:
""" Get summated monthly charges
    :param records: Iterable of tuples in the form (billing_start, billing_end, usage)
:param retailer: Retailer config to get the peak time periods from
:param tariff: Name of tariff from config
"""
months: dict = {}
billing = list(group_into_profiled_intervals(records, interval_m=30))
for reading in billing:
# Dates are end of billing period so first interval is previous day
day = reading.end - timedelta(hours=0.5)
month = (day.year, day.month)
if month not in months:
months[month] = []
dailies = get_daily_charges(records, retailer, tariff, fy)
for day in dailies:
month = (day.year, day.month)
months[month].append(dailies[day])
months_summary = {}
for month in months:
daily_data = months[month]
demand = average_peak_demand(daily_data)
u = [sum(x) for x in zip(*daily_data)]
num_days = calendar.monthrange(month[0], month[1])[1]
summary = MonthUsage(num_days, u[0], u[1], u[2], u[3], demand)
months_summary[month] = summary
return months_summary
def average_daily_peak_demand(peak_usage: float, peak_hrs: float = 6.5) -> float:
""" Calculate the average daily peak demand in kW
:param peak_usage: Usage during peak window in kWh
:param peak_hrs: Length of peak window in hours
"""
return peak_usage / peak_hrs
def average_peak_demand(daily_summary: Dict[str, Usage]) -> float:
""" Get the average peak demand for a set of daily usage stats
"""
# Sort and get top 4 demand days
top_four_days = []
for i, day in enumerate(
sorted
|
(daily_summary, key=lambda tup: (tup[0], tup[1]), reverse=True)
):
if i < 4:
if day.peak:
demand = day.peak
else:
demand = day.shoulder
avg_peak_d
|
emand = average_daily_peak_demand(demand)
top_four_days.append(avg_peak_demand)
if top_four_days:
return mean(top_four_days)
else:
return 0
|
sileht/deb-openstack-quantum
|
quantum/plugins/ryu/nova/linux_net.py
|
Python
|
apache-2.0
| 2,786 | 0.000359 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Isaku Yamahata <yamahata at private email ne jp>
# <yamahata at valinux co jp>
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova import flags
from nova import log as logging
from nova import utils
from nova.network import linux_net
from nova.openstack.common import cfg
from ryu.app.client import OFPClient
LOG = logging.getLogger(__name__)
ryu_linux_net_opt = cfg.StrOpt('linuxnet_ovs_ryu_api_host',
|
default='127.0.0.1:8080',
help='Openflow Ryu REST API host:port')
FLAGS = flags.FLAGS
F
|
LAGS.register_opt(ryu_linux_net_opt)
def _get_datapath_id(bridge_name):
out, _err = utils.execute('ovs-vsctl', 'get', 'Bridge',
bridge_name, 'datapath_id', run_as_root=True)
return out.strip().strip('"')
def _get_port_no(dev):
out, _err = utils.execute('ovs-vsctl', 'get', 'Interface', dev,
'ofport', run_as_root=True)
return int(out.strip())
class LinuxOVSRyuInterfaceDriver(linux_net.LinuxOVSInterfaceDriver):
def __init__(self):
super(LinuxOVSRyuInterfaceDriver, self).__init__()
LOG.debug('ryu rest host %s', FLAGS.linuxnet_ovs_ryu_api_host)
self.ryu_client = OFPClient(FLAGS.linuxnet_ovs_ryu_api_host)
self.datapath_id = _get_datapath_id(
FLAGS.linuxnet_ovs_integration_bridge)
if linux_net.binary_name == 'nova-network':
for tables in [linux_net.iptables_manager.ipv4,
linux_net.iptables_manager.ipv6]:
tables['filter'].add_rule('FORWARD',
'--in-interface gw-+ --out-interface gw-+ -j DROP')
linux_net.iptables_manager.apply()
def plug(self, network, mac_address, gateway=True):
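        # Create the OVS interface as usual, then register its OpenFlow port
        # number with the Ryu controller so the datapath learns the new port.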
LOG.debug("network %s mac_adress %s gateway %s",
network, mac_address, gateway)
ret = super(LinuxOVSRyuInterfaceDriver, self).plug(
network, mac_address, gateway)
port_no = _get_port_no(self.get_dev(network))
self.ryu_client.create_port(network['uuid'], self.datapath_id, port_no)
return ret
|
dedichan/ChemDB
|
setup_win.py
|
Python
|
gpl-3.0
| 1,223 | 0.035159 |
from distutils.core import setup
import py2exe
import os, sys
from glob import glob
import PyQt
|
5
data_files=[('',['C:/Python34/DLLs/sqlite3.dll','C:/Python34/Lib/site-packages/PyQt5/icuuc53.dll','C:/Python34/Lib/site-packages/PyQt5/icudt53.dll','C:/Python34/Lib/site-packages/PyQt5/icuin53.dll','C:/Python34/Lib/site-pack
|
ages/PyQt5/Qt5Gui.dll','C:/Python34/Lib/site-packages/PyQt5/Qt5Core.dll','C:/Python34/Lib/site-packages/PyQt5/Qt5Widgets.dll']),
('data',['data/configure','data/model.sqlite','data/loading.jpg']),
('platforms',['C:/Python34/Lib/site-packages/PyQt5/plugins/platforms/qminimal.dll','C:/Python34/Lib/site-packages/PyQt5/plugins/platforms/qoffscreen.dll','C:/Python34/Lib/site-packages/PyQt5/plugins/platforms/qwindows.dll'])
]
qt_platform_plugins = [("platforms", glob(PyQt5.__path__[0] + r'\plugins\platforms\*.*'))]
data_files.extend(qt_platform_plugins)
msvc_dlls = [('.', glob(r'''C:/Windows/System32/msvc?100.dll'''))]
data_files.extend(msvc_dlls)
setup(
windows = ["ChemDB.py"],
zipfile = None,
data_files = data_files,
options = {
'py2exe': {
'includes' : ['sip','PyQt5.QtCore','PyQt5.QtGui',"sqlite3",'xlrd','xlwt',"_sqlite3","PyQt5"],
}
},
)
|
QiJune/Paddle
|
python/paddle/fluid/op.py
|
Python
|
apache-2.0
| 10,014 | 0 |
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-
|
2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHO
|
UT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import numpy as np
import six
import paddle.fluid.core as core
import paddle.fluid.proto.framework_pb2 as framework_pb2
def get_all_op_protos():
"""
Get all registered op proto from PaddlePaddle C++ end.
:return: A list of registered OpProto.
"""
protostrs = core.get_all_op_protos()
ret_values = []
for pbstr in protostrs:
op_proto = framework_pb2.OpProto.FromString(six.binary_type(pbstr))
ret_values.append(op_proto)
return ret_values
def is_str(s):
return isinstance(s, six.string_types)
class OpDescCreationMethod(object):
"""
    Convert the user's input (only keyword arguments are supported) to OpDesc
based on the OpProto.
:param op_proto: The OpProto object.
:type op_proto: op_proto_pb2.OpProto
"""
def __init__(self, op_proto):
if not isinstance(op_proto, framework_pb2.OpProto):
raise TypeError(
"Type of op_proto should be OpProto in PaddlePaddle.")
self.__op_proto__ = op_proto
def __call__(self, *args, **kwargs):
"""
Convert user's input to OpDesc. Only keyword arguments are supported.
:return: The OpDesc based on user input.
:rtype: op_desc_pb2.OpDesc
"""
if len(args) != 0:
raise ValueError("Only keyword arguments are supported.")
op_desc = framework_pb2.OpDesc()
for input_parameter in self.__op_proto__.inputs:
input_arguments = kwargs.get(input_parameter.name, [])
if is_str(input_arguments):
input_arguments = [input_arguments]
if not input_parameter.duplicable and len(input_arguments) > 1:
raise ValueError(
"Input %s expects only one input, but %d are given." %
(input_parameter.name, len(input_arguments)))
ipt = op_desc.inputs.add()
ipt.parameter = input_parameter.name
ipt.arguments.extend(input_arguments)
for output_parameter in self.__op_proto__.outputs:
output_arguments = kwargs.get(output_parameter.name, [])
if is_str(output_arguments):
output_arguments = [output_arguments]
if not output_parameter.duplicable and len(output_arguments) > 1:
raise ValueError(
"Output %s expects only one output, but %d are given." %
(output_parameter.name, len(output_arguments)))
out = op_desc.outputs.add()
out.parameter = output_parameter.name
out.arguments.extend(output_arguments)
# Types
op_desc.type = self.__op_proto__.type
# Attrs
for attr in self.__op_proto__.attrs:
if attr.generated:
continue
user_defined_attr = kwargs.get(attr.name, None)
if user_defined_attr is not None:
new_attr = op_desc.attrs.add()
new_attr.name = attr.name
new_attr.type = attr.type
if isinstance(user_defined_attr, np.ndarray):
user_defined_attr = user_defined_attr.tolist()
if attr.type == framework_pb2.INT:
new_attr.i = user_defined_attr
elif attr.type == framework_pb2.FLOAT:
new_attr.f = user_defined_attr
elif attr.type == framework_pb2.STRING:
new_attr.s = user_defined_attr
elif attr.type == framework_pb2.BOOLEAN:
new_attr.b = user_defined_attr
elif attr.type == framework_pb2.INTS:
new_attr.ints.extend(user_defined_attr)
elif attr.type == framework_pb2.FLOATS:
new_attr.floats.extend(user_defined_attr)
elif attr.type == framework_pb2.STRINGS:
new_attr.strings.extend(user_defined_attr)
elif attr.type == framework_pb2.BOOLEANS:
new_attr.bools.extend(user_defined_attr)
elif attr.type == framework_pb2.INT_PAIRS:
for p in user_defined_attr:
pair = new_attr.int_pairs.add()
pair.first = p[0]
pair.second = p[1]
else:
raise NotImplementedError(
"A not supported attribute type: %s." % (
str(attr.type)))
return op_desc
@staticmethod
def any_is_true(generator):
"""
Reduce a boolean array to a single boolean parameter. If any element in
the array is True, this function will return True, otherwise False.
"""
for flag in generator:
if flag:
return True
return False
class OpInfo(object):
def __init__(self, name, method, inputs, outputs, attrs):
self.name = name
self.method = method
self.inputs = inputs
self.outputs = outputs
self.attrs = attrs
def create_op_creation_method(op_proto):
"""
Generate op creation method for an OpProto.
"""
method = OpDescCreationMethod(op_proto)
def __impl__(*args, **kwargs):
opdesc = method(*args, **kwargs)
return core.Operator.create(opdesc.SerializeToString())
return OpInfo(
method=__impl__,
name=op_proto.type,
inputs=[(var.name, var.duplicable) for var in op_proto.inputs],
outputs=[(var.name, var.duplicable) for var in op_proto.outputs],
attrs=[attr.name for attr in op_proto.attrs])
class OperatorFactory(object):
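    # Registry of op creation methods keyed by op type; calling an instance
    # dispatches on the "type" argument (positional or keyword), with all
    # remaining arguments passed as keywords.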
def __init__(self):
self.op_methods = dict()
for op_proto in get_all_op_protos():
method = create_op_creation_method(op_proto)
self.op_methods[method.name] = method
def __call__(self, *args, **kwargs):
if "type" in kwargs:
if len(args) != 0:
raise ValueError(
"Except the argument \"type\","
"all of the other arguments should be keyword arguments.")
t = kwargs.pop("type")
else:
if len(args) != 1:
raise ValueError(
"Except the argument \"type\","
"all of the other arguments should be keyword arguments.")
t = args[0]
return self.get_op_info(t).method(**kwargs)
def types(self):
return list(self.op_methods.keys())
def get_op_info(self, t):
if t not in self.op_methods:
raise ValueError("The operator: %s is not registered." % t)
return self.op_methods.get(t)
def get_op_input_names(self, type):
return [x[0] for x in self.get_op_info(type).inputs]
def get_op_inputs(self, type):
return self.get_op_info(type).inputs
def get_op_output_names(self, type):
return [x[0] for x in self.get_op_info(type).outputs]
def get_op_outputs(self, type):
return self.get_op_info(type).outputs
def get_op_attr_names(self, type):
return self.get_op_info(type).attrs
class __RecurrentOp__(object):
__proto__ = None
type = "recurrent"
def __init__(self):
# cache recurrent_op's proto
if self.__proto__ is None:
for op_proto in get_all_op_protos():
if op_proto.type == self.type:
self.__proto__ = op_proto
def __call__(self, *args, **kwargs):
|
ic-hep/DIRAC
|
src/DIRAC/Core/Utilities/MemStat.py
|
Python
|
gpl-3.0
| 1,174 | 0.000852 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
__RCSID__ =
|
"$Id$"
def VmB(vmKey):
__memScale = {"kB": 1024.0, "mB": 1024.0 * 1024.0, "KB": 1024.0, "MB": 1024.0 * 1024.0}
__vmKeys = [
"VmPeak:",
"VmSize:",
"VmLck:",
"VmHWM:",
"VmRSS:",
"VmData:",
"VmStk:",
"VmExe:",
|
"VmLib:",
"VmPTE:",
"VmPeak",
"VmSize",
"VmLck",
"VmHWM",
"VmRSS",
"VmData",
"VmStk",
"VmExe",
"VmLib",
"VmPTE",
]
if vmKey not in __vmKeys:
return 0
procFile = "/proc/%d/status" % os.getpid()
# get pseudo file /proc/<pid>/status
try:
with open(procFile) as myFile:
value = myFile.read()
except Exception:
return 0.0 # non-Linux?
# get vmKey line e.g. 'VmRSS: 9999 kB\n ...'
i = value.index(vmKey)
value = value[i:].split(None, 3) # whitespace
if len(value) < 3:
return 0.0 # invalid format?
# convert Vm value to bytes
return float(value[1]) * __memScale[value[2]]
|
laloxxx20/TiendaVirtual
|
setup.py
|
Python
|
apache-2.0
| 293 | 0 |
#!/usr/bin/env python
from setuptools import setup
set
|
up(
name='YourAppName',
version='1.0',
description='OpenShift App',
author='Your Name',
author_email='example@example.com',
url='http://www.python.org/
|
sigs/distutils-sig/',
install_requires=['Django<=1.4'],
)
|
claytantor/grailo
|
feeds/models.py
|
Python
|
lgpl-3.0
| 6,456 | 0.006506 |
from django.db import models
from django.contrib.auth.models import User
from datetime import datetime
from django.utils.translation import ugettext_lazy as _
# relational databases are a terrible way to do
# multicast messages (just ask Twitter) but here you have it :-)
import re
reply_re = re.compile(r"^@(\w+)")
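# Matches a leading "@username" mention, e.g. "@alice hi" captures "alice".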
class Templar(models.Model):
user = models.ForeignKey(User)
public_key = models.CharField(max_length=250, unique=True)
get_absolute_url = models.CharField(max_length=250)
pw_encrypted = models.TextField()
avatar = models.TextField()
class Feed(models.Model):
title = models.CharField(max_length=250)
public_key = models.CharField(max_length=250)
created = models.DateTimeField(_('created'), default=datetime.now)
#relationships
owner = models.ForeignKey(Templar, related_name='feeds',null=True, blank=True)
followers = models.ManyToManyField(
'Templar',
through='FeedFollowers',
blank=True, null=True)
class Message(models.Model):
feed = models.ForeignKey(Feed,null=True, related_name='messages', blank=True)
templar = models.ForeignKey(Templar, related_name='messages',null=True, blank=True)
reply_to = models.ForeignKey('Message', related_name='replies', null=True, blank=True)
text = models.CharField(max_length=250)
sent = models.DateTimeField(_('sent'), default=datetime.now)
class FeedFollowers(models.Model):
feed = models.ForeignKey(Feed,null=True,blank=True)
follower = models.ForeignKey('Templar',null=True,blank=True)
def __unicode__(self):
        return self.feed.title + ' ' + self.follower.user.username
#class Message(models.Model):
# """
# a single message from a user
# """
#
# text = models.CharField(_('text'), max_length=140)
# sender_type = models.ForeignKey(ContentType)
# sender_id = models.PositiveIntegerField()
# sender = generic.GenericForeignKey('sender_type', 'sender_id')
# sent = models.DateTimeField(_('sent'), default=datetime.now)
#
# def __unicode__(self):
# return self.text
#
# def get_absolute_url(self):
# return ("single_message", [self.id])
# get_absolute_url = models.permalink(get_absolute_url)
#
# class Meta:
# ordering = ('-sent',)
#
#
#class MessageInstanceManager(models.Manager):
#
# def messages_for(self, recipient):
# recipient_type = ContentType.objects.get_for_model(recipient)
# return MessageInstance.objects.filter(recipient_type=recipient_type, recipient_id=recipient.id)
#
#
#class MessageInstance(models.Model):
# """
# the appearance of a message in a follower's timeline
#
# denormalized for better performance
# """
#
# text = models.CharField(_('text'), max_length=140)
# sender_type = models.ForeignKey(ContentType, related_name='message_instances')
# sender_id = models.PositiveIntegerField()
# sender = generic.GenericForeignKey('sender_type', 'sender_id')
# sent = models.DateTimeField(_('sent'))
#
# # to migrate to generic foreign key, find out the content_type id of User and do something like:
# # ALTER TABLE "microblogging_messageinstance"
# # ADD COLUMN "recipient_type_id" integer NOT NULL
# # REFERENCES "django_content_type" ("id")
# # DEFAULT <user content type id>;
# #
# # NOTE: you will also need to drop the foreign key constraint if it exists
#
# # recipient = models.ForeignKey(User, related_name="received_message_instances", verbose_name=_('recipient'))
#
# recipient_type = models.ForeignKey(ContentType)
# recipient_id = models.PositiveIntegerField()
# recipient = generic.GenericForeignKey('recipient_type', 'recipient_id')
#
# objects = MessageInstanceManager()
#
#
#def message(sender, instance, created, **kwargs):
# #if message is None:
# # message = Message.objects.create(text=text, sender=user)
# recipients = set() # keep track of who's received it
# user = instance.sender
#
# # add the sender's followers
# user_content_type = ContentType.objects.get_for_model(user)
# followings = Following.objects.filter(followed_content_type=user_content_type, followed_object_id=user.id)
# for follower in (following.follower_content_object for following in followings
|
):
# recipients.add(follower)
#
# # add sender
# recipients.add(user)
#
# # if starts with @user send it to them too even if not following
# match = reply_re.match(instance.text)
|
# if match:
# try:
# reply_recipient = User.objects.get(username=match.group(1))
# recipients.add(reply_recipient)
# except User.DoesNotExist:
# pass # oh well
# else:
# if notification:
# notification.send([reply_recipient], "message_reply_received", {'message': instance,})
#
# # now send to all the recipients
# for recipient in recipients:
# message_instance = MessageInstance.objects.create(text=instance.text, sender=user, recipient=recipient, sent=instance.sent)
#
#
#class FollowingManager(models.Manager):
#
# def is_following(self, follower, followed):
# try:
# following = self.get(follower_object_id=follower.id, followed_object_id=followed.id)
# return True
# except Following.DoesNotExist:
# return False
#
# def follow(self, follower, followed):
# if follower != followed and not self.is_following(follower, followed):
# Following(follower_content_object=follower, followed_content_object=followed).save()
#
# def unfollow(self, follower, followed):
# try:
# following = self.get(follower_object_id=follower.id, followed_object_id=followed.id)
# following.delete()
# except Following.DoesNotExist:
# pass
#
#
#class Following(models.Model):
# follower_content_type = models.ForeignKey(ContentType, related_name="followed", verbose_name=_('follower'))
# follower_object_id = models.PositiveIntegerField()
# follower_content_object = generic.GenericForeignKey('follower_content_type', 'follower_object_id')
#
# followed_content_type = models.ForeignKey(ContentType, related_name="followers", verbose_name=_('followed'))
# followed_object_id = models.PositiveIntegerField()
# followed_content_object = generic.GenericForeignKey('followed_content_type', 'followed_object_id')
#
# objects = FollowingManager()
#
#post_save.connect(message, sender=Message)
|
saydulk/sogo
|
Tests/Integration/test-carddav.py
|
Python
|
gpl-2.0
| 6,859 | 0.004228 |
#!/usr/bin/python
from config import hostname, port, username, password
import carddav
import sogotests
import unittest
import webdavlib
import time
class JsonDavEventTests(unittest.TestCase):
def setUp(self):
self._connect_as_user()
def _connect_as_user(self, newuser=username, newpassword=password):
self.dv = carddav.Carddav(newuser, newpassword)
def _create_new_event(self, path):
gid = self.dv.newguid(path)
event = {'startDate': "2015-12-25",
'startTime': "10:00",
'endDate': "2015-12-25",
'endTime': "23:00",
'isTransparent': 0,
'sendAppointmentNotifications': 0,
'summary': "Big party",
'alarm': {'action': 'display',
'quantity': 10,
'unit': "MINUTES",
'reference': "BEFORE",
'relation': "START",
'email': "sogo1@example.com"},
'organizer': {'name': u"Balthazar C\xe9sar",
'email': "sogo2@example.com"},
'c_name': gid,
'c_folder': path
}
return (event, path, gid)
def _get_dav_data(self, filename, user=username, passwd=password):
w = webdavlib.WebDAVClient(hostname, port, user, passwd)
query = webdavlib.HTTPGET("http://localhost/SOGo/dav/%s/Calendar/personal/%s" % (username, filename))
w.execute(query)
self.assertEquals(query.response['status'], 200)
return query.response['body'].split("\r\n")
def _get_dav_field(self, davdata, fieldname):
try:
data = [a.split(':')[1] for a in davdata if fieldname in a][0]
except IndexError:
data = ''
return data
def test_create_new_event(self):
path = 'Calendar/personal'
(event, folder, gid) = self._create_new_event(path)
#print "Saving Event to:", folder, gid
self.dv.save_event(event, folder, gid)
#- Get the event back with JSON
self._connect_as_user()
self.dv.load_events()
elist = [e for e in self.dv.events if e['c_name'] == gid]
#- MUST have this event -- only once
self.assertEquals(len(elist), 1)
strdate = "%d-%.02d-%.02d" % time.gmtime(elist[0]['c_startdate'])[0:3]
self.assertEquals(strdate, event['startDate'])
#- Get the event back with DAV
dav = self._get_dav_data(gid, username, password)
self.assertEquals(self._get_dav_field(dav, 'SUMMARY:'), event['summary'])
class JsonDavPhoneTests(unittest.TestCase):
def setUp(self):
self._connect_as_user()
self.newphone = [{'type': 'home', 'value': '123.456.7890'}]
self.newphones_difftype = [{'type': 'home', 'value': '123.456.7890'},
{'type': 'work', 'value': '987.654.3210'},
{'type': 'fax', 'value': '555.666.7777'}]
self.newphones_sametype = [{'type': 'work', 'value': '123.456.7890'},
{'type': 'work', 'value': '987.654.3210'}]
# Easier to erase them all in tearDown
self.allphones = list(self.newphone)
self.allphones.extend(self.newphones_difftype)
self.allphones.extend(self.newphones_sametype)
#- In case there are no cards for this user
try:
self._get_card()
except IndexError:
path = 'Contacts/personal'
(card, path, gid) = self._create_new_card(path)
self._save_card(card)
def tearDown(self):
self._connect_as_user()
self._get_card()
#- Remove the phones we just added
for phone in self.allphones:
try:
self.card['phones'].pop(self.card['phones'].index(phone))
|
except ValueError:
#print "Can't find", phone
pass
self._save_card()
def _connect_as_user(se
|
lf, newuser=username, newpassword=password):
self.dv = carddav.Carddav(newuser, newpassword)
def _create_new_card(self, path):
gid = self.dv.newguid(path)
card = {'c_categories': None,
'c_cn': 'John Doe',
'c_component': 'vcard',
'c_givenname': 'John Doe',
'c_mail': 'johndoe@nothere.com',
'c_name': gid,
'c_o': '',
'c_screenname': '',
'c_sn': '',
'c_telephonenumber': '123.456.7890',
'emails': [{'type': 'pref', 'value': 'johndoe@nothere.com'}],
'phones': [{'type': 'home', 'value': '111.222.3333'}],
'id': gid}
return (card, path, gid)
def _get_card(self, name="John Doe"):
tmp_card = self.dv.get_cards(name)[0]
self.card = self.dv.get_card(tmp_card['c_name'])
def _save_card(self, card=None):
if card:
self.dv.save_card(card)
else:
self.dv.save_card(self.card)
def _get_dav_data(self, filename, user=username, passwd=password):
w = webdavlib.WebDAVClient(hostname, port, user, passwd)
query = webdavlib.HTTPGET("http://localhost/SOGo/dav/%s/Contacts/personal/%s" % (username, filename))
w.execute(query)
self.assertEquals(query.response['status'], 200)
return query.response['body'].split("\r\n")
def _phone_to_dav_str(self, phonedict):
return "TEL;TYPE=%s:%s" % (phonedict['type'], phonedict['value'])
def _testMultiplePhones(self, phones):
""" Add Multiple Phones to Contact JSON and verify with DAV """
#- Use JSON to get CARD and add a phone and save it back
self._get_card()
oldphones = self.card['phones']
oldphones.extend(phones)
self._save_card()
#- Make sure that the phone is there when using JSON
self._connect_as_user()
self._get_card()
#print "C:::", self.card
testphones = self.card['phones']
#print "P1:", oldphones
#print "P2:", testphones
self.assertEquals(sorted(oldphones), sorted(testphones))
#- Verify that DAV has the same values
dav = self._get_dav_data(self.card['id'], username, password)
for phone in phones:
found = dav.index(self._phone_to_dav_str(phone))
self.assertTrue(found > 0)
def testSinglePhone(self):
self._testMultiplePhones(self.newphone)
def testMultipleDifferentPhones(self):
self._testMultiplePhones(self.newphones_difftype)
def testMultipleSameTypePhones(self):
self._testMultiplePhones(self.newphones_sametype)
if __name__ == "__main__":
sogotests.runTests()
|
orlandov/parpg-game
|
local_loaders/xmlmap.py
|
Python
|
gpl-3.0
| 13,207 | 0.007496 |
#!/usr/bin/python
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Most of this code was copied from the FIFE file xmlmap.py
# It is part of the local code base now so we can customize what happens
# as we read map files
import fife
try:
import xml.etree.cElementTree as ET
except:
import xml.etree.ElementTree as ET
import loaders
from serializers import *
import time
FORMAT = '1.0'
class XMLMapLoader(fife.ResourceLoader):
def __init__(self, engine, data, callback):
""" The XMLMapLoader parses the xml map using several section.
Each section fires a callback (if given) which can e. g. be
used to show a progress bar.
The callback sends two values, a string and a float (which shows
the overall process): callback(string, float)
Inputs:
engine = FIFE engine
data = Engine object for PARPG data
callback = function callback
"""
fife.ResourceLoader.__init__(self)
self.thisown = 0
self.callback = callback
self.engine = engine
self.data = data
self.vfs = self.engine.getVFS()
self.model = self.engine.getModel()
self.pool = self.engine.getImagePool()
self.anim_pool = self.engine.getAnimationPool()
self.map = None
self.source = None
self.time_to_load = 0
self.nspace = None
def _err(self, msg):
raise SyntaxError(''.join(['File: ', self.source, ' . ', msg]))
def loadResource(self, location):
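        # Called by FIFE's resource loading machinery; parses the XML map file
        # referenced by `location` and returns the constructed map object.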
start_time = time.time()
self.source = location.getFilename()
f = self.vfs.open(self.source)
f.thisown = 1
tree = ET.parse(f)
root = tree.getroot()
map = self.parseMap(root)
self.time_to_load = time.time() - start_time
return map
def parseMap(self, map_elt):
if not map_elt:
self._err('No <map> element found at top level of map file definition.')
id,format = map_elt.get('id'),map_elt.get('format')
if not format == FORMAT: self._err(''.join(['This file has format ', format, ' but this loader has format ', FORMAT]))
if not id: self._err('Map declared without an identifier.')
map = None
try:
self.map = self.model.createMap(str(id))
self.map.setResourceFile(self.source)
except fife.Exception, e: # NameClash appears as general fife.Exception; any ideas?
print e.getMessage()
print ''.join(['File: ', self.source, '. The map ', str(id), ' already exists! Ignoring map definition.'])
return None
# xml-specific directory imports. This is used by xml savers.
self.map.importDirs = []
if self.callback is not None:
self.callback('created map', float(0.25) )
self.parseImports(map_elt, self.map)
self.parseLayers(map_elt, self.map)
self.parseCameras(map_elt, self.map)
return self.map
def parseImports(self, map_elt, map):
parsedImports = {}
if self.callback:
tmplist = map_elt.findall('import')
i = float(0)
for item in map_elt.findall('import'):
file = item.get('file')
if file:
file = reverse_root_subfile(self.source, file)
dir = item.get('dir')
|
if dir:
dir = reverse_root_subf
|
ile(self.source, dir)
# Don't parse duplicate imports
if (dir,file) in parsedImports:
print "Duplicate import:" ,(dir,file)
continue
parsedImports[(dir,file)] = 1
if file and dir:
                loaders.loadImportFile('/'.join([dir, file]), self.engine)
elif file:
loaders.loadImportFile(file, self.engine)
elif dir:
loaders.loadImportDirRec(dir, self.engine)
map.importDirs.append(dir)
else:
print 'Empty import statement?'
if self.callback:
i += 1
self.callback('loaded imports', float( i / float(len(tmplist)) * 0.25 + 0.25 ) )
def parseLayers(self, map_elt, map):
if self.callback is not None:
tmplist = map_elt.findall('layer')
i = float(0)
for layer in map_elt.findall('layer'):
id = layer.get('id')
grid_type = layer.get('grid_type')
x_scale = layer.get('x_scale')
y_scale = layer.get('y_scale')
rotation = layer.get('rotation')
x_offset = layer.get('x_offset')
y_offset = layer.get('y_offset')
pathing = layer.get('pathing')
if not x_scale: x_scale = 1.0
if not y_scale: y_scale = 1.0
if not rotation: rotation = 0.0
if not x_offset: x_offset = 0.0
if not y_offset: y_offset = 0.0
if not pathing: pathing = "cell_edges_only"
if not id: self._err('<layer> declared with no id attribute.')
if not grid_type: self._err(''.join(['Layer ', str(id), ' has no grid_type attribute.']))
allow_diagonals = pathing == "cell_edges_and_diagonals"
cellgrid = self.model.getCellGrid(grid_type)
if not cellgrid: self._err('<layer> declared with invalid cellgrid type. (%s)' % grid_type)
cellgrid.setRotation(float(rotation))
cellgrid.setXScale(float(x_scale))
cellgrid.setYScale(float(y_scale))
cellgrid.setXShift(float(x_offset))
cellgrid.setYShift(float(y_offset))
layer_obj = None
try:
layer_obj = map.createLayer(str(id), cellgrid)
except fife.Exception, e:
print e.getMessage()
print 'The layer ' + str(id) + ' already exists! Ignoring this layer.'
continue
strgy = fife.CELL_EDGES_ONLY
if pathing == "cell_edges_and_diagonals":
strgy = fife.CELL_EDGES_AND_DIAGONALS
if pathing == "freeform":
strgy = fife.FREEFORM
layer_obj.setPathingStrategy(strgy)
self.parseInstances(layer, layer_obj)
if self.callback is not None:
i += 1
self.callback('loaded layer :' + str(id), float( i / float(len(tmplist)) * 0.25 + 0.5 ) )
# cleanup
if self.callback is not None:
del tmplist
del i
def parseInstances(self, layerelt, layer):
instelt = layerelt.find('instances')
instances = instelt.findall('i')
instances.extend(instelt.findall('inst'))
instances.extend(instelt.findall('instance'))
for instance in instances:
objectID = instance.get('object')
if not objectID:
objectID = instance.get('obj')
if not objectID:
objectID = instance.get('o')
if not objectID: self._err('<instance> does not specify an object attribute.')
nspace = instance.get('namespace')
if not nspace:
nspace = instance.get('ns')
|
rbuffat/pyidf
|
tests/test_glazeddoorinterzone.py
|
Python
|
apache-2.0
| 2,670 | 0.003745 |
import os
import tempfile
import unittest
import logging
from pyidf import ValidationLevel
import pyidf
from pyidf.idf import IDF
from pyidf.thermal_zones_and_surfaces import GlazedDoorInterzone
log = logging.getLogger(__name__)
class TestGlazedDoorInterzone(unittest.TestCase):
def setUp(self):
self.fd, self.path = tempfile.mkstemp()
def tearDown(self):
os.remove(self.path)
def test_create_glazeddoorinterzone(self):
pyidf.validation_level = ValidationLevel.error
obj = GlazedDoorInterzone()
# alpha
var_name = "Name"
obj.name = var_name
# object-list
var_construction_name = "object-list|Construction Name"
obj.construction_name = var_construction_name
# object-list
var_building_surface_name = "object-list|Building Surface Name"
obj.building_surface_name = var_building_surface_name
# object-list
var_outside_boundary_condition_object = "object-list|Outside Boundary Condition Object"
obj.outside_boundary_condition_object = var_outside_boundary_condition_object
# real
var_multiplier = 1.0
obj.multiplier = var_multiplier
# real
var_starting_x_coordinate = 6.6
obj.starting_x_coordinate = var_starting_x_coordinate
# real
var_starting_z_coordinate = 7.7
obj.starting_z_coordinate = var_starting_z_coordinate
# real
var_length = 8.8
obj.length = var_length
# real
var_height = 9.9
obj.height = var_height
idf = IDF()
idf.add(obj)
idf.save(self.path, check=False)
with open(self.path, mode='r') as f:
|
for line in f:
log.debug(line.strip())
idf2 = IDF(self.path)
self.assertEqual(idf2.glazeddoorinterzones[0].name, var_name)
self.assertEqual(idf2.glazeddoorinterzones[0].construction_name, var_construction_name)
|
self.assertEqual(idf2.glazeddoorinterzones[0].building_surface_name, var_building_surface_name)
self.assertEqual(idf2.glazeddoorinterzones[0].outside_boundary_condition_object, var_outside_boundary_condition_object)
self.assertAlmostEqual(idf2.glazeddoorinterzones[0].multiplier, var_multiplier)
self.assertAlmostEqual(idf2.glazeddoorinterzones[0].starting_x_coordinate, var_starting_x_coordinate)
self.assertAlmostEqual(idf2.glazeddoorinterzones[0].starting_z_coordinate, var_starting_z_coordinate)
self.assertAlmostEqual(idf2.glazeddoorinterzones[0].length, var_length)
self.assertAlmostEqual(idf2.glazeddoorinterzones[0].height, var_height)
|
Aluriak/neural_world
|
neural_world/prompt.py
|
Python
|
gpl-2.0
| 4,845 | 0.009701 |
"""
Definition of the Prompt class, designed for editing Configuration
with a terminal prompt.
"""
from functools import partial
from prompt_toolkit import prompt
from prompt_toolkit.contrib.regular_languages.compiler import compile as pt_compile
from prompt_toolkit.contrib.completers import WordCompleter
from prompt_toolkit.contrib.regular_languages.completion import GrammarCompleter
import neural_world.commons as commons
import neural_world.actions as actions
LOGGER = commons.logger()
PROMPT_WELCOME = '?> '
COMMAND_NAMES = {
# command id: command aliases
'quit': ('quit', 'exit', ':q', 'q'),
'help': ('help', 'wtf', ':h', 'h'),
'conf': ('config', 'conf', ':c', 'c', ':p', 'p'),
'set' : ('set', ':s', 's'),
'get' : ('get', ':g', 'g'),
'apply': ('apply', ':a', 'a'),
}
def commands_grammar(config, commands=COMMAND_NAMES):
"""Return a grammar for given commands (dict command:aliases)
that use given Configuration for field autocompletion.
"""
def aliases(cmd):
"""access the aliases of given (sub)command.
if not in commands dict, will use it as an iterable."""
try: return '|'.join(commands[cmd])
except KeyError: return '|'.join(cmd)
def cmd2reg(cmd, subcmd=None, args=None):
"""layout automatization"""
return (
'(\s* (?P<cmd>(' + aliases(cmd) + '))'
+ ('' if subcmd is None
else ('\s+ (?P<subcmd>('+ aliases(subcmd) + ')) \s* '))
+ ('' if args is None else ('\s+ (?P<args>(.*)) \s* '))
+ ') |\n'
)
# get grammar, log it and return it
grammar = (
cmd2reg('quit', None, None)
+ cmd2reg('help', None, None)
+ cmd2reg('conf', None, None)
+ cmd2reg('set', config.mutable_fields, True)
+ cmd2reg('get', config.all_fields, None)
+ cmd2reg('apply', None, None)
)
LOGGER.debug('PROMPT GRAMMAR:\n' + str(grammar))
return pt_compile(grammar)
class Prompt(actions.ActionEmitter):
def __init__(self, config, invoker):
super().__init__(invoker)
self.config = config
self.grammar = commands_grammar(config)
completer = GrammarCompleter(
self.grammar,
{'subcmd': WordCompleter(tuple(config.all_fields))}
)
self._get_input = partial(prompt, PROMPT_WELCOME, completer=completer)
def input(self):
"""Handle user input, until user want to apply the config"""
while not self._handle(self._get_input()): pass
def _handle(self, input_text):
"""Return True when the user asks for leave the prompt"""
match = self.grammar.match(input_text)
if match is None:
print('invalid command')
return False # do not quit the prompt
elif len(input_text) == 0:
return False
else:
|
values
|
= match.variables()
subcmd = values.get('subcmd')
args = values.get('args')
cmd = next( # get root name, not an alias
cmd_name
for cmd_name, aliases in COMMAND_NAMES.items()
if values.get('cmd') in aliases
)
# call function associated with the command
leave_prompt = bool(getattr(self, 'on_' + cmd)(subcmd, args))
return leave_prompt
return False
def on_quit(self, subcmd:None=None, args:None=None):
"""send a quit request to the simulation, and leave the prompt"""
self.invoker.add(actions.QuitAction())
return True # leave the prompt
def on_apply(self, subcmd:None=None, args:None=None):
"""Leave the prompt, then apply the configuration to the simulation"""
return True # leave the prompt
def on_conf(self, subcmd:None=None, args:None=None):
"""show the config"""
print(self.config)
def on_set(self, config_field, args):
"""set given value for given mutable config field
ex: set mutation_rate 0.2"""
setattr(self.config, config_field, args)
print(config_field, 'set to', getattr(self.config, config_field))
def on_get(self, config_field, args:None=None):
"""show value of given config field
ex: get space_height"""
print(config_field + ':', getattr(self.config, config_field))
def on_help(self, subcmd:None=None, args:None=None):
"""show this help"""
callbacks = tuple(sorted(
attrname[3:] for attrname in self.__dir__()
if attrname.startswith('on_')
))
maxlen = len(max(callbacks, key=len))
# printings !
for callback in callbacks:
print(callback.rjust(maxlen) + ':',
getattr(self, 'on_' + callback).__doc__)
|
jbuchbinder/youtube-dl
|
youtube_dl/__init__.py
|
Python
|
unlicense
| 18,757 | 0.001546 |
#!/usr/bin/env python
# coding: utf-8
from __future__ import unicode_literals
__license__ = 'Public Domain'
import codecs
import io
import os
import random
import sys
from .options import (
parseOpts,
)
from .compat import (
compat_expanduser,
compat_getpass,
compat_shlex_split,
workaround_optparse_bug9161,
)
from .utils import (
DateRange,
decodeOption,
DEFAULT_OUTTMPL,
DownloadError,
match_filter_func,
MaxDownloadsReached,
preferredencoding,
read_batch_urls,
SameFileError,
setproctitle,
std_headers,
write_string,
render_table,
)
from .update import update_self
from .downloader import (
FileDownloader,
)
from .extractor import gen_extractors, list_extractors
from .extractor.adobepass import MSO_INFO
from .YoutubeDL import YoutubeDL
def _real_main(argv=None):
# Compatibility fixes for Windows
if sys.platform == 'win32':
# https://github.com/rg3/youtube-dl/issues/820
codecs.register(lambda name: codecs.lookup('utf-8') if name == 'cp65001' else None)
workaround_optparse_bug9161()
setproctitle('youtube-dl')
parser, opts, args = parseOpts(argv)
# Set user agent
if opts.user_agent is not None:
std_headers['User-Agent'] = opts.user_agent
# Set referer
if opts.referer is not None:
std_headers['Referer'] = opts.referer
# Custom HTTP headers
if opts.headers is not None:
for h in opts.headers:
if ':' not in h:
parser.error('wrong header formatting, it should be key:value, not "%s"' % h)
key, value = h.split(':', 1)
if opts.verbose:
write_string('[debug] Adding header from command line option %s:%s\n' % (key, value))
std_headers[key] = value
# Dump user agent
if opts.dump_user_agent:
write_string(std_headers['User-Agent'] + '\n', out=sys.stdout)
sys.exit(0)
# Batch file verification
batch_urls = []
if opts.batchfile is not None:
try:
if opts.batchfile == '-':
batchfd = sys.stdin
else:
batchfd = io.open(
compat_expanduser(opts.batchfile),
'r', encoding='utf-8', errors='ignore')
batch_urls = read_batch_urls(batchfd)
if opts.verbose:
write_string('[debug] Batch file urls: ' + repr(batch_urls) + '\n')
except IOError:
sys.exit('ERROR: batch file could not be read')
all_urls = batch_urls + args
all_urls = [url.strip() for url in all_urls]
_enc = preferredencoding()
all_urls = [url.decode(_enc, 'ignore') if isinstance(url, bytes) else url for url in all_urls]
if opts.list_extractors:
for ie in list_extractors(opts.age_limit):
write_string(ie.IE_NAME + (' (CURRENTLY BROKEN)' if not ie._WORKING else '') + '\n', out=sys.stdout)
matchedUrls = [url for url in all_urls if ie.suitable(url)]
for mu in matchedUrls:
write_string(' ' + mu + '\n', out=sys.stdout)
sys.exit(0)
if opts.list_extractor_descriptions:
for ie in list_extractors(opts.age_limit):
if not ie._WORKING:
continue
desc = getattr(ie, 'IE_DESC', ie.IE_NAME)
if desc is False:
continue
if hasattr(ie, 'SEARCH_KEY'):
_SEARCHES = ('cute kittens', 'slithering pythons', 'falling cat', 'angry poodle', 'purple fish', 'running tortoise', 'sleeping bunny', 'burping cow')
_COUNTS = ('', '5', '10', 'all')
desc += ' (Example: "%s%s:%s" )' % (ie.SEARCH_KEY, random.choice(_COUNTS), random.choice(_SEARCHES))
write_string(desc + '\n', out=sys.stdout)
sys.exit(0)
if opts.ap_list_mso:
table = [[mso_id, mso_info['name']] for mso_id, mso_info in MSO_INFO.items()]
write_string('Supported TV Providers:\n' + render_table(['mso', 'mso name'], table) + '\n', out=sys.stdout)
sys.exit(0)
# Conflicting, missing and erroneous options
if opts.usenetrc and (opts.username is not None or opts.password is not None):
parser.error('using .netrc conflicts with giving username/password')
if opts.password is not None and opts.username is None:
parser.error('account username missing\n')
if opts.ap_password is not None and opts.ap_username is None:
parser.error('TV Provider account username missing\n')
if opts.outtmpl is not None and (opts.usetitle or opts.autonumber or opts.useid):
parser.error('using output template conflicts with using title, video ID or auto number')
if opts.usetitle and opts.useid:
parser.error('using title conflicts with using video ID')
if opts.username is not None and opts.password is None:
opts.password = compat_getpass('Type account password and press [Return]: ')
if opts.ap_username is not None and opts.ap_password is None:
opts.ap_password = compat_getpass('Type TV provider account password and press [Return]: ')
if opts.ratelimit is not None:
numeric_limit = FileDownloader.parse_bytes(opts.ratelimit)
if numeric_limit is None:
parser.error('invalid rate limit specified')
opts.ratelimit = numeric_limit
if opts.min_filesize is not None:
numeric_limit = FileDownloader.parse_bytes(opts.min_filesize)
if numeric_limit is None:
parser.error('invalid min_filesize specified')
opts.min_filesize = numeric_limit
if opts.max_filesize is not None:
numeric_limit = FileDownloader.parse_bytes(opts.max_filesize)
if numeric_limit is None:
parser.error('invalid max_filesize specified')
opts.max_filesize = numeric_limit
if opts.sleep_interval is not None:
if opts.sleep_interval < 0:
parser.error('sleep interval must be positive or 0')
if opts.max_sleep_interval is not None:
if opts.max_sleep_interval < 0:
parser.error('max sleep interval must be positive or 0')
if opts.max_sleep_interval < opts.sleep_interval:
parser.error('max sleep interval must be greater than or equal to min sleep interval')
else:
opts.max_sleep_interval = opts.sleep_interval
if opts.ap_mso and opts.ap_mso not in MSO_INFO:
parser.error('Unsupported TV Provider, use --ap-list-mso to get a list of supported TV Providers')
def parse_retries(retries):
if retries in ('inf', 'infinite'):
parsed_retries = float('inf')
else:
try:
parsed_retries = int(retries)
except (TypeError, ValueError):
parser.error('invalid retry count specified')
return parsed_retries
if opts.retries is not None:
opts.retries = parse_retries(opts.retries)
if opts.fragment_retries is not None:
opts.fragment_retries = parse_retries(opts.fragment_retries)
if opts.buffersize is not None:
numeric_buffersize = FileDownloader.parse_bytes(opts.buffersize)
if numeric_buffersize is None:
parser.error('invalid buffer size specified')
opts.buffersize = numeric_buffersize
if opts.playliststart <= 0:
raise ValueError('Playlist start must be positive')
if opts.playlistend not in (-1, None) and opts.playlistend < opts.playliststart:
raise ValueError('Playlist end must be greater than playlist start')
if opts.extractaudio:
if opts.audioformat not in ['best', 'aac', 'mp3', 'm4a', 'opus', 'vorbis', 'wav']:
parser.error('invalid audio format specified')
if opts.audioquality:
opts.audioquality = opts.audioquality.strip('k').strip('K')
if not opts.audioquality.isdigit():
parser.error('invalid audio quality specified')
if opts.recodevideo is not None:
if opts.recodevideo not in ['mp4', 'flv', 'webm', 'ogg', 'mkv', 'avi']:
parser.error('invalid video recode format specified')
if opts.convertsubtitles is not None:
if opts.conv
|
juliend88/os_image_factory
|
test-tools/pytesting_os/openstackutils.py
|
Python
|
gpl-3.0
| 8,608 | 0.015335 |
#!/usr/bin/env python
#-*- coding: utf-8 -
import keystoneclient.v2_0.client as keystone
from keystoneauth1.identity import v2
from keystoneauth1 import session
import novaclient.client as nova
import cinderclient.client as cinder
from glanceclient.v1 import client as glance
import neutronclient.v2_0.client as neutron
import heatclient.client as heat
import time, paramiko,os,re,errno
from socket import error as socket_error
from os import environ as env
class OpenStackUtils():
def __init__(self):
auth = v2.Password(auth_url=env['OS_AUTH_URL'],
username=env['OS_USERNAME'],
password=env['OS_PASSWORD'],
tenant_id=env['OS_TENANT_ID'])
sess = session.Session(auth=auth)
self.keystone_client = keystone.Client(username=env['OS_USERNAME'],
password=env['OS_PASSWORD'],
tenant_id=env['OS_TENANT_ID'],
auth_url=env['OS_AUTH_URL'],
region_name=env['OS_REGION_NAME'])
heat_url = self.keystone_client \
.service_catalog.url_for(service_type='orchestration',
endpoint_type='publicURL')
self.nova_client = nova.Client('2.1', region_name=env['OS_REGION_NAME'], session=sess)
self.cinder_client = cinder.Client('2', region_name=env['OS_REGION_NAME'], session=sess)
self.glance_client = glance.Client('2', region_name=env['OS_REGION_NAME'], session=sess)
self.neutron_client = neutron.Client(region_name=env['OS_REGION_NAME'], session=sess)
self.heat_client = heat.Client('1', region_name=env['OS_REGION_NAME'], endpoint=heat_url, session=sess)
def boot_vm_with_userdata_and_port(self,userdata_path,keypair,port):
#nics = [{'port-id': env['NOSE_PORT_ID']}]
nics = [{'port-id': port['port']['id'] }]
server = self.nova_client.servers.create(name="test-server-" + self.current_time_ms(), image=env['NOSE_IMAGE_ID'],
flavor=env['NOSE_FLAVOR'],userdata=file(userdata_path),key_name=keypair.name, nics=nics)
print 'Building, please wait...'
# wait for server create to be complete
self.wait_server_is_up(server)
self.wait_for_cloud_init(server)
return server
def boot_vm(self,image_id=env['NOSE_IMAGE_ID'],flavor=env['NOSE_FLAVOR'],keypair='default'):
nics = [{'net-id': env['NOSE_NET_ID']}]
server = self.nova_client.servers.create(name="test-server-" + self.current_time_ms(), image=image_id,security_groups=[env['NOSE_SG_ID']],
flavor=flavor, key_name=keypair.name, nics=nics)
print 'Building, please wait...'
self.wait_server_is_up(server)
self.wait_for_cloud_init(server)
return server
def get_server(self,server_id):
return self.nova_client.servers.get(server_id)
def destroy_server(self,server):
self.nova_client.servers.delete(server)
time.sleep(30)
def current_time_ms(self):
return str(int(round(time.time() * 1000)))
def get_console_log(self,server):
return self.nova_client.servers.get(server.id).get_console_output(length=600)
def get_spice_console(self,server):
return self.nova_client.servers.get(server.id).get_spice_console('spice-html5')
def create_server_snapshot(self,server):
return self.nova_client.servers.create_image(server,server.name+self.current_time_ms())
def get_image(self,image_id):
return self.glance_client.images.get(image_id)
def destroy_image(self,image_id):
self.glance_client.images.delete(image_id)
def initiate_ssh(self,floating_ip,private_key_filename):
ssh_connection = paramiko.SSHClient()
ssh_connection.set_missing_host_key_policy(paramiko.AutoAddPolicy())
retries_left = 5
while True:
try:
ssh_connection.connect(floating_ip.ip,username='cloud',key_filename=private_key_filename,timeout=180)
break
except socket_error as e:
if e.errno != errno.ECONNREFUSED or retries_left <= 1:
raise e
time.sleep(10) # wait 10 seconds and retry
retries_left -= 1
return ssh_connection
def create_floating_ip(self):
return self.nova_client.floating_ips.create('public')
#def associate_floating_ip_to_port(self,floating_ip):
# self.neutron_client.update_floatingip(floating_ip.id,{'floatingip': {'port_id': env['NOSE_PORT_ID'] }})
def associate_floating_ip_to_server(self,floating_ip, server):
self.nova_client.servers.get(server.id).add_floating_ip(floating_ip.ip)
time.sleep(10)
def delete_floating_ip(self,floating_ip):
self.nova_client.floating_ips.delete(floating_ip.id)
def rescue(self,server):
self
|
.wait_server_available(server)
return self.nova_client.servers.g
|
et(server.id).rescue()
def unrescue(self,server):
self.wait_server_available(server)
return self.nova_client.servers.get(server.id).unrescue()
def attach_volume_to_server(self,server,volume):
#self.nova_client.volumes.create_server_volume(server_id=server.id,volume_id=env['NOSE_VOLUME_ID'])
self.nova_client.volumes.create_server_volume(server_id=server.id,volume_id=volume.id)
status =volume.status
while status != 'in-use':
status = self.cinder_client.volumes.get(volume.id).status
print status
print "volume is in use Now : "+ status
def detach_volume_from_server(self,server,volume):
#self.nova_client.volumes.delete_server_volume(server.id,env['NOSE_VOLUME_ID'])
self.nova_client.volumes.delete_server_volume(server.id,volume.id)
def get_flavor_disk_size(self,flavor_id):
return self.nova_client.flavors.get(flavor_id).disk
def server_reboot(self,server,type):
serv=self.get_server(server.id)
serv.reboot(reboot_type=type)
def wait_server_is_up(self,server):
status = server.status
while status != 'ACTIVE':
status = self.get_server(server.id).status
print "server is up"
def wait_for_cloud_init(self,server):
while True:
console_log = self.get_console_log(server)
if re.search('^.*Cloud-init .* finished.*$', console_log, flags=re.MULTILINE):
print("Cloudinit finished")
break
else:
time.sleep(10)
def wait_server_available(self,server):
task_state = getattr(server,'OS-EXT-STS:task_state')
while task_state is not None:
task_state = getattr(self.get_server(server.id),'OS-EXT-STS:task_state')
print "the server is available"
def create_keypair(self):
suffix =self.current_time_ms()
keypair= self.nova_client.keypairs.create(name="nose_keypair"+suffix)
private_key_filename = env['HOME']+'/key-'+suffix+'.pem'
fp = os.open(private_key_filename, os.O_WRONLY | os.O_CREAT, 0o600)
with os.fdopen(fp, 'w') as f:
f.write(keypair.private_key)
return keypair , private_key_filename
def delete_keypair(self,keypair,private_key_filename):
self.nova_client.keypairs.delete(keypair.id)
os.remove(private_key_filename)
def create_port_with_sg(self):
body_value = {'port': {
'admin_state_up': True,
'security_groups': [env['NOSE_SG_ID']],
'name': 'port-test'+self.current_time_ms(),
'network_id': env['NOSE_NET_ID'],
}}
port=self.neutron_client.create_port(body=body_value)
time.sleep(20)
return port
def delete_port(self,port):
self.neutron_client.delete_port(port['port']['id'])
def create_volume(self):
volume=self
|
dana-i2cat/felix
|
msjp/module/common/__init__.py
|
Python
|
apache-2.0
| 108 | 0.027778 |
import os, glob
__all__ = [os.path.basename(f)[:-3] for f in glob.glob(os.path.dirname(__file__) + "/*.py")]
|
daineseh/kodi-plugin.video.ted-talks-chinese
|
youtube_dl/extractor/xvideos.py
|
Python
|
gpl-2.0
| 2,444 | 0.001227 |
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..compat import compat_urllib_parse_unquote
from ..utils import (
clean_html,
ExtractorError,
determine_ext,
)
class XVideosIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?xvideos\.com/video(?P<id>[0-9]+)(?:.*)'
_TEST = {
'url': 'http://www.xvideos.com/video4588838/biker_takes_his_girl',
'md5': '4b46ae6ea5e6e9086e714d883313c0c9',
'info_dict': {
'id': '4588838',
'ext': 'flv',
'title': 'Biker Takes his Girl',
'age_limit': 18,
}
}
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
mobj = re.search(r'<h1 class="inlineError">(.+?)</h1>', webpage)
if mobj:
raise ExtractorError('%s said: %s' % (self.IE_NAME, clean_html(mobj.group(1))), expected=True)
video_title = self._html_search_regex(
r'<title>(.*?)\s+-\s+XVID', webpage, 'title')
video_thumbnail = self._search_regex(
r'url_bigthumb=(.+?)&', webpage, 'thumbnail', fatal=False)
formats = []
video_url = compat_urllib_parse_unquote(self._search_regex(
r'flv_url=(.+?)&', webpage, 'video URL', default=''))
if video_url:
formats.append({'url': video_url})
player_args = self._search_regex(
r'(?s)new\s+HTML5Player\((.+?)\)', webpage, ' html5 player', default=None)
if player_args:
for arg in player_args.split(','):
format_url = self._search_regex(
r'(["\'])(?P<url>https?://.+?)\1', arg, 'url',
default=None, group='url')
if not format_url:
continue
ext = determine_ext(format_url)
if ext == 'mp4':
formats.append({'url': format_url})
elif ext == 'm3u8':
formats.extend(self._extract_m3u8_formats(
format_url, video_id, 'mp4',
entry_protocol='m3u8_native', m3u8_id='hls', fatal=False))
self._sort_formats(formats)
return {
'id': video_id,
'formats': formats,
'title': video_title,
'thumbnail': video_thumbnail,
'age_limit': 18,
}
|
kohnle-lernmodule/exe201based
|
exe/export/pages.py
|
Python
|
gpl-2.0
| 8,659 | 0.004735 |
# ===========================================================================
# eXe
# Copyright 2004-2005, University of Auckland
# Copyright 2004-2008 eXe Project, http://eXeLearning.org/
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# ===========================================================================
"""
Export Pages functions
"""
import logging
from urllib import quote
from exe.webui import common
log = logging.getLogger(__name__)
# ===========================================================================
class Page(object):
"""
This is an abstraction for a page containing a node
e.g. in a SCORM package or Website
"""
def __init__(self, name, depth, node):
"""
Initialize
"""
self.name = name
self.depth = depth
self.node = node
def renderLicense(self):
"""
Returns an XHTML string rendering the license.
"""
licenses = {"license GFDL": "http://www.gnu.org/copyleft/fdl.html",
"creative commons: attribution 2.5": "http://creativecommons.org/licenses/by/2.5/",
"creative commons: attribution - share alike 2.5": "http://creativecommons.org/licenses/by-sa/2.5/",
"creative commons: attribution - non derived work 2.5": "http://creativecommons.org/licenses/by-nd/2.5/",
"creative commons: attribution - non commercial 2.5": "http://creativecommons.org/licenses/by-nc/2.5/",
"creative commons: attribution - non commercial - share alike 2.5": "http://creativecommons.org/licenses/by-nc-sa/2.5/",
"creative commons: attribution - non derived work - non commercial 2.5": "http://creativecommons.org/licenses/by-nc-nd/2.5/",
"creative commons: attribution 3.0": "http://creativecommons.org/licenses/by/3.0/",
"creative commons: attribution - share alike 3.0": "http://creativecommons.org/licenses/by-sa/3.0/",
"creative commons: attribution - non derived work 3.0": "http://creativecommons.org/licenses/by-nd/3.0/",
"creative commons: attribution - non commercial 3.0": "http://creativecommons.org/licenses/by-nc/3.0/",
"creative commons: attribution - non commercial - share alike 3.0": "http://creativecommons.org/licenses/by-nc-sa/3.0/",
"creative commons: attribution - non derived work - non commercial 3.0": "http://creativecommons.org/licenses/by-nc-nd/3.0/",
"creative commons: attribution 4.0": "http://creativecommons.org/licenses/by/4.0/",
"creative commons: attribution - share alike 4.0": "http://creativecommons.org/licenses/by-sa/4.0/",
"creative commons: attribution - non derived work 4.0": "http://creativecommons.org/licenses/by-nd/4.0/",
"creative commons: attribution - non commercial 4.0": "http://creativecommons.org/licenses/by-nc/4.0/",
"creative commons: attribution - non commercial - share alike 4.0": "http://creativecommons.org/licenses/by-nc-sa/4.0/",
"creative commons: attribution - non derived work - non commercial 4.0": "http://creativecom
|
mons.org/licenses/by-nc-nd/4.0/",
"free software license GPL": "http://www.gnu.org/copyleft/gpl.html"
}
licenses_names = {"license GFDL": c_("GNU Free Documentation License"),
"creative commons: attribution 2.5": c_("Creative Commons Attribution License 2.5"),
"creative commons: attribution - share alike 2.5": c_("Creative Commons Attribution Share Alike License 2.5"),
"creative commons: attribution - non derived work 2.5": c_("Creative Commons Attribution No Derivatives License 2.5"),
"creative commons: attribution - non commercial 2.5": c_("Creative Commons Attribution Non-commercial License 2.5"),
"creative commons: attribution - non commercial - share alike 2.5": c_("Creative Commons Attribution Non-commercial Share Alike License 2.5"),
"creative commons: attribution - non derived work - non commercial 2.5": c_("Creative Commons Attribution Non-commercial No Derivatives License 2.5"),
"creative commons: attribution 3.0": c_("Creative Commons Attribution License 3.0"),
"creative commons: attribution - share alike 3.0": c_("Creative Commons Attribution Share Alike License 3.0"),
"creative commons: attribution - non derived work 3.0": c_("Creative Commons Attribution No Derivatives License 3.0"),
"creative commons: attribution - non commercial 3.0": c_("Creative Commons Attribution Non-commercial License 3.0"),
"creative commons: attribution - non commercial - share alike 3.0": c_("Creative Commons Attribution Non-commercial Share Alike License 3.0"),
"creative commons: attribution - non derived work - non commercial 3.0": c_("Creative Commons Attribution Non-commercial No Derivatives License 3.0"),
"creative commons: attribution 4.0": c_("Creative Commons Attribution License 4.0"),
"creative commons: attribution - share alike 4.0": c_("Creative Commons Attribution Share Alike License 4.0"),
"creative commons: attribution - non derived work 4.0": c_("Creative Commons Attribution No Derivatives License 4.0"),
"creative commons: attribution - non commercial 4.0": c_("Creative Commons Attribution Non-commercial License 4.0"),
"creative commons: attribution - non commercial - share alike 4.0": c_("Creative Commons Attribution Non-commercial Share Alike License 4.0"),
"creative commons: attribution - non derived work - non commercial 4.0": c_("Creative Commons Attribution Non-commercial No Derivatives License 4.0"),
"free software license GPL": c_("GNU General Public License")
}
html = ""
plicense = self.node.package.license
if plicense in licenses:
html += '<p align="center">'
html += c_("Licensed under the")
html += ' <a rel="license" href="%s">%s</a>' % (licenses[plicense], licenses_names[plicense])
if plicense == 'license GFDL':
html += ' <a href="fdl.html">(%s)</a>' % c_('Local Version')
html += '</p>'
return html
def renderFooter(self):
"""
Returns an XHTML string rendering the footer.
"""
dT = common.getExportDocType()
footerTag = "div"
if dT == "HTML5":
footerTag = "footer"
html = ""
if self.node.package.footer != "":
html += '<' + footerTag + ' id="siteFooter">'
html += self.node.package.footer + "</" + footerTag + ">"
return html
# ===========================================================================
def uniquifyNames(pages):
"""
Make sure all the page names are unique
"""
pageNames = {}
# First identify the duplicate names
for page in pages:
if page.name in pageNames:
pageNames[page.name] = 1
else:
pageNames[page.name] = 0
|
gaolichuang/py-essential
|
tests/testmods/fbar_foo_opt.py
|
Python
|
apache-2.0
| 719 | 0 |
# Copyright 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from essential.config import cfg
CONF = cfg.CONF
opt = cfg.StrOpt('foo')
CONF.register_opt(opt, group='fbar')
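# Illustrative usage sketch, not from the original file; it assumes essential.config keeps
# oslo.config's semantics, where a registered StrOpt is reachable as CONF.<group>.<name>.
if __name__ == '__main__':
    CONF(args=[], default_config_files=[])       # parse an empty command line
    print(CONF.fbar.foo)                         # -> None until a config file sets it
    CONF.set_override('foo', 'bar', group='fbar')
    print(CONF.fbar.foo)                         # -> 'bar'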
|
nlaanait/pyxrim
|
examples/hdf5_export.py
|
Python
|
mit
| 1,439 | 0.014593 |
"""
Created on 4/18/17
@author: Numan Laanait -- nlaanait@gmail.com
"""
#MIT License
#Copyright (c) 2017 Numan Laanait
#Permission is hereby granted, free of charge, to any person obtaining a copy
#of this software and associated documentation files (the "Software"), to deal
#in the Software without restriction, including without limitation the rights
#to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#copies of the Software, and to permit persons to whom the Software is
#furnished to do so, subject to the following conditions:
#The above copyright notice and this permission notice shall be included in all
#copies or substantial portions of the Software.
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
#SOFTWARE.
import pyxrim as px
import os
# define data paths
datadir = os.path.join(os.getcwd(),'data')
specfile = os.path.join(datadir,'BFO_STO_1_1.spec')
imagedir = os.path.join(datadir,'images')
# load ioHDF5
io = px.ioHDF5('test.h5')
io.scans_export(specfile,imagedir)
io.close()
|
googleapis/python-assured-workloads
|
samples/generated_samples/assuredworkloads_v1_generated_assured_workloads_service_create_workload_async.py
|
Python
|
apache-2.0
| 1,877 | 0.001598 |
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for CreateWorkload
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-assured-workloads
# [START assuredworkloads_v1_generated_AssuredWorkloadsService_CreateWorkload_async]
from google.cloud import assuredworkloads_v1
async def sample_create_workload():
# Create a client
client = assuredworkloads_v1.AssuredWorkloadsServiceAsyncClient()
# Initialize request argument(s)
workload = assuredworkloads_v1.Workload()
workload.display_name = "display_name_value"
workload.compliance_regime = "CA_REGIONS_AND_SUPPORT"
workload.billing_account = "billing_account_v
|
alue"
request = assuredworkloads_v1.CreateWorkloadRequest(
parent="parent_value",
workload=workload,
)
# Make the request
operation = client.create_workload(request=request)
print("Waiting for operation to complete...")
response = await operation.result()
# Handle the response
print(response)
# [END assuredworkloads_v1_generated_AssuredWorkloadsService_CreateWorkload_async]
|
jni/python-redshirt
|
redshirt/read.py
|
Python
|
mit
| 2,123 | 0.000471 |
import numpy as np
from skimage import io
def read_image(fn, normalize=True):
"""Read a CCD/CMOS image in .da format (Redshirt). [1_]
Parameters
----------
fn : string
The input filename.
Returns
-------
images : array, shape (nrow, ncol, nframes)
The images (normalized by the dark frame if desired).
frame_interval : float
The time elapsed between frames, in milliseconds.
bnc : array, shape (8, nframes)
The bnc data.
dark_frame : array, shape (nrow, ncol)
The dark frame by which the image data should be normalized.
Notes
-----
Interlaced images, as produced by the option "write directly to disk",
are not currently supported.
References
----------
.. [1] http://www.redshirtimaging.com/support/dfo.html
"""
data = np.fromfile(fn, dtype=np.int16)
header_size = 2560
header = data[:header_size]
ncols, nrows = map(int, header[384:386]) # prevent int16 overflow
nframes = int(header[4])
frame_interval = header[388] / 1000
acquisition_ratio = header[391]
if frame_interval >= 10:
frame_interval *= header[390] # dividing factor
image_size = nrows * ncols * nframes
bnc_start = header_size + image_size
images = np.reshape(np.array(data[header_size:bnc_start]),
(nrows, ncols, nframes))
bnc_end = bnc_start + 8 * acquisition_ratio * nframes
bnc = np.reshape(np.array(data[bnc_start:bnc_end]), (8, nframes * acquisition_ratio))
dark_frame = np.reshape(np.array(data[bnc_end:-8]), (nrows, ncols))
if normalize:
images -= dark_frame[..., np.newaxis]
return images, frame_interval, bnc, dark_frame
def convert_images(fns, normalize=True):
for fn in fns:
image, frame_interval, bnc, dark_frame = read_image(fn, normalize)
out_fn = fn[:-3] + '.tif'
out_fn_dark = fn[:-3] + '.dark_frame.tif'
io.imsave(out_fn, np.transpose(image, (2, 0, 1)),
plugin='tifffile', compress=1)
io.imsave(out_fn_dark, dark_frame, plugin='tifffile', compress=1)
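# Illustrative usage sketch (not part of the original file); 'recording.da' is a
# placeholder path. The return values follow the read_image docstring above.
if __name__ == '__main__':
    images, frame_interval, bnc, dark_frame = read_image('recording.da', normalize=True)
    print(images.shape)                       # (nrow, ncol, nframes)
    print(frame_interval, 'ms between frames')
    # convert_images writes one .tif stack per input plus a .dark_frame.tif for the dark frame
    convert_images(['recording.da'])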
|
yaroslavprogrammer/django-modeltranslation
|
modeltranslation/__init__.py
|
Python
|
bsd-3-clause
| 2,020 | 0 |
# -*- coding: utf-8 -*-
"""
Version code adopted from Django development version.
https://github.com/django/django
"""
VERSION = (0, 7, 2, 'final', 0)
def get_version(version=None):
"""
Returns a PEP 386-compliant version number from VERSION.
"""
if version is None:
from modeltranslation import VERSION as version
else:
assert len(version) == 5
assert version[3] in ('alpha', 'beta', 'rc', 'final')
# Now build the two parts of the version number:
# main = X.Y[.Z]
# sub = .devN - for pre-alpha releases
# | {a|b|c}N - for alpha, beta and rc releases
parts = 2 if version[2] == 0 else 3
main = '.'.join(str(x) for x in version[:parts])
sub = ''
if version[3] == 'alpha' and version[4] == 0:
git_changeset = get_git_changeset()
if git_changeset:
sub = '.dev%s' % git_changeset
elif version[3] != 'final':
mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'c'}
sub = mapping[version[3]] + str(version[4])
return str(main + sub)
def get_git_changeset():
"""
Returns a numeric identifier of the latest git changeset.
The result is the UTC timestamp of the changeset in YYYYMMDDHHMMSS format.
This value isn't guaranteed to be unique, but collisions are very unlikely,
so it's sufficient for generating the development version numbers.
TODO: Check if we can rely on services like read-the-docs to pick this up.
"""
import datetime
import os
import subprocess
repo_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
git_log = subprocess.Popen(
'git log --pretty=format:%ct --quiet -1 HEAD', stdout=subprocess.PIPE,
stderr=subprocess.PIPE, shell=True,
cwd=repo_dir,
universal_newlines=True)
timestamp = git_log.communicate()[0]
try:
timestamp = datetime.datetime.utcfromtimestamp(int(timestamp))
except ValueError:
return None
return timestamp.strftime('%Y%m%d%H%M%S')
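# Illustrative worked examples of the formatting above (the tuples are made-up inputs,
# not real releases of the package):
#   get_version((0, 7, 2, 'final', 0)) -> '0.7.2'   # three parts because version[2] != 0
#   get_version((0, 8, 0, 'beta', 1))  -> '0.8b1'   # 'beta' maps to 'b'
#   an 'alpha' with serial 0 appends '.devN', where N is the git changeset timestamp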
|
NervanaSystems/neon
|
examples/video-c3d/network.py
|
Python
|
apache-2.0
| 2,289 | 0.002184 |
# ******************************************************************************
# Copyright 2014-2018 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ******************************************************************************
from neon.initializers import Constant, Gaussian
from neon.layers import Conv, Dropout, Pooling, Affine, GeneralizedCost
from neon.models import Model
from neon.transforms import Rectlin, Softmax, CrossEntropyMulti
def create_network():
# weight initialization
g1 = Gaussian(scale=0.01)
g5 = Gaussian(scale=0.005)
c0 = Constant(0)
c1 = Constant(1)
# model initialization
padding = {'pad_d': 1, 'pad_h': 1, 'pad_w': 1}
strides = {'str_d': 2, 'str_h': 2, 'str_w': 2}
layers = [
Conv((3, 3, 3, 64), padding=padding, init=g1, bias=c0, activation=Rectlin()),
Pooling((1, 2, 2), strides={'str_d': 1, 'str_h': 2, 'str_w': 2}),
Conv((3, 3, 3, 128), padding=padding, init=g1, bias=c1, activation=Rectlin()),
Pooling((2, 2, 2), strides=strides),
Conv((3, 3, 3, 256), padding=padding, init=g1, bias=c1, activation=Rectlin()),
Pooling((2, 2, 2), strides=strides),
Conv((3, 3, 3, 256), padding=padding, init=g1, bias=c1, activation=Rectlin()),
Pooling((2, 2, 2), strides=strides),
Conv((3, 3, 3, 256), padding=padding, init=g1, bias=c1, activation=Rectlin()),
Pooling((2, 2, 2), strides=strides),
Affine(nout=2048, init=g5, bias=c1, activation=Rectlin()),
Dropout(keep=0.5),
Affine(nout=2048, init=g5, bias=c1, activation=Rectlin()),
Dropout(keep=0.5),
Affine(nout=101, init=g1, bias=c0, activation=Softmax())
]
return Model(layers=layers), GeneralizedCost(costfunc=CrossEntropyMulti())
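# Illustrative usage sketch, not part of the original file. The backend settings are
# assumptions; a real run would also need a video dataset iterator and an optimizer.
if __name__ == '__main__':
    from neon.backends import gen_backend
    be = gen_backend(backend='cpu', batch_size=32)   # layers need a backend before use
    model, cost = create_network()
    for layer in model.layers.layers:                # Model wraps the layer list in a container
        print(type(layer).__name__)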
|
hkernbach/arangodb
|
3rdParty/V8/v5.7.492.77/tools/gyp/pylib/gyp/generator/msvs.py
|
Python
|
apache-2.0
| 133,929 | 0.009796 |
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import copy
import ntpath
import os
import posixpath
import re
import subprocess
import sys
import gyp.common
import gyp.easy_xml as easy_xml
import gyp.generator.ninja as ninja_generator
import gyp.MSVSNew as MSVSNew
import gyp.MSVSProject as MSVSProject
import gyp.MSVSSettings as MSVSSettings
import gyp.MSVSToolFile as MSVSToolFile
import gyp.MSVSUserFile as MSVSUserFile
import gyp.MSVSUtil as MSVSUtil
import gyp.MSVSVersion as MSVSVersion
from gyp.common import GypError
from gyp.common import OrderedSet
# TODO: Remove once bots are on 2.7, http://crbug.com/241769
def _import_OrderedDict():
import collections
try:
return collections.OrderedDict
except AttributeError:
import gyp.ordered_dict
return gyp.ordered_dict.OrderedDict
OrderedDict = _import_OrderedDict()
# Regular expression for validating Visual Studio GUIDs. If the GUID
# contains lowercase hex letters, MSVS will be fine. However,
# IncrediBuild BuildConsole will parse the solution file, but then
# silently skip building the target causing hard to track down errors.
# Note that this only happens with the BuildConsole, and does not occur
# if IncrediBuild is executed from inside Visual Studio. This regex
# validates that the string looks like a GUID with all uppercase hex
# letters.
VALID_MSVS_GUID_CHARS = re.compile(r'^[A-F0-9\-]+$')
generator_default_variables = {
'DRIVER_PREFIX': '',
'DRIVER_SUFFIX': '.sys',
'EXECUTABLE_PREFIX': '',
'EXECUTABLE_SUFFIX': '.exe',
'STATIC_LIB_PREFIX': '',
'SHARED_LIB_PREFIX': '',
'STATIC_LIB_SUFFIX': '.lib',
'SHARED_LIB_SUFFIX': '.dll',
'INTERMEDIATE_DIR': '$(IntDir)',
'SHARED_INTERMEDIATE_DIR': '$(OutDir)obj/global_intermediate',
'OS': 'win',
'PRODUCT_DIR': '$(OutDir)',
'LIB_DIR': '$(OutDir)lib',
'RULE_INPUT_ROOT': '$(InputName)',
'RULE_INPUT_DIRNAME': '$(InputDir)',
'RULE_INPUT_EXT': '$(InputExt)',
'RULE_INPUT_NAME': '$(InputFileName)',
'RULE_INPUT_PATH': '$(InputPath)',
'CONFIGURATION_NAME': '$(ConfigurationName)',
}
# The msvs specific sections that hold paths
generator_additional_path_sections = [
'msvs_cygwin_dirs',
'msvs_props',
]
generator_additional_non_configuration_keys = [
'msvs_cygwin_dirs',
'msvs_cygwin_shell',
'msvs_large_pdb',
'msvs_shard',
'msvs_external_builder',
'msvs_external_builder_out_dir',
'msvs_external_builder_build_cmd',
'msvs_external_builder_clean_cmd',
'msvs_external_builder_clcompile_cmd',
'msvs_enable_winrt',
'msvs_requires_importlibrary',
'msvs_enable_winphone',
'msvs_application_type_revision',
'msvs_target_platform_version',
'msvs_target_platform_minversion',
]
generator_filelist_paths = None
# List of precompiled header related keys.
precomp_keys = [
'msvs_precompiled_header',
'msvs_precompiled_source',
]
cached_username = None
cached_domain = None
# TODO(gspencer): Switch the os.environ calls to be
# win32api.GetDomainName() and win32api.GetUserName() once the
# python version in depot_tools has been updated to work on Vista
# 64-bit.
def _GetDomainAndUserName():
if sys.platform not in ('win32', 'cygwin'):
return ('DOMAIN', 'USERNAME')
global cached_username
global cached_domain
if not cached_domain or not cached_username:
domain = os.environ.get('USERDOMAIN')
username = os.environ.get('USERNAME')
if not domain or not username:
call = subprocess.Popen(['net', 'config', 'Workstation'],
stdout=subprocess.PIPE)
config = call.communicate()[0]
username_re = re.compile(r'^User name\s+(\S+)', re.MULTILINE)
username_match = username_re.search(config)
if username_match:
username = username_match.group(1)
domain_re = re.compile(r'^Logon domain\s+(\S+)', re.MULTILINE)
domain_match = domain_re.search(config)
if domain_match:
domain = domain_match.group(1)
cached_domain = domain
cached_username = username
return (cached_domain, cached_username)
fixpath_prefix = None
def _NormalizedSource(source):
"""Normalize the path.
But not if that gets rid of a variable, as this may expand to something
larger than one directory.
Arguments:
source: The path to be normalized.
Returns:
The normalized path.
"""
normalized = os.path.normpath(source)
if source.count('$') == normalized.count('$'):
source = normalized
return source
def _FixPath(path):
"""Convert paths to a form that will make sense in a vcproj file.
Arguments:
path: The path to convert, may contain / etc.
Returns:
The path with all slashes made into backslashes.
"""
if fixpath_prefix and path and not os.path.isabs(path) and not path[0] == '$':
path = os.path.join(fixpath_prefix, path)
path = path.replace('/', '\\')
path = _NormalizedSource(path)
if path and path[-1] == '\\':
path = path[:-1]
return path
def _FixPaths(paths):
"""Fix each of the paths of the list."""
return [_FixPath(i) for i in paths]
def _ConvertSourcesToFilterHierarchy(sources, prefix=None, excluded=None,
list_excluded=True, msvs_version=None):
"""Converts a list split source file paths into a vcproj folder hierarchy.
Arguments:
sources: A list of source file paths split.
prefix: A list of source file path layers meant to apply to each of sources.
excluded: A set of excluded files.
msvs_version: A MSVSVersion object.
Returns:
A hierarchy of filenames and MSVSProject.Filter objects that matches the
layout of the source tree.
For example:
_ConvertSourcesToFilterHierarchy([['a', 'bob1.c'], ['b', 'bob2.c']],
prefix=['joe'])
-->
[MSVSProject.Filter('a', contents=['joe\\a\\bob1.c']),
MSVSProject.Filter('b', contents=['joe\\b\\bob2.c'])]
"""
if not prefix: prefix = []
result = []
excluded_result = []
folders = OrderedDict()
# Gather files into the final result, excluded, or folders.
for s in sources:
if len(s) == 1:
filename = _NormalizedSource('\\'.join(prefix + s))
if filename in excluded:
excluded_result.append(filename)
else:
result.append(filename)
elif msvs_version and not msvs_version.UsesVcxproj():
# For MSVS 2008 and earlier, we need to process all files before walking
# the sub folders.
if not folders.get(s[0]):
folders[s[0]] = []
folders[s[0]].append(s[1:])
else:
contents = _ConvertSourcesToFilterHierarchy([s[1:]], prefix + [s[0]],
excluded=excluded,
list_excluded=list_excluded,
msvs_version=msvs_version)
contents = MSVSProject.Filter(s[0], contents=contents)
result.append(contents)
# Add a folder for excluded files.
if excluded_result and list_excluded:
excluded_folder = MSVSProject.Filter('_excluded_files',
contents=excluded_result)
result.append(excluded_folder)
if msvs_version and msvs_version.UsesVcxproj():
return result
# Populate all the folders.
for f in folders:
contents = _ConvertSourcesToFilterHierarchy(folders[f], prefix=prefix + [f],
excluded=excluded,
list_excluded=list_excluded,
msvs_version=msvs_version)
contents = MSVSProject.Filter(f, contents=contents)
result.append(contents)
return result
def _ToolAppend(tools, tool_name, setting, value, only_if_unset=False):
if not value: return
_ToolSetOrAppend(tools, tool_name, setting, value, only_if_unset)
def _ToolSetOrAppend(tools, tool_name, setting, value, only_if_unset=False):
# TODO(bradnelson): ugly hack, fix this more generally!!!
if 'Directories' in setting or 'Dep
|
rgardler/acs-logging-test
|
src/runSimulation.py
|
Python
|
apache-2.0
| 269 | 0.003717 |
"""
Generate simulated logs, placing new items into the queue.
Process the Queue, generating summary data and
appending entries to the log store
"""
import analyzer
import simulatedLogs
# simulate_logs
simulatedLogs.simulate()
# process queue
analyzer.complete()
|
belangeo/pyo
|
pyo/examples/09-callbacks/03-delayed-calls.py
|
Python
|
lgpl-3.0
| 2,090 | 0.000478 |
"""
03-delayed-calls.py - Calling a function once, after a given delay.
If you want to setup a callback once in the future, the CallAfter
object is very easy to use. You just give it the function name, the
time to wait before making the call and an optional argument.
"""
from pyo import *
s = Server().boot()
# A four-streams oscillator to produce a chord.
amp = Fader(fadein=0.005, fadeout=0.05, mul=0.2).play()
osc = SineLoop(freq=[0, 0, 0, 0], feedback=0.05, mul=amp)
rev = WGVerb(osc.mix(2), feedback=0.8, cutoff=4000, bal=0.2).out()
# A function to change the oscill
|
ator's frequencies and start the envelope.
def set_osc_freqs(notes):
print(notes)
osc.set(attr="freq", value=midiToHz(list(notes)), port=0.005)
amp.play()
# Initial chord.
set_osc_freqs([60, 64, 67, 72])
# We must be sure that our CallAfter object stays alive as long as
# it waits to call its function. If we don't keep a reference to it,
# it will be garbage-collected before doing its job.
call = None
def new_notes(notes):
global call # Use a global variable.
amp.stop() # Start the fadeout of the current notes...
# ... then, 50 ms later, call the function that change the frequencies.
call = CallAfter(set_osc_freqs, time=0.05, arg=notes)
# The sequence of events. We use a tuple for the list of frequencies
# because PyoObjects spread lists as argument over all their internal
# streams. This means that with a list of frequencies, only the first
# frequency would be passed to the callback of the first (and single)
# stream (a list of functions at first argument would create a
# multi-stream object). A tuple is treated as a single argument.
c1 = CallAfter(new_notes, time=0.95, arg=(60, 64, 67, 69))
c2 = CallAfter(new_notes, time=1.95, arg=(60, 65, 69, 76))
c3 = CallAfter(new_notes, time=2.95, arg=(62, 65, 69, 74))
c4 = CallAfter(new_notes, time=3.45, arg=(59, 65, 67, 74))
c5 = CallAfter(new_notes, time=3.95, arg=(60, 64, 67, 72))
# The last event activates the fadeout of the amplitude envelope.
c6 = CallAfter(amp.stop, time=5.95, arg=None)
s.gui(locals())
|
dmanev/ArchExtractor
|
ArchExtractor/umlgen/Specific/STK/StkParser/StkCFileCriteria/StkCHeaderProvDATControlCriteria.py
|
Python
|
gpl-3.0
| 1,064 | 0.007519 |
import StkPortInterfaces.StkDATControlIf
import PortInterface.ProvidedPort
import re
import StkParser.StkPortCriteria
import Components.IComponent
import Parser.IPortCriteria
class StkCHeaderProvDATControlCriteria(StkParser.StkPortCriteria.StkPortCriteria):
"""STK C Header file provided DATControl criteria"""
def execute(self, inpTextContent, inoutIComponent):
## Bouml preserved body begin 000389EF
for datControl in re.findall(r'\#define\s+mDATControl(\w+)\s*\(\s*\w+\s*\)',
inpTextContent):
pif = self.getPortInterfaceFactory()
dtf = self.getDataTypeFactory()
clSrvIntIf = pif.getStkDATControlIf(datControl, dtf)
provPort = PortInterface.ProvidedPort.ProvidedPort(clSrvIntIf)
provPort.setName(datControl)
provPort.setInterface(clSrvIntIf)
inoutIComponent.addPort(provPort)
## Bouml preserved body end 000389EF
def __init__(self):
super(StkCHeaderProvDATControlCriteria, self).__init__()
pass
|
kubeflow/pipelines
|
sdk/python/kfp/compiler_cli_tests/test_data/two_step_pipeline.py
|
Python
|
apache-2.0
| 2,004 | 0 |
# Copyright 2020 The Kubeflow Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pathlib
from kfp import components
from kfp import dsl
from kfp import compiler
component_op_1 = components.load_component_from_text("""
name: Write to GCS
inputs:
- {name: text, type: String, description: 'Content to be written to GCS'}
outputs:
- {name: output_gcs_path, type: GCSPath, description: 'GCS file path'}
implementation:
container:
image: google/cloud-sdk:slim
command:
- sh
- -c
- |
set -e -x
echo "$0" | gsutil cp - "$1"
- {inputValue: text}
- {outputUri: output_gcs_path}
""")
component_op_2 = components.load_component_from_text("""
name: Read from GCS
inputs:
- {name: input_gcs_path, type: GCSPath, description: 'GCS file path'}
implementation:
container:
image: google/cloud-sdk:slim
command:
- sh
- -c
- |
set -e -x
gsutil cat "$0"
- {inputUri: input_gcs_path}
""")
@dsl.pipeline(name='simple-two-step-pipeline', pipeline_root='dummy_root')
def my_pipeline(text: str = 'Hello world!'):
component_1 = component_op_1(text=text).set_display_name('Producer')
component_2 = component_op_2(
input_gcs_path=component_1.outputs['output_gcs_path'])
component_2.set_display_name('Consumer')
if __name__ == '__main__':
compiler.Compiler().compile(
pipeline_func=my_pipeline,
pipeline_parameters={'text': 'Hello KFP!'},
package_path=__file__.replace('.py', '.json'))
|
it-projects-llc/pos-addons
|
wechat/__init__.py
|
Python
|
mit
| 100 | 0 |
# License MIT (https://opensource.org/licenses/MIT).
from . import models
from . import controllers
|
ychen820/microblog
|
y/google-cloud-sdk/platform/google_appengine/lib/cherrypy/cherrypy/lib/caching.py
|
Python
|
bsd-3-clause
| 17,413 | 0.003963 |
"""
CherryPy implements a simple caching system as a pluggable Tool. This tool tries
to be an (in-process) HTTP/1.1-compliant cache. It's not quite there yet, but
it's probably good enough for most sites.
In general, GET responses are cached (along with selecting headers) and, if
another request arrives for the same resource, the caching Tool will return 304
Not Modified if possible, or serve the cached response otherwise. It also sets
request.cached to True if serving a cached representation, and sets
request.cacheable to False (so it doesn't get cached again).
If POST, PUT, or DELETE requests are made for a cached resource, they invalidate
(delete) any cached response.
Usage
=====
Configuration file example::
[/]
tools.caching.on = True
tools.caching.delay = 3600
You may use a class other than the default
:class:`MemoryCache<cherrypy.lib.caching.MemoryCache>` by supplying the config
entry ``cache_class``; supply the full dotted name of the replacement class
as the config value. It must implement the basic methods ``get``, ``put``,
``delete``, and ``clear``.
You may set any attribute, including overriding methods, on the cache
instance by providing them in config. The above sets the
:attr:`delay<cherrypy.lib.caching.MemoryCache.delay>` attribute, for example.
"""
import datetime
import sys
import threading
import time
import cherrypy
from cherrypy.lib import cptools, httputil
from cherrypy._cpcompat import copyitems, ntob, set_daemon, sorted
class Cache(object):
"""Base class for Cache implementations."""
def get(self):
"""Return the current variant if in the cache, else None."""
raise NotImplemented
def put(self, obj, size):
"""Store the current variant in the cach
|
e."""
raise NotImplemented
def delete(self):
"""Remove ALL cached variants of the current resource."""
raise NotImplemented
def clear(self):
"""Reset the cache to its initial, empty state."""
raise NotImplemented
# ------------------------------- Memory Cache ------------------------------- #
class AntiStampedeCache(dict):
"""A storage system for cached items which reduces stampede collisions."""
def wait(self, key, timeout=5, debug=False):
"""Return the cached value for the given key, or None.
If timeout is not None, and the value is already
being calculated by another thread, wait until the given timeout has
elapsed. If the value is available before the timeout expires, it is
returned. If not, None is returned, and a sentinel is placed in the cache
to signal other threads to wait.
If timeout is None, no waiting is performed nor sentinels used.
"""
value = self.get(key)
if isinstance(value, threading._Event):
if timeout is None:
# Ignore the other thread and recalc it ourselves.
if debug:
cherrypy.log('No timeout', 'TOOLS.CACHING')
return None
# Wait until it's done or times out.
if debug:
cherrypy.log('Waiting up to %s seconds' % timeout, 'TOOLS.CACHING')
value.wait(timeout)
if value.result is not None:
# The other thread finished its calculation. Use it.
if debug:
cherrypy.log('Result!', 'TOOLS.CACHING')
return value.result
# Timed out. Stick an Event in the slot so other threads wait
# on this one to finish calculating the value.
if debug:
cherrypy.log('Timed out', 'TOOLS.CACHING')
e = threading.Event()
e.result = None
dict.__setitem__(self, key, e)
return None
elif value is None:
# Stick an Event in the slot so other threads wait
# on this one to finish calculating the value.
if debug:
cherrypy.log('Timed out', 'TOOLS.CACHING')
e = threading.Event()
e.result = None
dict.__setitem__(self, key, e)
return value
def __setitem__(self, key, value):
"""Set the cached value for the given key."""
existing = self.get(key)
dict.__setitem__(self, key, value)
if isinstance(existing, threading._Event):
# Set Event.result so other threads waiting on it have
# immediate access without needing to poll the cache again.
existing.result = value
existing.set()
class MemoryCache(Cache):
"""An in-memory cache for varying response content.
Each key in self.store is a URI, and each value is an AntiStampedeCache.
The response for any given URI may vary based on the values of
"selecting request headers"; that is, those named in the Vary
response header. We assume the list of header names to be constant
for each URI throughout the lifetime of the application, and store
that list in ``self.store[uri].selecting_headers``.
The items contained in ``self.store[uri]`` have keys which are tuples of
request header values (in the same order as the names in its
selecting_headers), and values which are the actual responses.
"""
maxobjects = 1000
"""The maximum number of cached objects; defaults to 1000."""
maxobj_size = 100000
"""The maximum size of each cached object in bytes; defaults to 100 KB."""
maxsize = 10000000
"""The maximum size of the entire cache in bytes; defaults to 10 MB."""
delay = 600
"""Seconds until the cached content expires; defaults to 600 (10 minutes)."""
antistampede_timeout = 5
"""Seconds to wait for other threads to release a cache lock."""
expire_freq = 0.1
"""Seconds to sleep between cache expiration sweeps."""
debug = False
def __init__(self):
self.clear()
# Run self.expire_cache in a separate daemon thread.
t = threading.Thread(target=self.expire_cache, name='expire_cache')
self.expiration_thread = t
set_daemon(t, True)
t.start()
def clear(self):
"""Reset the cache to its initial, empty state."""
self.store = {}
self.expirations = {}
self.tot_puts = 0
self.tot_gets = 0
self.tot_hist = 0
self.tot_expires = 0
self.tot_non_modified = 0
self.cursize = 0
def expire_cache(self):
"""Continuously examine cached objects, expiring stale ones.
This function is designed to be run in its own daemon thread,
referenced at ``self.expiration_thread``.
"""
# It's possible that "time" will be set to None
# arbitrarily, so we check "while time" to avoid exceptions.
# See tickets #99 and #180 for more information.
while time:
now = time.time()
# Must make a copy of expirations so it doesn't change size
# during iteration
for expiration_time, objects in copyitems(self.expirations):
if expiration_time <= now:
for obj_size, uri, sel_header_values in objects:
try:
del self.store[uri][tuple(sel_header_values)]
self.tot_expires += 1
self.cursize -= obj_size
except KeyError:
# the key may have been deleted elsewhere
pass
del self.expirations[expiration_time]
time.sleep(self.expire_freq)
def get(self):
"""Return the current variant if in the cache, else None."""
request = cherrypy.serving.request
self.tot_gets += 1
uri = cherrypy.url(qs=request.query_string)
uricache = self.store.get(uri)
if uricache is None:
return None
header_values = [request.headers.get(h, '')
|
google-research/sloe-logistic
|
setup.py
|
Python
|
apache-2.0
| 1,690 | 0.001183 |
# Copyright 2021 The SLOE Logistic Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Builds sloe_logistic package."""
from distutils import core
from distutils.command import build_clib
from pybind11.setup_helpers import build_ext
from pybind11.setup_helpers import Pybind11Extension
libraries = [
("scipy_brentq", {
"sources": ["third_party/py/scipy/optimize/Zeros/brentq.c",],
}),
]
ext_modules = [
Pybind11Extension("sloe_logistic.mle_param_integrands", [
"mle_param_integrands.cc",
]),
]
core.setup(
name="sloe_logistic",
version="0.0.1",
description="Implements SLOE method and Logistic Regression Inference",
long_description="Code to supplement the ICML submission SLOE: A Faster "
"Method for Statistical Inference in High-Dimensional Logistic Regression.",
packages=["sloe_logistic", "sloe_logistic.sloe_experiments"],
package_dir={
"sloe_logistic": ".",
"sloe_logistic.sloe_experiments": "sloe_experiments/"
},
libraries=libraries,
ext_modules=ext_modules,
cmdclass={
"build_ext": build_ext,
"build_clib": build_clib.build_clib,
},
zip_safe=False,
)
|
ramusus/django-vkontakte-video
|
vkontakte_video/admin.py
|
Python
|
bsd-3-clause
| 1,566 | 0.002581 |
# -*- coding: utf-8 -*-
from django.contrib import admin
from django.core.urlresolvers import reverse
from vkontakte_api.admin import VkontakteModelAdmin
from .models import Album, Video
class VideoInline(admin.TabularInline):
def image(self, instance):
return '<img src="%s" />' % (instance.photo_130,)
image.short_description = 'video'
image.allow_tags = True
model = Video
fields = ('title', 'image', 'owner', 'comments_count', 'views_count')
readonly_fields = fields
extra = False
can_delete = False
class AlbumAdmin(VkontakteModelAdmin):
def image_preview(self, obj):
return u'<a href="%s"><img src="%s" height="30" /></a>' % (obj.photo_160, obj.photo_160)
image_preview.short_description = u'Картинка'
image_preview.allow_tags = True
list_display = ('image_preview', 'remote_id', 'title', 'owner', 'videos_count')
list_display_links = ('title', 'remote_id',)
search_fields = ('title', 'description')
inlines = [VideoInline]
class VideoAdmin(VkontakteModelAdmin):
def image_preview(self, obj):
return u'<a href="%s"><img src="%s" height="30" /></a>' % (obj.photo_130, obj.photo_130)
image_preview.short_description = u'Картинка'
image_preview.allow_tags = True
list_display = ('image_preview', 'remote_id', 'owner', 'album', 'title', 'comments_count', 'views_count', 'date')
list_display_links = ('remote_id', 'title')
list_filter = ('album',)
admin.site.register(Album, AlbumAdmin)
admin.site.register(Video, VideoAdmin)
|
fishstamp82/loprop
|
test/h2o_data.py
|
Python
|
gpl-3.0
| 11,431 | 0.017234 |
from ..daltools.util.full import init
Z = [8., 1., 1.]
Rc = init([0.00000000, 0.00000000, 0.48860959])
Dtot = [0, 0, -0.76539388]
Daa = init([
[ 0.00000000, 0.00000000, -0.28357300],
[ 0.15342658, 0.00000000, 0.12734703],
[-0.15342658, 0.00000000, 0.12734703],
])
QUc = init([-7.31176220, 0., 0., -5.43243232, 0., -6.36258665])
QUN = init([4.38968295, 0., 0., 0., 0., 1.75400326])
QUaa = init([
[-3.29253618, 0.00000000, 0.00000000, -4.54316657, 0.00000000, -4.00465380],
[-0.13213704, 0.00000000, 0.24980518, -0.44463288, 0.00000000, -0.26059139],
[-0.13213704, 0.00000000,-0.24980518, -0.44463288, 0.00000000, -0.26059139]
])
Fab = init([
[-0.11E-03, 0.55E-04, 0.55E-04],
[ 0.55E-04, -0.55E-04, 0.16E-30],
[ 0.55E-04, 0.16E-30, -0.55E-04]
])
Lab = init([
[0.11E-03, 0.28E-03, 0.28E-03],
[0.28E-03, 0.17E-03, 0.22E-03],
[0.28E-03, 0.22E-03, 0.17E-03]
])
la = init([
[0.0392366,-27.2474016 , 27.2081650],
[0.0358964, 27.2214515 ,-27.2573479],
[0.01211180, -0.04775576, 0.03564396],
[0.01210615, -0.00594030, -0.00616584],
[10.69975088, -5.34987556, -5.34987532],
[-10.6565582, 5.3282791 , 5.3282791]
])
O = [
0.76145382,
-0.00001648, 1.75278523,
-0.00007538, 0.00035773, 1.39756345
]
H1O = [
3.11619527,
0.00019911, 1.25132346,
2.11363325, 0.00111442, 2.12790474
]
H1 = [
0.57935224,
0.00018083, 0.43312326,
0.11495546, 0.00004222, 0.45770123
]
H2O = [
3.11568759,
0.00019821, 1.25132443,
-2.11327482, -0.00142746, 2.12790473
]
H2H1 = [
0.04078206,
-0.00008380, -0.01712262,
-0.00000098, 0.00000084, -0.00200285
]
H2 = [
0.57930522,
0.00018221, 0.43312149,
-0.11493635, -0.00016407, 0.45770123
]
Aab = init([O, H1O, H1, H2O, H2H1, H2])
Aa = init([
[ 3.87739525, 0.00018217, 3.00410918, 0.00010384, 0.00020122, 3.52546819 ],
[ 2.15784091, 0.00023848, 1.05022368, 1.17177159, 0.00059985, 1.52065218 ],
[ 2.15754005, 0.00023941, 1.05022240, -1.17157425, -0.00087738, 1.52065217 ]
])
ff = 0.001
rMP = init([
#O
[
[-8.70343886, 0.00000000, 0.00000000, -0.39827574, -3.68114747, 0.00000000, 0.00000000, -4.58632761, 0.00000000, -4.24741556],
[-8.70343235, 0.00076124, 0.00000000, -0.39827535, -3.68114147, 0.00000000, 0.00193493, -4.58631888, 0.00000000, -4.24741290],
[-8.70343291,-0.00076166, 0.00000000, -0.39827505, -3.68114128, 0.00000000, -0.00193603, -4.58631789, 0.00000000, -4.24741229],
[-8.70343685,-0.00000006, 0.00175241, -0.39827457, -3.68114516, 0.00000000, 0.00000161, -4.58632717, 0.00053363, -4.24741642],
[-8.70343685, 0.00000000, -0.00175316, -0.39827456, -3.68114514, 0.00000000, 0.00000000, -4.58632711, -0.00053592, -4.24741639],
[-8.70166502, 0.00000000, 0.00000144, -0.39688042, -3.67884999, 0.00000000, 0.00000000, -4.58395384, 0.00000080, -4.24349307],
[-8.70520554, 0.00000000, 0.00000000, -0.39967554, -3.68344246, 0.00000000, 0.00000000, -4.58868836, 0.00000000, -4.25134640],
],
#H1O
[
[ 0.00000000, 0.10023328, 0.00000000, 0.11470275, 0.53710687, 0.00000000, 0.43066796, 0.04316104, 0.00000000, 0.36285790],
[ 0.00150789, 0.10111974, 0.00000000, 0.11541803, 0.53753360, 0.00000000, 0.43120945, 0.04333774, 0.00000000, 0.36314215],
    [-0.00150230, 0.09934695, 0.00000000, 0.11398581, 0.53667861, 0.00000000, 0.43012612, 0.04298361, 0.00000000, 0.36257249],
[ 0.00000331, 0.10023328, 0.00125017, 0.11470067, 0.53710812, -0.00006107, 0.43066944, 0.04316020, 0.00015952, 0.36285848],
[ 0.00000100, 0.10023249, -0.00125247, 0.11470042, 0.53710716, 0.00006135, 0.43066837, 0.04316018, -0.00015966, 0.36285788],
[ 0.00088692, 0.10059268, -0.00000064, 0.11590322, 0.53754715, -0.00000006, 0.43071206, 0.04334198, -0.00000015, 0.36330053],
[-0.00088334, 0.09987383, 0.00000000, 0.11350091, 0.53666602, 0.00000000, 0.43062352, 0.04297910, 0.00000000, 0.36241326],
],
#H1
[
[-0.64828057, 0.10330994, 0.00000000, 0.07188960, -0.47568174, 0.00000000, -0.03144252, -0.46920879, 0.00000000, -0.50818752],
[-0.64978846, 0.10389186, 0.00000000, 0.07204462, -0.47729337, 0.00000000, -0.03154159, -0.47074619, 0.00000000, -0.50963693],
[-0.64677827, 0.10273316, 0.00000000, 0.07173584, -0.47408263, 0.00000000, -0.03134407, -0.46768337, 0.00000000, -0.50674873],
[-0.64828388, 0.10331167, 0.00043314, 0.07189029, -0.47568875, -0.00023642, -0.03144270, -0.46921635, -0.00021728, -0.50819386],
[-0.64828157, 0.10331095, -0.00043311, 0.07188988, -0.47568608, 0.00023641, -0.03144256, -0.46921346, 0.00021729, -0.50819095],
[-0.64916749, 0.10338629, -0.00000024, 0.07234862, -0.47634698, 0.00000013, -0.03159569, -0.47003679, 0.00000011, -0.50936853],
[-0.64739723, 0.10323524, 0.00000000, 0.07143322, -0.47502412, 0.00000000, -0.03129003, -0.46838912, 0.00000000, -0.50701656],
],
#H2O
[
[ 0.00000000,-0.10023328, 0.00000000, 0.11470275, 0.53710687, 0.00000000, -0.43066796, 0.04316104, 0.00000000, 0.36285790],
[-0.00150139,-0.09934749, 0.00000000, 0.11398482, 0.53667874, 0.00000000, -0.43012670, 0.04298387, 0.00000000, 0.36257240],
[ 0.00150826,-0.10112008, 0.00000000, 0.11541676, 0.53753350, 0.00000000, -0.43120982, 0.04333795, 0.00000000, 0.36314186],
[-0.00000130,-0.10023170, 0.00125018, 0.11470018, 0.53710620, 0.00006107, -0.43066732, 0.04316017, 0.00015952, 0.36285728],
[ 0.00000101,-0.10023249, -0.00125247, 0.11470042, 0.53710716, -0.00006135, -0.43066838, 0.04316018, -0.00015966, 0.36285788],
[ 0.00088692,-0.10059268, -0.00000064, 0.11590322, 0.53754715, 0.00000006, -0.43071206, 0.04334198, -0.00000015, 0.36330053],
[-0.00088334,-0.09987383, 0.00000000, 0.11350091, 0.53666602, 0.00000000, -0.43062352, 0.04297910, 0.00000000, 0.36241326],
],
#H2H1
[
[ 0.00000000, 0.00000000, 0.00000000, -0.00378789, 0.00148694, 0.00000000, 0.00000000, 0.00599079, 0.00000000, 0.01223822],
[ 0.00000000, 0.00004089, 0.00000000, -0.00378786, 0.00148338, 0.00000000, -0.00004858, 0.00599281, 0.00000000, 0.01224094],
[ 0.00000000,-0.00004067, 0.00000000, -0.00378785, 0.00148341, 0.00000000, 0.00004861, 0.00599277, 0.00000000, 0.01224093],
[ 0.00000000,-0.00000033, -0.00001707, -0.00378763, 0.00149017, 0.00000000, 0.00000001, 0.00599114, -0.00001229, 0.01223979],
[ 0.00000000, 0.00000000, 0.00001717, -0.00378763, 0.00149019, 0.00000000, 0.00000000, 0.00599114, 0.00001242, 0.01223980],
[ 0.00000000, 0.00000000, 0.00000000, -0.00378978, 0.00141897, 0.00000000, 0.00000000, 0.00590445, 0.00000002, 0.01210376],
[ 0.00000000, 0.00000000, 0.00000000, -0.00378577, 0.00155694, 0.00000000, 0.00000000, 0.00607799, 0.00000000, 0.01237393],
],
#H2
[
[-0.64828057,-0.10330994, 0.00000000, 0.07188960, -0.47568174, 0.00000000, 0.03144252, -0.46920879, 0.00000000, -0.50818752],
[-0.64677918,-0.10273369, 0.00000000, 0.07173576, -0.47408411, 0.00000000, 0.03134408, -0.46768486, 0.00000000, -0.50674986],
[-0.64978883,-0.10389230, 0.00000000, 0.07204446, -0.47729439, 0.00000000, 0.03154159, -0.47074717, 0.00000000, -0.50963754],
[-0.64827927,-0.10331022, 0.00043313, 0.07188947, -0.47568340, 0.00023642, 0.03144242, -0.46921057, -0.00021727, -0.50818804],
[-0.64828158,-0.10331095, -0.00043311, 0.07188988, -0.47568609, -0.00023641, 0.03144256, -0.46921348, 0.00021729, -0.50819097],
[-0.64916749,-0.10338629, -0.00000024, 0.07234862, -0.47634698, -0.00000013, 0.03159569, -0.47003679, 0.00000011, -0.50936853],
[-0.64739723,-0.10323524, 0.00000000, 0.07143322, -0.47502412, 0.00000000, 0.03
|
mick-d/nipype
|
nipype/algorithms/tests/test_auto_MeshWarpMaths.py
|
Python
|
bsd-3-clause
| 1,014 | 0.013807 |
# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
from __future__ import unicode_literals
from ..mesh import MeshWarpMaths
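# These auto-generated tests assert that every trait-metadata value declared
# in the expected input/output maps matches the generated interface spec.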
def test_MeshWarpMaths_inputs():
    input_map = dict(float_trait=dict(),
ignore_exception=dict(nohash=True,
usedefault=True,
),
in_surf=dict(mandatory=True,
),
operation=dict(usedefault=True,
),
operator=dict(mandatory=True,
),
out_file=dict(usedefault=True,
),
out_warp=dict(usedefault=True,
),
)
inputs = MeshWarpMaths.input_spec()
for key, metadata in list(input_map.items()):
for metakey, value in list(metadata.items()):
assert getattr(inputs.traits()[key], metakey) == value
def test_MeshWarpMaths_outputs():
output_map = dict(out_file=dict(),
out_warp=dict(),
)
outputs = MeshWarpMaths.output_spec()
for key, metadata in list(output_map.items()):
for metakey, value in list(metadata.items()):
assert getattr(outputs.traits()[key], metakey) == value
|
shire210/Shire-HA
|
custom_components/hue_custom/device_tracker.py
|
Python
|
mit
| 3,939 | 0.000508 |
"""
Sensor for checking the status of Hue sensors.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.hue/
"""
import asyncio
import async_timeout
from datetime import timedelta
import logging
import homeassistant.util.dt as dt_util
from homeassistant.const import (
STATE_HOME,
STATE_NOT_HOME,
ATTR_GPS_ACCURACY,
ATTR_LATITUDE,
ATTR_LONGITUDE,
)
from homeassistant.helpers.event import async_track_time_interval
from homeassistant.components.device_tracker import (
CONF_SCAN_INTERVAL,
DEFAULT_SCAN_INTERVAL,
DOMAIN,
PLATFORM_SCHEMA,
DeviceScanner,
)
from homeassistant.util import slugify
from homeassistant.components import zone
__version__ = "1.0.5"
DEPENDENCIES = ["hue"]
_LOGGER = logging.getLogger(__name__)
TYPE_GEOFENCE = "Geofence"
SCAN_INTERVAL = DEFAULT_SCAN_INTERVAL
def get_bridges(hass):
from homeassistant.components import hue
from homeassistant.components.hue.bridge import HueBridge
return [
entry
for entry in hass.data[hue.DOMAIN].values()
if isinstance(entry, HueBridge) and entry.api
]
async def update_api(api):
import aiohue
try:
with async_timeout.timeout(10):
await api.update()
except (asyncio.TimeoutError, aiohue.AiohueException) as err:
_LOGGER.debug("Failed to fetch sensors: %s", err)
return False
return True
async def async_setup_scanner(hass, config, async_see, discovery_info=None):
interval = config.get(CONF_SCAN_INTERVAL, SCAN_INTERVAL)
scanner = HueDeviceScanner(hass, async_see)
await scanner.async_start(hass, interval)
return True
class HueDeviceScanner(DeviceScanner):
def __init__(self, hass, async_see):
"""Initialize the scanner."""
self.hass = hass
self.async_see = async_see
async def async_start(self, hass, interval):
"""Perform a first update and start polling at the given interval."""
await self.async_update_info()
interval = max(interval, SCAN_INTERVAL)
async_track_time_interval(hass, self.async_update_info, interval)
async def async_see_sensor(self, sensor):
last_updated = sensor.state.get("lastupdated")
if not last_updated or last_updated == "none":
return
kwargs = {
"dev_id": slugify("hue_{}".format(sensor.name)),
"host_name": sensor.name,
"attributes": {
"last_updated": dt_util.as_local(dt_util.parse_datetime(last_updated)),
"unique_id": sensor.uniqueid,
},
}
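        # A presence event is reported as "home"; when a home zone is defined,
        # its coordinates are reused so the tracker gets a GPS fix (this code
        # never uses device-side coordinates). Otherwise report "not_home".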
if sensor.state.get("presence"):
kwargs["location_name"] = STATE_HOME
zone_home = self.hass.states.get(zone.ENTITY_ID_HOME)
if zone_home:
kwargs["gps"] = [
zone_home.attributes[ATTR_LATITUDE],
zone_home.attributes[ATTR_LONGITUDE],
]
kwargs[ATTR_GPS_ACCURACY] = 0
else:
kwargs["location_name"] = STATE_NOT_HOME
_LOGGER.debug(
"Hue Geofence %s: %s (%s)",
sensor.name,
kwargs["location_name"],
kwargs["attributes"],
)
result = await self.async_see(**kwargs)
return result
async def async_update_info(self, now=None):
"""Get the bridge info."""
bridges = get_bridges(self.hass)
if not bridges:
return
await asyncio.wait(
[update_api(bridge.api.sensors) for bridge in bridges], loop=self.hass.loop
)
sensors = [
self.async_see_sensor(sensor)
for bridge in bridges
for sensor in bridge.api.sensors.values()
if sensor.type == TYPE_GEOFENCE
]
if not sensors:
return
await asyncio.wait(sensors)
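# Example configuration.yaml entry for this custom platform -- a sketch only;
# the platform name is assumed to follow the custom_components folder name:
#
#   device_tracker:
#     - platform: hue_custom
#       scan_interval: 60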
|
gelab/mainr
|
tests/test_pooling.py
|
Python
|
gpl-2.0
| 14,346 | 0.00007 |
# MySQL Connector/Python - MySQL driver written in Python.
# Copyright (c) 2013, 2014, Oracle and/or its affiliates. All rights reserved.
# MySQL Connector/Python is licensed under the terms of the GPLv2
# <http://www.gnu.org/licenses/old-licenses/gpl-2.0.html>, like most
# MySQL Connectors. There are special exceptions to the terms and
# conditions of the GPLv2 as it is applied to this software, see the
# FOSS License Exception
# <http://www.mysql.com/about/legal/licensing/foss-exception.html>.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""Unittests for mysql.connector.pooling
"""
import uuid
try:
from Queue import Queue
except ImportError:
# Python 3
from queue import Queue
import tests
import mysql.connector
from mysql.connector import errors
from mysql.connector.connection import MySQLConnection
from mysql.connector import pooling
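# The tests below exercise pool naming, the PooledMySQLConnection wrapper and
# MySQLConnectionPool configuration; each tearDown clears the module-level
# pool registry so tests stay independent.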
class PoolingTests(tests.MySQLConnectorTests):
def tearDown(self):
mysql.connector._CONNECTION_POOLS = {}
def test_generate_pool_name(self):
self.assertRaises(errors.PoolError, pooling.generate_pool_name)
config = {'host': 'ham', 'database': 'spam'}
self.assertEqual('ham_spam',
pooling.generate_pool_name(**config))
config = {'database': 'spam', 'port': 3377, 'host': 'example.com'}
self.assertEqual('example.com_3377_spam',
pooling.generate_pool_name(**config))
config = {
'user': 'ham', 'database': 'spam',
'port': 3377, 'host': 'example.com'}
self.assertEqual('example.com_3377_ham_spam',
pooling.generate_pool_name(**config))
class PooledMySQLConnectionTests(tests.MySQLConnectorTests):
def tearDown(self):
mysql.connector._CONNECTION_POOLS = {}
def test___init__(self):
dbconfig = tests.get_mysql_config()
cnxpool = pooling.MySQLConnectionPool(pool_size=1, **dbconfig)
self.assertRaises(TypeError, pooling.PooledMySQLConnection)
cnx = MySQLConnection(**dbconfig)
pcnx = pooling.PooledMySQLConnection(cnxpool, cnx)
self.assertEqual(cnxpool, pcnx._cnx_pool)
self.assertEqual(cnx, pcnx._cnx)
self.assertRaises(AttributeError, pooling.PooledMySQLConnection,
None, None)
self.assertRaises(AttributeError, pooling.PooledMySQLConnection,
cnxpool, None)
def test___getattr__(self):
dbconfig = tests.get_mysql_config()
cnxpool = pooling.MySQLConnectionPool(pool_size=1, pool_name='test')
cnx = MySQLConnection(**dbconfig)
pcnx = pooling.PooledMySQLConnection(cnxpool, cnx)
exp_attrs = {
'_connection_timeout': dbconfig['connection_timeout'],
'_database': dbconfig['database'],
'_host': dbconfig['host'],
'_password': dbconfig['password'],
'_port': dbconfig['port'],
'_unix_socket': dbconfig['unix_socket']
}
for attr, value in exp_attrs.items():
self.assertEqual(
value,
getattr(pcnx, attr),
"Attribute {0} of reference connection not correct".format(
attr))
self.assertEqual(pcnx.connect, cnx.connect)
def test_close(self):
dbconfig = tests.get_mysql_config()
cnxpool = pooling.MySQLConnectionPool(pool_size=1, **dbconfig)
cnxpool._original_cnx = None
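        # Bind a stub add_connection to the pool so that closing the pooled
        # connection hands the raw connection back to this stub; the test then
        # checks that the very same object was returned.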
def dummy_add_connection(self, cnx=None):
self._original_cnx = cnx
cnxpool.add_connection = dummy_add_connection.__get__(
cnxpool, pooling.MySQLConnectionPool)
pcnx = pooling.PooledMySQLConnection(cnxpool,
MySQLConnection(**dbconfig))
cnx = pcnx._cnx
pcnx.close()
self.assertEqual(cnx, cnxpool._original_cnx)
def test_config(self):
dbconfig = tests.get_mysql_config()
cnxpool = pooling.MySQLConnectionPool(pool_size=1, **dbconfig)
cnx = cnxpool.get_connection()
self.assertRaises(errors.PoolError, cnx.config, user='spam')
class MySQLConnectionPoolTests(tests.MySQLConnectorTests):
def tearDown(self):
mysql.connector._CONNECTION_POOLS = {}
def test___init__(self):
dbconfig = tests.get_mysql_config()
self.assertRaises(errors.PoolError, pooling.MySQLConnectionPool)
self.assertRaises(AttributeError, pooling.MySQLConnectionPool,
pool_name='test',
pool_size=-1)
self.assertRaises(AttributeError, pooling.MySQLConnectionPool,
pool_name='test',
pool_size=0)
self.assertRaises(AttributeError, pooling.MySQLConnectionPool,
pool_name='test',
pool_size=(pooling.CNX_POOL_MAXSIZE + 1))
cnxpool = pooling.MySQLConnectionPool(pool_name='test')
self.assertEqual(5, cnxpool._pool_size)
self.assertEqual('test', cnxpool._pool_name)
self.assertEqual({}, cnxpool._cnx_config)
self.assertTrue(isinstance(cnxpool._cnx_queue, Queue))
self.assertTrue(isinstance(cnxpool._config_version, uuid.UUID))
        self.assertTrue(cnxpool._reset_session)
cnxpool = pooling.MySQLConnectionPool(pool_size=10, pool_name='test')
self.assertEqual(10, cnxpool._pool_size)
cnxpool = pooling.MySQLConnectionPool(pool_size=10, **dbconfig)
self.assertEqual(dbconfig, cnxpool._cnx_config,
"Connection configuration not saved correctly")
self.assertEqual(10, cnxpool._cnx_queue.qsize())
self.assertTrue(isinstance(cnxpool._config_version, uuid.UUID))
        cnxpool = pooling.MySQLConnectionPool(pool_size=1, pool_name='test',
pool_reset_session=False)
self.assertFalse(cnxpool._reset_session)
def test_pool_name(self):
"""Test MySQLConnectionPool.pool_name property"""
pool_name = 'ham'
cnxpool = pooling.MySQLConnectionPool(pool_name=pool_name)
self.assertEqual(pool_name, cnxpool.pool_name)
    def test_reset_session(self):
"""Test MySQLConnectionPool.reset_session property"""
cnxpool = pooling.MySQLConnectionPool(pool_name='test',
pool_reset_session=False)
self.assertFalse(cnxpool.reset_session)
cnxpool._reset_session = True
self.assertTrue(cnxpool.reset_session)
def test_pool_size(self):
"""Test MySQLConnectionPool.pool_size property"""
pool_size = 4
cnxpool = pooling.MySQLConnectionPool(pool_name='test',
pool_size=pool_size)
self.assertEqual(pool_size, cnxpool.pool_size)
def test_reset_session(self):
"""Test MySQLConnectionPool.reset_session property"""
cnxpool = pooling.MySQLConnectionPool(pool_name='test',
pool_reset_session=False)
self.assertFalse(cnxpool.reset_session)
cnxpool._reset_session = True
self.assertTrue(cnxpool.reset_session)
def test__set_pool_size(self):
cnxpool = pooling.MySQLConnectionPool(pool_name='test')
self.assertRaises(AttributeError, cnxpool._set_pool_size, -1)
self.assertRaises(AttributeError, cnxpool._set_pool_size, 0)
self.assertRaises(AttributeError, cnxpool._set_pool_size,
|