| text (stringlengths 6 to 947k) | repo_name (stringlengths 5 to 100) | path (stringlengths 4 to 231) | language (stringclasses, 1 value) | license (stringclasses, 15 values) | size (int64, 6 to 947k) | score (float64, 0 to 0.34) |
|---|---|---|---|---|---|---|
# coding: utf-8
"""
A simple module to fetch Cavelink values by parsing the HTML page of sensors.
"""
from setuptools import find_packages, setup
with open('README.rst', 'r') as f:
long_description = f.read()
setup(
name='cavelink',
version='1.1.1',
author='Sébastien Pittet',
author_email='sebastien@pittet.org',
description='Fetch Cavelink data by parsing the webpage of sensors.',
long_description=long_description,
url='https://github.com/SebastienPittet/cavelink',
keywords='speleo cave sensor',
packages=find_packages(),
license='MIT',
platforms='any',
install_requires=['python-dateutil', 'requests'],
classifiers=[
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 2',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: Other Audience'
]
)
| SebastienPittet/cavelink | setup.py | Python | mit | 1,270 | 0 |
#!/usr/local/bin/python3.4
"""
Simple example to send a WoL packet to a registered system on the ISY.
If this script is called without any arguments,
it prints a list of registered WoL systems;
otherwise each argument is treated as a registered WoL Id
and we attempt to send a WoL packet to it.
"""
__author__ = "Peter Shipley"
import sys
import ISY
from ISY.IsyExceptionClass import IsyResponseError, IsyValueError
def main(isy):
if len(sys.argv[1:]) > 0:
for a in sys.argv[1:] :
try :
isy.net_wol(a)
except (IsyValueError, IsyResponseError) as errormsg :
print("problem sending WOL to {!s} : {!s}".format(a, errormsg))
continue
else :
print("WOL sent to {!s}".format(a))
else :
pfmt = "{:<5}{:<16} {:<20}"
print(pfmt.format("Id", "Name", "Mac"))
print(pfmt.format("-" * 4, "-" * 20, "-" * 20))
for w in isy.net_wol_iter():
if "id" in w :
print(pfmt.format(w['id'], w['name'], w['mac']))
if __name__=="__main__":
myisy= ISY.Isy(parsearg=1)
main(myisy)
exit(0)
| fxstein/ISYlib-python | bin/isy_net_wol.py | Python | bsd-2-clause | 1,132 | 0.013251 |
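The listing branch of the WoL script above builds its table purely with str.format alignment specifiers. Below is a minimal standalone sketch of that pattern; the sample row values (id, hostname, MAC) are made up for illustration.

# Same column layout as the script above: left-aligned, fixed-width fields.
pfmt = "{:<5}{:<16} {:<20}"
print(pfmt.format("Id", "Name", "Mac"))
print(pfmt.format("-" * 4, "-" * 20, "-" * 20))
print(pfmt.format("3", "office-pc", "00:11:22:33:44:55"))  # hypothetical WoL entry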
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import os
import platform
import shutil
import sys
sys.path.append('bin')
from autojump_argparse import ArgumentParser
SUPPORTED_SHELLS = ('bash', 'zsh', 'fish')
def cp(src, dest, dryrun=False):
print("copying file: %s -> %s" % (src, dest))
if not dryrun:
shutil.copy(src, dest)
def get_shell():
return os.path.basename(os.getenv('SHELL', ''))
def mkdir(path, dryrun=False):
print("creating directory:", path)
if not dryrun and not os.path.exists(path):
os.makedirs(path)
def modify_autojump_sh(etc_dir, dryrun=False):
"""Append custom installation path to autojump.sh"""
custom_install = "\
\n# check custom install \
\nif [ -s %s/autojump.${shell} ]; then \
\n\tsource %s/autojump.${shell} \
\nfi\n" % (etc_dir, etc_dir)
with open(os.path.join(etc_dir, 'autojump.sh'), 'a') as f:
f.write(custom_install)
def parse_arguments():
default_user_destdir = os.path.join(os.path.expanduser("~"), '.autojump')
default_user_prefix = ''
default_user_zshshare = 'functions'
default_system_destdir = '/'
default_system_prefix = '/usr/local'
default_system_zshshare = '/usr/share/zsh/site-functions'
parser = ArgumentParser(
description='Installs autojump globally for root users, otherwise \
installs in current user\'s home directory.')
parser.add_argument(
'-n', '--dryrun', action="store_true", default=False,
help='simulate installation')
parser.add_argument(
'-f', '--force', action="store_true", default=False,
help='skip root user, shell type, Python version checks')
parser.add_argument(
'-d', '--destdir', metavar='DIR', default=default_user_destdir,
help='set destination to DIR')
parser.add_argument(
'-p', '--prefix', metavar='DIR', default=default_user_prefix,
help='set prefix to DIR')
parser.add_argument(
'-z', '--zshshare', metavar='DIR', default=default_user_zshshare,
help='set zsh share destination to DIR')
parser.add_argument(
'-s', '--system', action="store_true", default=False,
help='install system wide for all users')
args = parser.parse_args()
if not args.force:
if sys.version_info[0] == 2 and sys.version_info[1] < 6:
print("Python v2.6+ or v3.0+ required.", file=sys.stderr)
sys.exit(1)
if get_shell() not in SUPPORTED_SHELLS:
print("Unsupported shell: %s" % os.getenv('SHELL'),
file=sys.stderr)
sys.exit(1)
if args.system and os.geteuid() != 0:
print("Please rerun as root for system-wide installation.",
file=sys.stderr)
sys.exit(1)
if args.destdir != default_user_destdir \
or args.prefix != default_user_prefix \
or args.zshshare != default_user_zshshare:
args.custom_install = True
else:
args.custom_install = False
if args.system:
if args.custom_install:
print("Custom paths incompatible with --system option.",
file=sys.stderr)
sys.exit(1)
args.destdir = default_system_destdir
args.prefix = default_system_prefix
args.zshshare = default_system_zshshare
return args
def print_post_installation_message(etc_dir):
if get_shell() == 'fish':
aj_shell = '%s/autojump.fish' % etc_dir
source_msg = "if test -f %s; . %s; end" % (aj_shell, aj_shell)
# TODO(ting|2013-12-31): check config.fish location on OSX
rcfile = '~/.config/fish/config.fish'
else:
aj_shell = '%s/autojump.sh' % etc_dir
source_msg = "[[ -s %s ]] && source %s" % (aj_shell, aj_shell)
if platform.system() == 'Darwin' and get_shell() == 'bash':
rcfile = '~/.profile'
else:
rcfile = '~/.%src' % get_shell()
print("\nPlease manually add the following line(s) to %s:" % rcfile)
print('\n\t' + source_msg)
if get_shell() == 'zsh':
print("\n\tautoload -U compinit && compinit -u")
print("\nPlease restart terminal(s) before running autojump.\n")
def main(args):
if args.dryrun:
print("Installing autojump to %s (DRYRUN)..." % args.destdir)
else:
print("Installing autojump to %s ..." % args.destdir)
bin_dir = os.path.join(args.destdir, args.prefix, 'bin')
etc_dir = os.path.join(args.destdir, 'etc/profile.d')
doc_dir = os.path.join(args.destdir, args.prefix, 'share/man/man1')
icon_dir = os.path.join(args.destdir, args.prefix, 'share/autojump')
zshshare_dir = os.path.join(args.destdir, args.zshshare)
mkdir(bin_dir, args.dryrun)
mkdir(etc_dir, args.dryrun)
mkdir(doc_dir, args.dryrun)
mkdir(icon_dir, args.dryrun)
mkdir(zshshare_dir, args.dryrun)
cp('./bin/autojump', bin_dir, args.dryrun)
cp('./bin/autojump_argparse.py', bin_dir, args.dryrun)
cp('./bin/autojump_data.py', bin_dir, args.dryrun)
cp('./bin/autojump_utils.py', bin_dir, args.dryrun)
cp('./bin/autojump.sh', etc_dir, args.dryrun)
cp('./bin/autojump.bash', etc_dir, args.dryrun)
cp('./bin/autojump.fish', etc_dir, args.dryrun)
cp('./bin/autojump.zsh', etc_dir, args.dryrun)
cp('./bin/_j', zshshare_dir, args.dryrun)
cp('./bin/icon.png', icon_dir, args.dryrun)
cp('./docs/autojump.1', doc_dir, args.dryrun)
if args.custom_install:
modify_autojump_sh(etc_dir, args.dryrun)
print_post_installation_message(etc_dir)
if __name__ == "__main__":
sys.exit(main(parse_arguments()))
| mdlawson/autojump | install.py | Python | gpl-3.0 | 5,781 | 0.000346 |
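In the installer above, modify_autojump_sh() appends a shell stanza that sources the custom install location. The sketch below simply reproduces and prints that stanza for a hypothetical etc_dir; the path is an assumption, not a value taken from the installer.

# Render the stanza modify_autojump_sh() would append, for a made-up etc_dir.
etc_dir = "/home/user/.autojump/etc/profile.d"  # hypothetical custom install path
custom_install = "\
\n# check custom install \
\nif [ -s %s/autojump.${shell} ]; then \
\n\tsource %s/autojump.${shell} \
\nfi\n" % (etc_dir, etc_dir)
print(custom_install)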
## Code outside the data string, and the setup and action blocks is ignored
## If manually editing, you must reload the code. Delete the resource timestamp so kaithem knows it's new
__data__="""
{continual: false, enable: true, once: true, priority: interactive, rate-limit: 0.0,
resource-timestamp: 1645141613510257, resource-type: event}
"""
__trigger__='False'
if __name__=='__setup__':
#This code runs once when the event loads. It also runs when you save the event during the test compile
    #and may run multiple times when kaithem boots due to dependency resolution
__doc__=''
def nbr():
return(50, '<a href="/pages/Beholder/ui"><i class="icofont-castle"></i>Beholder</a>')
kaithem.web.navBarPlugins['Beholder']=nbr
def eventAction():
pass
| EternityForest/KaithemAutomation | kaithem/data/modules/Beholder/main.py | Python | gpl-3.0 | 787 | 0.01906 |
# Copyright 2009 Google Inc. Released under the GPL v2
"""This is a convenience module to import all available types of hosts.
Implementation details:
You should 'import hosts' instead of importing every available host module.
"""
from autotest_lib.client.common_lib import utils
import base_classes
Host = utils.import_site_class(
__file__, "autotest_lib.client.common_lib.hosts.site_host", "SiteHost",
base_classes.Host)
| clebergnu/autotest | client/common_lib/hosts/__init__.py | Python | gpl-2.0 | 435 | 0 |
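A hedged usage sketch for the convenience package above: callers import the package and use hosts.Host, which import_site_class resolves to the site-specific SiteHost when one exists and otherwise leaves as base_classes.Host. It assumes an autotest checkout where autotest_lib is importable; constructor arguments are site-specific and not shown here.

# Minimal sketch: resolve the Host class through the package, not a concrete module.
from autotest_lib.client.common_lib import hosts

host_cls = hosts.Host  # SiteHost if the site module provides one, else base_classes.Host
print(host_cls.__name__)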
'''
New Integration Test for resizing root volume.
@author: czhou25
'''
import zstackwoodpecker.test_util as test_util
import zstackwoodpecker.test_state as test_state
import zstackwoodpecker.test_lib as test_lib
import zstackwoodpecker.operations.resource_operations as res_ops
import zstackwoodpecker.operations.volume_operations as vol_ops
import zstackwoodpecker.zstack_test.zstack_test_vm as test_vm_header
import time
import os
vm = None
test_stub = test_lib.lib_get_test_stub()
test_obj_dict = test_state.TestStateDict()
def test():
global vm
vm_creation_option = test_util.VmOption()
image_name = os.environ.get('imageName_net')
l3_name = os.environ.get('l3VlanNetworkName1')
vm = test_stub.create_vm("test_resize_vm", image_name, l3_name)
test_obj_dict.add_vm(vm)
vm.check()
vm.stop()
vm.check()
vol_size = test_lib.lib_get_root_volume(vm.get_vm()).size
volume_uuid = test_lib.lib_get_root_volume(vm.get_vm()).uuid
set_size = 1024*1024*1024*5
snapshots = test_obj_dict.get_volume_snapshot(volume_uuid)
snapshots.set_utility_vm(vm)
snapshots.create_snapshot('create_snapshot1')
snapshots.check()
vol_ops.resize_volume(volume_uuid, set_size)
vm.update()
vol_size_after = test_lib.lib_get_root_volume(vm.get_vm()).size
if set_size != vol_size_after:
test_util.test_fail('Resize Root Volume failed, size = %s' % vol_size_after)
snapshots.delete()
test_obj_dict.rm_volume_snapshot(snapshots)
test_lib.lib_error_cleanup(test_obj_dict)
test_util.test_pass('Resize VM Snapshot Test Success')
#Will be called only if exception happens in test().
def error_cleanup():
test_lib.lib_error_cleanup(test_obj_dict)
| zstackorg/zstack-woodpecker | integrationtest/vm/multihosts/volumes/test_snapshot_resize_vm.py | Python | apache-2.0 | 1,782 | 0.002806 |
# -*- coding: utf-8 -*-
"""
pytest fixtures
"""
import pytest
from django.contrib.auth.models import User
from orb.models import Category, Tag, UserProfile
from orb.peers.models import Peer
from orb.resources.tests.factory import resource_factory
pytestmark = pytest.mark.django_db
@pytest.fixture
def testing_user():
user, _ = User.objects.get_or_create(username="tester")
user.set_password("password")
user.save()
yield user
@pytest.fixture
def testing_profile(testing_user):
yield UserProfile.objects.create(user=testing_user)
@pytest.fixture()
def import_user():
user, _ = User.objects.get_or_create(username="importer")
user.set_password("password")
user.save()
yield user
@pytest.fixture
def importer_profile(import_user):
yield UserProfile.objects.create(user=import_user)
@pytest.fixture
def sample_category():
category, _ = Category.objects.get_or_create(name="test category")
yield category
@pytest.fixture
def sample_tag(sample_category, testing_user):
tag, _ = Tag.objects.get_or_create(name="test tag", defaults={
"category": sample_category,
"create_user": testing_user,
"update_user": testing_user,
})
yield tag
@pytest.fixture
def role_category():
category, _ = Category.objects.get_or_create(name="audience")
yield category
@pytest.fixture
def role_tag(role_category, testing_user):
tag, _ = Tag.objects.get_or_create(name="cadre", defaults={
"category": role_category,
"create_user": testing_user,
"update_user": testing_user,
})
assert Tag.tags.roles()
yield tag
@pytest.fixture
def test_resource(testing_user):
yield resource_factory(
user=testing_user,
title=u"Básica salud del recién nacido",
description=u"Básica salud del recién nacido",
)
@pytest.fixture(scope="session")
def test_peer():
peer = Peer.peers.create(name="Distant ORB", host="http://www.orb.org/")
yield peer
@pytest.fixture(scope="session")
def remote_resource(import_user, test_peer):
"""Fixture for a remotely created resource"""
yield resource_factory(
user=import_user,
title=u"A remote resource",
description=u"<p>A remote resource</p>",
source_peer=test_peer,
)
| mPowering/django-orb | orb/fixtures/__init__.py | Python | gpl-3.0 | 2,306 | 0.000869 |
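A minimal sketch of tests that could consume the fixtures above. These test functions are hypothetical (not part of the original module) and assume pytest collects them alongside the fixtures.

# Hypothetical tests exercising the fixtures defined above.
def test_resource_keeps_its_title(test_resource):
    assert test_resource.title == u"Básica salud del recién nacido"


def test_sample_tag_belongs_to_sample_category(sample_tag, sample_category):
    assert sample_tag.category == sample_category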
import os
import sys
import shutil
import errno
import time
import hashlib
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions
if "TRAVIS_BUILD_NUMBER" in os.environ:
if "SAUCE_USERNAME" not in os.environ:
print "No sauce labs login credentials found. Stopping tests..."
sys.exit(0)
capabilities = {'browserName': "firefox"}
capabilities['platform'] = "Windows 7"
capabilities['version'] = "48.0"
capabilities['screenResolution'] = "1280x1024"
capabilities["build"] = os.environ["TRAVIS_BUILD_NUMBER"]
capabilities["tunnel-identifier"] = os.environ["TRAVIS_JOB_NUMBER"]
# connect to sauce labs
username = os.environ["SAUCE_USERNAME"]
access_key = os.environ["SAUCE_ACCESS_KEY"]
hub_url = "%s:%s@localhost:4445" % (username, access_key)
driver = webdriver.Remote(command_executor="http://%s/wd/hub" % hub_url, desired_capabilities=capabilities)
else:
# local
print "Using LOCAL webdriver"
profile = webdriver.FirefoxProfile()
profile.set_preference("intl.accept_languages", "en")
driver = webdriver.Firefox(profile)
driver.maximize_window()
def write_random_file(size, filename):
if not os.path.exists(os.path.dirname(filename)):
try:
os.makedirs(os.path.dirname(filename))
except OSError as exc: # Guard against race condition
if exc.errno != errno.EEXIST:
raise
with open(filename, 'wb') as fout:
fout.write(os.urandom(size))
def sha1_file(filename):
BLOCKSIZE = 65536
hasher = hashlib.sha1()
with open(filename, 'rb') as afile:
buf = afile.read(BLOCKSIZE)
while len(buf) > 0:
hasher.update(buf)
buf = afile.read(BLOCKSIZE)
return hasher.hexdigest()
def sha1_folder(folder):
sha1_dict = {}
for root, dirs, files in os.walk(folder):
for filename in files:
file_path = os.path.join(root, filename)
sha1 = sha1_file(file_path)
relative_file_path = os.path.relpath(file_path, folder)
sha1_dict.update({relative_file_path: sha1})
return sha1_dict
def wait_for_text(time, xpath, text):
WebDriverWait(driver, time).until(expected_conditions.text_to_be_present_in_element((By.XPATH, xpath), text))
BACKUP_NAME = "BackupName"
PASSWORD = "the_backup_password_is_really_long_and_safe"
SOURCE_FOLDER = os.path.abspath("duplicati_gui_test_source")
DESTINATION_FOLDER = os.path.abspath("duplicati_gui_test_destination")
DESTINATION_FOLDER_DIRECT_RESTORE = os.path.abspath("duplicati_gui_test_destination_direct_restore")
RESTORE_FOLDER = os.path.abspath("duplicati_gui_test_restore")
DIRECT_RESTORE_FOLDER = os.path.abspath("duplicati_gui_test_direct_restore")
# wait 5 seconds for duplicati server to start
time.sleep(5)
driver.implicitly_wait(10)
driver.get("http://localhost:8200/ngax/index.html")
if "Duplicati" not in driver.title:
raise Exception("Unable to load duplicati GUI!")
# Create and hash random files in the source folder
write_random_file(1024 * 1024, SOURCE_FOLDER + os.sep + "1MB.test")
write_random_file(100 * 1024, SOURCE_FOLDER + os.sep + "subfolder" + os.sep + "100KB.test")
sha1_source = sha1_folder(SOURCE_FOLDER)
# Dismiss the password request
driver.find_element_by_link_text("No, my machine has only a single account").click()
# Add new backup
driver.find_element_by_link_text("Add backup").click()
# Choose the "add new" option
driver.find_element_by_id("blank").click()
driver.find_element_by_xpath("//input[@class='submit next']").click()
# Add new backup - General page
time.sleep(1)
driver.find_element_by_id("name").send_keys(BACKUP_NAME)
driver.find_element_by_id("passphrase").send_keys(PASSWORD)
driver.find_element_by_id("repeat-passphrase").send_keys(PASSWORD)
driver.find_element_by_id("nextStep1").click()
# Add new backup - Destination page
driver.find_element_by_link_text("Manually type path").click()
driver.find_element_by_id("file_path").send_keys(DESTINATION_FOLDER)
driver.find_element_by_id("nextStep2").click()
# Add new backup - Source Data page
driver.find_element_by_id("sourcePath").send_keys(os.path.abspath(SOURCE_FOLDER) + os.sep)
driver.find_element_by_id("sourceFolderPathAdd").click()
driver.find_element_by_id("nextStep3").click()
# Add new backup - Schedule page
useScheduleRun = driver.find_element_by_id("useScheduleRun")
if useScheduleRun.is_selected():
useScheduleRun.click()
driver.find_element_by_id("nextStep4").click()
# Add new backup - Options page
driver.find_element_by_id("save").click()
# Run the backup job and wait for finish
driver.find_element_by_link_text(BACKUP_NAME).click()
[n for n in driver.find_elements_by_xpath("//dl[@class='taskmenu']/dd/p/span[contains(text(),'Run now')]") if n.is_displayed()][0].click()
wait_for_text(60, "//div[@class='task ng-scope']/dl[2]/dd[1]", "(took ")
# Restore
if len([n for n in driver.find_elements_by_xpath("//span[contains(text(),'Restore files ...')]") if n.is_displayed()]) == 0:
driver.find_element_by_link_text(BACKUP_NAME).click()
[n for n in driver.find_elements_by_xpath("//span[contains(text(),'Restore files ...')]") if n.is_displayed()][0].click()
driver.find_element_by_xpath("//span[contains(text(),'" + SOURCE_FOLDER + "')]") # wait for filelist
time.sleep(1)
driver.find_element_by_xpath("//restore-file-picker/ul/li/div/a[2]").click() # select root folder checkbox
driver.find_element_by_xpath("//form[@id='restore']/div[1]/div[@class='buttons']/a/span[contains(text(), 'Continue')]").click()
driver.find_element_by_id("restoretonewpath").click()
driver.find_element_by_id("restore_path").send_keys(RESTORE_FOLDER)
driver.find_element_by_xpath("//form[@id='restore']/div/div[@class='buttons']/a/span[contains(text(),'Restore')]").click()
# wait for restore to finish
wait_for_text(60, "//form[@id='restore']/div[3]/h3/div[1]", "Your files and folders have been restored successfully.")
# hash restored files
sha1_restore = sha1_folder(RESTORE_FOLDER)
# cleanup: delete source and restore folder and rename destination folder for direct restore
shutil.rmtree(SOURCE_FOLDER)
shutil.rmtree(RESTORE_FOLDER)
os.rename(DESTINATION_FOLDER, DESTINATION_FOLDER_DIRECT_RESTORE)
# direct restore
driver.find_element_by_link_text("Restore").click()
# Choose the "restore direct" option
driver.find_element_by_id("direct").click()
driver.find_element_by_xpath("//input[@class='submit next']").click()
time.sleep(1)
driver.find_element_by_link_text("Manually type path").click()
driver.find_element_by_id("file_path").send_keys(DESTINATION_FOLDER_DIRECT_RESTORE)
driver.find_element_by_id("nextStep1").click()
driver.find_element_by_id("password").send_keys(PASSWORD)
driver.find_element_by_id("connect").click()
driver.find_element_by_xpath("//span[contains(text(),'" + SOURCE_FOLDER + "')]") # wait for filelist
time.sleep(1)
driver.find_element_by_xpath("//restore-file-picker/ul/li/div/a[2]").click() # select root folder checkbox
time.sleep(1)
driver.find_element_by_xpath("//form[@id='restore']/div[1]/div[@class='buttons']/a/span[contains(text(), 'Continue')]").click()
driver.find_element_by_id("restoretonewpath").click()
driver.find_element_by_id("restore_path").send_keys(DIRECT_RESTORE_FOLDER)
driver.find_element_by_xpath("//form[@id='restore']/div/div[@class='buttons']/a/span[contains(text(),'Restore')]").click()
# wait for restore to finish
wait_for_text(60, "//form[@id='restore']/div[3]/h3/div[1]", "Your files and folders have been restored successfully.")
# hash direct restore files
sha1_direct_restore = sha1_folder(DIRECT_RESTORE_FOLDER)
print "Source hashes: " + str(sha1_source)
print "Restore hashes: " + str(sha1_restore)
print "Direct Restore hashes: " + str(sha1_direct_restore)
# Tell Sauce Labs to stop the test
driver.quit()
if not (sha1_source == sha1_restore and sha1_source == sha1_direct_restore):
sys.exit(1) # return with error
| agrajaghh/duplicati | guiTests/guiTest.py | Python | lgpl-2.1 | 8,072 | 0.002602 |
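The hashing helpers in the GUI test above are self-contained; a small standalone sketch of how they fit together follows. The folder and file names are made up, and it assumes write_random_file, sha1_file and sha1_folder are in scope (for example, run inside the same module).

# Write one random file, hash it, then hash the folder that contains it.
import os
sample_dir = "hash_sketch_dir"                      # hypothetical scratch folder
sample_file = os.path.join(sample_dir, "sample.bin")
write_random_file(4096, sample_file)
print "single file:", sha1_file(sample_file)
print "whole folder:", sha1_folder(sample_dir)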
data = {}
def setup():
global data
from .schema import Ship, Faction
xwing = Ship(id="1", name="X-Wing")
ywing = Ship(id="2", name="Y-Wing")
awing = Ship(id="3", name="A-Wing")
# Yeah, technically it's Corellian. But it flew in the service of the rebels,
# so for the purposes of this demo it's a rebel ship.
falcon = Ship(id="4", name="Millennium Falcon")
homeOne = Ship(id="5", name="Home One")
tieFighter = Ship(id="6", name="TIE Fighter")
tieInterceptor = Ship(id="7", name="TIE Interceptor")
executor = Ship(id="8", name="Executor")
rebels = Faction(
id="1", name="Alliance to Restore the Republic", ships=["1", "2", "3", "4", "5"]
)
empire = Faction(id="2", name="Galactic Empire", ships=["6", "7", "8"])
data = {
"Faction": {"1": rebels, "2": empire},
"Ship": {
"1": xwing,
"2": ywing,
"3": awing,
"4": falcon,
"5": homeOne,
"6": tieFighter,
"7": tieInterceptor,
"8": executor,
},
}
def create_ship(ship_name, faction_id):
from .schema import Ship
next_ship = len(data["Ship"].keys()) + 1
new_ship = Ship(id=str(next_ship), name=ship_name)
data["Ship"][new_ship.id] = new_ship
data["Faction"][faction_id].ships.append(new_ship.id)
return new_ship
def get_ship(_id):
return data["Ship"][_id]
def get_faction(_id):
return data["Faction"][_id]
def get_rebels():
return get_faction("1")
def get_empire():
return get_faction("2")
| graphql-python/graphene | examples/starwars_relay/data.py | Python | mit | 1,593 | 0.001255 |
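A short usage sketch for the in-memory store above. The new ship name is made up; faction id "1" is the rebel faction created in setup(), and the sketch assumes the package's schema module is importable (the helpers use a relative import).

# Populate the store, add a ship to the rebels, and read it back.
setup()
b_wing = create_ship("B-Wing", "1")
assert get_ship(b_wing.id).name == "B-Wing"
assert b_wing.id in get_rebels().ships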
import flickrapi
import csv
import sys
import datetime
import argparse
import os
# environment variable keys
ENV_KEY = 'FLICKR_API_KEY'
ENV_SECRET = 'FLICKR_API_SECRET'
MAX_PHOTOS_PER_PAGE = 500
# column headers for output file
columns = ['Title', 'Upload date', 'photo_id', 'url', 'Description',
'View count', 'Favs count', 'Comments count']
# setup flickr api
flickr = None
def main():
global flickr
# parse arguments
userId, fname, api_key, api_secret = parseArgs()
# check if user provided api key/secret
if not api_key or not api_secret:
# try to get key/secret from environment variables
api_key = os.getenv(ENV_KEY)
api_secret = os.getenv(ENV_SECRET)
# exit if we still dont' have key/secret
if not api_key or not api_secret:
sys.exit('No Flickr API key and secret. Either provide the key '
'and secret as options (--key and --secret) or set them '
'as environment variables.')
# initialize flickr api
flickr = flickrapi.FlickrAPI(api_key, api_secret)
# get number of photos for the user
userInfo = flickr.people.getInfo(user_id=userId)
count = int(userInfo[0].find('photos').find('count').text)
pages = count / MAX_PHOTOS_PER_PAGE + 1
print('Counting views for %d photos...' % (count))
# get list of photos
photo_pages = []
for page in range(1, pages + 1):
photo_pages.append(
flickr.photos.search(
user_id=userId, per_page=str(MAX_PHOTOS_PER_PAGE), page=page))
# get view count for each photo
data = []
for photo_page in photo_pages:
for photo in photo_page[0]:
data.append(get_photo_data(photo.get('id')))
# write counts to output
if (fname is not None):
rows = create_rows_from_data(data)
write_to_csv(fname, columns, rows)
print('Photo data successfully written to %s (this could take hours '
'if you have hundreds of photos)' % (fname))
# display view count for photos
print('Total photo views: %d' % (calc_total_views_from_data(data)))
def parseArgs():
# parse arguments and do error checking
parser = argparse.ArgumentParser()
parser.add_argument('user_id',
help='The id of the user whose total views will be '
'counted.',
default='.')
parser.add_argument('--output',
help='Name of the output file',
default=None)
parser.add_argument('--key',
help='Flickr API key (use once for setup)',
default=None)
parser.add_argument('--secret',
help='Flickr API secret (use once for setup)',
default=None)
args = parser.parse_args()
return args.user_id, args.output, args.key, args.secret
def calc_total_views_from_data(data):
total = 0
for photo in data:
total += int(photo['info'][0].attrib['views'])
return total
def create_rows_from_data(data):
rows = []
for photo in data:
title = photo['info'][0].find('title').text
upload_date = photo['info'][0].get('dateuploaded')
upload_date = datetime.datetime.fromtimestamp(
int(upload_date)).strftime('%Y-%m-%d %H:%M:%S')
photo_id = photo['info'][0].get('id')
url = photo['info'][0].find('urls')[0].text
description = photo['info'][0].find('description').text
if description is None:
description = ''
views = photo['info'][0].get('views')
favs = photo['favs'][0].get('total')
comments = photo['info'][0].find('comments').text
# output as delimited text
row = [title, upload_date, str(photo_id), url, description,
str(views), str(favs), str(comments)]
rows.append(row)
return rows
def get_photo_data(photo_id):
info = flickr.photos.getinfo(photo_id=photo_id)
favs = flickr.photos.getFavorites(photo_id=photo_id)
return {'info': info, 'favs': favs}
def write_to_csv(fname, header, rows):
with open(fname, 'wb') as csvfile:
csvwriter = csv.writer(csvfile, delimiter=',', quotechar='|',
quoting=csv.QUOTE_MINIMAL)
csvwriter.writerow(header)
for row in rows:
csvwriter.writerow(
[s.encode("utf-8").replace(',', '').replace('\n', '')
for s in row])
if __name__ == "__main__":
main()
| jordanjoz1/flickr-views-counter | count_views.py | Python | mit | 4,590 | 0 |
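write_to_csv() above UTF-8-encodes each cell and strips commas and newlines before writing. Below is a standalone sketch with made-up rows (Python 2, matching the script); note the comma in the title is removed in the output file, whose name is also an assumption.

# Hypothetical header/rows run through the same CSV writer.
header = ['Title', 'Upload date', 'View count']
rows = [[u'Sunset, at the lake', u'2015-01-01 12:00:00', u'42']]
write_to_csv('views_sketch.csv', header, rows)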
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, models
class StockMoveLine(models.Model):
_inherit = 'stock.move.line'
@api.model_create_multi
def create(self, vals_list):
records = super(StockMoveLine, self).create(vals_list)
records.filtered(lambda ml: ml.move_id.is_subcontract).move_id._check_overprocessed_subcontract_qty()
return records
def write(self, values):
res = super(StockMoveLine, self).write(values)
self.filtered(lambda ml: ml.move_id.is_subcontract).move_id._check_overprocessed_subcontract_qty()
return res
def _should_bypass_reservation(self, location):
""" If the move line is subcontracted then ignore the reservation. """
should_bypass_reservation = super(StockMoveLine, self)._should_bypass_reservation(location)
if not should_bypass_reservation and self.move_id.is_subcontract:
return True
return should_bypass_reservation
| ygol/odoo | addons/mrp_subcontracting/models/stock_move_line.py | Python | agpl-3.0 | 1,039 | 0.002887 |
from PyQt4.QtNetwork import QNetworkAccessManager, QNetworkRequest
from PyQt4 import QtGui, QtCore
import urllib2
import logging
import os
import util
import warnings
logger= logging.getLogger(__name__)
VAULT_PREVIEW_ROOT = "http://content.faforever.com/faf/vault/map_previews/small/"
class downloadManager(QtCore.QObject):
    ''' Downloads map and mod preview images for the client in the background '''
def __init__(self, parent = None):
self.client = parent
self.nam = QNetworkAccessManager()
self.nam.finished.connect(self.finishedDownload)
self.modRequests = {}
self.mapRequests = {}
self.mapRequestsItem = []
def finishedDownload(self,reply):
        ''' Handle a finished download: cache the image and update every requester '''
urlstring = reply.url().toString()
reqlist = []
if urlstring in self.mapRequests: reqlist = self.mapRequests[urlstring]
if urlstring in self.modRequests: reqlist = self.modRequests[urlstring]
if reqlist:
#save the map from cache
name = os.path.basename(reply.url().toString())
pathimg = os.path.join(util.CACHE_DIR, name)
img = QtCore.QFile(pathimg)
img.open(QtCore.QIODevice.WriteOnly)
img.write(reply.readAll())
img.close()
if os.path.exists(pathimg):
#Create alpha-mapped preview image
try:
pass # the server already sends 100x100 pic
# img = QtGui.QImage(pathimg).scaled(100,100)
# img.save(pathimg)
except:
pathimg = "games/unknown_map.png"
logger.info("Failed to resize " + name)
else :
pathimg = "games/unknown_map.png"
logger.debug("Web Preview failed for: " + name)
logger.debug("Web Preview used for: " + name)
for requester in reqlist:
if requester:
if requester in self.mapRequestsItem:
requester.setIcon(0, util.icon(pathimg, False))
self.mapRequestsItem.remove(requester)
else:
requester.setIcon(util.icon(pathimg, False))
if urlstring in self.mapRequests: del self.mapRequests[urlstring]
if urlstring in self.modRequests: del self.modRequests[urlstring]
def downloadMap(self, name, requester, item=False):
'''
Downloads a preview image from the web for the given map name
'''
#This is done so generated previews always have a lower case name. This doesn't solve the underlying problem (case folding Windows vs. Unix vs. FAF)
name = name.lower()
if len(name) == 0:
return
url = QtCore.QUrl(VAULT_PREVIEW_ROOT + urllib2.quote(name) + ".png")
if not url.toString() in self.mapRequests:
logger.debug("Searching map preview for: " + name)
self.mapRequests[url.toString()] = []
request = QNetworkRequest(url)
self.nam.get(request)
self.mapRequests[url.toString()].append(requester)
else :
self.mapRequests[url.toString()].append(requester)
if item:
self.mapRequestsItem.append(requester)
def downloadModPreview(self, strurl, requester):
url = QtCore.QUrl(strurl)
if not url.toString() in self.modRequests:
logger.debug("Searching mod preview for: " + os.path.basename(strurl).rsplit('.',1)[0])
self.modRequests[url.toString()] = []
request = QNetworkRequest(url)
self.nam.get(request)
self.modRequests[url.toString()].append(requester)
| Blackclaws/client | src/downloadManager/__init__.py | Python | gpl-3.0 | 3,779 | 0.00688 |
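A hedged wiring sketch for the download manager above. Any object exposing setIcon() can act as the requester; here a plain QPushButton stands in. The map name and preview URL path are assumptions, and the queued requests only complete once a Qt event loop is running (app.exec_()).

# Hypothetical wiring; downloads are queued and finish asynchronously in the event loop.
import sys
from PyQt4 import QtGui
app = QtGui.QApplication(sys.argv)
preview_button = QtGui.QPushButton()
manager = downloadManager(parent=None)
manager.downloadMap("theta_passage_5", preview_button)
manager.downloadModPreview("http://content.faforever.com/some_mod_icon.png", preview_button)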
from __future__ import print_function
import logging
from twisted.internet import reactor, ssl
from txjsonrpc.web.jsonrpc import Proxy
from OpenSSL import SSL
from twisted.python import log
def printValue(value):
print("Result: %s" % str(value))
def printError(error):
print('error', error)
def shutDown(data):
print("Shutting down reactor...")
reactor.stop()
def verifyCallback(connection, x509, errnum, errdepth, ok):
log.msg(connection.__str__())
if not ok:
log.msg('invalid server cert: %s' % x509.get_subject(), logLevel=logging.ERROR)
return False
else:
log.msg('good server cert: %s' % x509.get_subject(), logLevel=logging.INFO)
return True
class AltCtxFactory(ssl.ClientContextFactory):
def getContext(self):
#self.method = SSL.SSLv23_METHOD
ctx = ssl.ClientContextFactory.getContext(self)
ctx.set_verify(SSL.VERIFY_PEER, verifyCallback)
ctx.load_verify_locations("cacert.pem")
#ctx.use_certificate_file('keys/client.crt')
#ctx.use_privatekey_file('keys/client.key')
return ctx
import sys
log.startLogging(sys.stdout)
#proxy = Proxy('https://127.0.0.1:7443/', ssl_ctx_factory=AltCtxFactory)
proxy = Proxy('https://127.0.0.2:7443/', ssl_ctx_factory=AltCtxFactory)
d = proxy.callRemote('add', 3, 5)
d.addCallback(printValue).addErrback(printError).addBoth(shutDown)
reactor.run()
| oubiwann/txjsonrpc | examples/ssl/client.py | Python | mit | 1,418 | 0.009168 |
"""
Support for LIFX Cloud scenes.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/scene.lifx_cloud/
"""
import asyncio
import logging
import voluptuous as vol
import aiohttp
import async_timeout
from homeassistant.components.scene import Scene
from homeassistant.const import (CONF_PLATFORM, CONF_TOKEN, CONF_TIMEOUT)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.aiohttp_client import (async_get_clientsession)
_LOGGER = logging.getLogger(__name__)
LIFX_API_URL = 'https://api.lifx.com/v1/{0}'
DEFAULT_TIMEOUT = 10
PLATFORM_SCHEMA = vol.Schema({
vol.Required(CONF_PLATFORM): 'lifx_cloud',
vol.Required(CONF_TOKEN): cv.string,
vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int,
})
# pylint: disable=unused-argument
@asyncio.coroutine
def async_setup_platform(hass, config, async_add_devices, discovery_info=None):
"""Set up the scenes stored in the LIFX Cloud."""
token = config.get(CONF_TOKEN)
timeout = config.get(CONF_TIMEOUT)
headers = {
"Authorization": "Bearer %s" % token,
}
url = LIFX_API_URL.format('scenes')
try:
httpsession = async_get_clientsession(hass)
with async_timeout.timeout(timeout, loop=hass.loop):
scenes_resp = yield from httpsession.get(url, headers=headers)
except (asyncio.TimeoutError, aiohttp.ClientError):
_LOGGER.exception("Error on %s", url)
return False
status = scenes_resp.status
if status == 200:
data = yield from scenes_resp.json()
devices = []
for scene in data:
devices.append(LifxCloudScene(hass, headers, timeout, scene))
async_add_devices(devices)
return True
elif status == 401:
_LOGGER.error("Unauthorized (bad token?) on %s", url)
return False
_LOGGER.error("HTTP error %d on %s", scenes_resp.status, url)
return False
class LifxCloudScene(Scene):
"""Representation of a LIFX Cloud scene."""
def __init__(self, hass, headers, timeout, scene_data):
"""Initialize the scene."""
self.hass = hass
self._headers = headers
self._timeout = timeout
self._name = scene_data["name"]
self._uuid = scene_data["uuid"]
@property
def name(self):
"""Return the name of the scene."""
return self._name
@asyncio.coroutine
def async_activate(self):
"""Activate the scene."""
url = LIFX_API_URL.format('scenes/scene_id:%s/activate' % self._uuid)
try:
httpsession = async_get_clientsession(self.hass)
with async_timeout.timeout(self._timeout, loop=self.hass.loop):
yield from httpsession.put(url, headers=self._headers)
except (asyncio.TimeoutError, aiohttp.ClientError):
_LOGGER.exception("Error on %s", url)
| MungoRae/home-assistant | homeassistant/components/scene/lifx_cloud.py | Python | apache-2.0 | 2,931 | 0 |
#====================================================================================================
# C L A S S E S concerning the site description
#====================================================================================================
#---------------------------------------------------------------------------------------------------
"""
Class: SiteProperties(siteName='')
Each site will be fully described for our application in this class.
"""
#---------------------------------------------------------------------------------------------------
import time, math, statistics
class SiteProperties:
"A SiteProperties defines all needed site properties."
def __init__(self, siteName):
self.name = siteName
self.datasetRanks = {}
self.rankSum = 0
self.datasetSizes = {}
self.dsetIsValid = {}
self.dsetIsCustodial = {}
self.dsetLastCopy = {}
self.dsetIsPartial = {}
self.deprecated = {}
self.dsetReqTime = {}
self.dsetUpdTime = {}
self.dsetIsDone = {}
self.dsetNotUsedOnTape = {}
self.wishList = []
self.datasetsToDelete = []
self.protectedList = []
self.siteSizeGbV = 0
self.spaceTakenV = 0
self.spaceNotUsed = 0
self.spaceLCp = 0
self.space2free = 0
self.deleted = 0
self.protected = 0
self.globalDsetIndex = 0
self.epochTime = int(time.time())
def addDataset(self,dset,rank,size,valid,partial,custodial,depr,reqtime,updtime,wasused,isdone):
self.dsetIsValid[dset] = valid
self.dsetIsPartial[dset] = partial
self.dsetIsCustodial[dset] = custodial
self.datasetRanks[dset] = rank
self.datasetSizes[dset] = size
if depr:
self.deprecated[dset] = depr
self.spaceTakenV = self.spaceTakenV + size
self.dsetIsDone[dset] = isdone
self.dsetReqTime[dset] = reqtime
self.dsetUpdTime[dset] = updtime
self.rankSum = self.rankSum + rank*size
if wasused == 0:
self.spaceNotUsed = self.spaceNotUsed + size
def makeWishList(self, dataPropers, ncopyMin, banInvalid=True):
space = 0
self.wishList = []
space2free = self.space2free
addedExtra = 0
counter = 0
for datasetName in sorted(self.datasetRanks.keys(), cmp=self.compare):
counter = counter + 1
if counter < self.globalDsetIndex:
continue
if space > (space2free-self.deleted):
break
if datasetName in self.datasetsToDelete:
continue
if datasetName in self.protectedList:
continue
#custodial set can't be on deletion wish list
if self.dsetIsCustodial[datasetName] :
continue
#if dataPropers[datasetName].daysSinceUsed() > 540:
if dataPropers[datasetName].isFullOnTape():
#delta = (self.epochTime - self.dsetUpdTime[datasetName])/(60*60*24)
if dataPropers[datasetName].getGlobalRank() > 500:
#if delta > 500:
space = space + self.datasetSizes[datasetName]
self.wishList.append(datasetName)
dataPropers[datasetName].kickFromPool = True
print "exp at " + self.name + ": " + datasetName
#print datasetName
#addedExtra = addedExtra + 1
continue
if "/RECO" in datasetName:
delta = (self.epochTime - self.dsetUpdTime[datasetName])/(60*60*24)
#if dataPropers[datasetName].daysSinceUsed() > 180 and delta>180:
if delta > 180:
space = space + self.datasetSizes[datasetName]
self.wishList.append(datasetName)
dataPropers[datasetName].kickFromPool = True
print "RECO " + self.name + ": " + datasetName
continue
else:
continue
#non-valid dataset can't be on deletion list
if banInvalid == True:
if not self.dsetIsValid[datasetName]:
continue
dataPr = dataPropers[datasetName]
if dataPr.nSites() > ncopyMin:
space = space + self.datasetSizes[datasetName]
self.wishList.append(datasetName)
self.globalDsetIndex = counter
def hasMoreToDelete(self, dataPropers, ncopyMin, banInvalid):
counter = 0
if self.globalDsetIndex >= len(self.datasetRanks.keys()):
return False
for datasetName in sorted(self.datasetRanks.keys(), cmp=self.compare):
counter = counter + 1
if counter < self.globalDsetIndex:
continue
if '/MINIAOD' in datasetName:
ncopyMinTemp = 3
else:
ncopyMinTemp = ncopyMin
if datasetName in self.datasetsToDelete:
continue
if datasetName in self.protectedList:
continue
#custodial set can't be on deletion wish list
if self.dsetIsCustodial[datasetName] :
continue
#non-valid dataset can't be on deletion list
if banInvalid == True:
if not self.dsetIsValid[datasetName]:
continue
if datasetName in self.wishList:
continue
dataPr = dataPropers[datasetName]
if dataPr.nSites() <= ncopyMinTemp:
continue
return True
return False
def onWishList(self,dset):
if dset in self.wishList:
return True
return False
def onProtectedList(self,dset):
if dset in self.protectedList:
return True
return False
def wantToDelete(self):
if self.deleted < self.space2free:
return True
else:
return False
def grantWish(self,dset):
if dset in self.protectedList:
return False
if dset in self.datasetsToDelete:
return False
#if self.deleted > self.space2free:
# return False
self.datasetsToDelete.append(dset)
self.deleted = self.deleted + self.datasetSizes[dset]
return True
def revokeWish(self,dset):
if dset in self.datasetsToDelete:
self.datasetsToDelete.remove(dset)
self.deleted = self.deleted - self.datasetSizes[dset]
def canBeLastCopy(self,dset,banInvalid):
if not banInvalid:
return True
#can't be partial dataset
if dset not in self.dsetIsPartial:
return False
if self.dsetIsPartial[dset] :
return False
#can't be non-valid dataset
if not self.dsetIsValid[dset]:
return False
return True
def pinDataset(self,dset):
if dset in self.datasetsToDelete:
return False
#can't pin partial dataset
if self.dsetIsPartial[dset] :
return False
#can't pin non-valid dataset
if not self.dsetIsValid[dset]:
return False
self.protectedList.append(dset)
self.protected = self.protected + self.datasetSizes[dset]
if dset in self.wishList:
self.wishList.remove(dset)
return True
def lastCopySpace(self,datasets,nCopyMin):
space = 0
self.dsetLastCopy = {}
for dset in self.datasetSizes.keys():
if dset in self.datasetsToDelete:
continue
dataset = datasets[dset]
remaining = dataset.nSites() - dataset.nBeDeleted()
if remaining <= nCopyMin:
self.dsetLastCopy[dset] = 1
space = space + self.datasetSizes[dset]
self.spaceLCp = space
def setSiteSize(self,size):
self.siteSizeGbV = size
def siteSizeGb(self):
return self.siteSizeGbV
def dsetRank(self,set):
return self.datasetRanks[set]
def dsetSize(self,set):
return self.datasetSizes[set]
def isPartial(self,set):
return self.dsetIsPartial[set]
def siteName(self):
return self.name
def spaceTaken(self):
return self.spaceTakenV
def spaceDeleted(self):
return self.deleted
def spaceProtected(self):
return self.protected
def spaceFree(self):
return self.siteSizeGbV - (self.spaceTakenV - self.deleted)
def spaceLastCp(self):
return self.spaceLCp
def isDeprecated(self,dset):
if dset in self.deprecated:
return True
return False
def spaceDeprecated(self):
size = 0
for dset in self.deprecated:
size = size + self.datasetSizes[dset]
return size
def spaceIncomplete(self):
size = 0;
for dset in self.dsetIsPartial:
if self.dsetIsPartial[dset]:
size = size + self.datasetSizes[dset]
return size
def spaceCustodial(self):
size = 0;
for dset in self.dsetIsCustodial:
if self.dsetIsCustodial[dset]:
size = size + self.datasetSizes[dset]
return size
def spaceUtouchable(self):
size = 0
for dset in self.dsetLastCopy:
size = size + self.datasetSizes[dset]
for dset in self.dsetIsCustodial:
if dset in self.dsetLastCopy:
continue
if self.dsetIsCustodial[dset]:
size = size + self.datasetSizes[dset]
return size
def nsetsDeprecated(self):
nsets = 0
for dset in self.deprecated:
nsets = nsets + 1
return nsets
def hasDataset(self,dset):
if dset in self.datasetRanks:
return True
else:
return False
def willDelete(self,dset):
if dset in self.datasetsToDelete:
return True
else:
return False
def allSets(self):
return sorted(self.datasetRanks.keys(), cmp=self.compare)
def delTargets(self):
return sorted(self.datasetsToDelete, cmp=self.compare)
def protectedSets(self):
return sorted(self.protectedList, cmp=self.compare)
def setSpaceToFree(self,size):
self.space2free = size
def reqTime(self,dset):
return self.dsetReqTime[dset]
def dsetLoadTime(self,dset):
return (self.dsetUpdTime[dset] - self.dsetReqTime[dset])
def spaceUnused(self):
return self.spaceNotUsed
def siteRank(self):
if self.spaceTakenV == 0:
return 0
return self.rankSum/self.spaceTakenV
def medianRank(self):
if len(self.datasetRanks.values()) > 0:
return statistics.median(self.datasetRanks.values())
return 0
def dsetIsStuck(self,dset):
if self.dsetIsDone[dset] == 0:
reqtime = self.dsetReqTime[dset]
if (self.epochTime - reqtime) > 60*60*24*14:
return 1
return 0
def considerForStats(self,dset):
if self.dsetLoadTime(dset) > 60*60*24*14:
return False
if self.dsetLoadTime(dset) <= 0:
return False
if (self.epochTime - self.dsetReqTime[dset]) > 60*60*24*90:
return False
return True
def getDownloadStats(self):
loadSize = 0
loadTime = 0
stuck = 0
for dset in self.datasetSizes:
if self.dsetIsStuck(dset) == 1:
stuck = stuck + 1
continue
if not self.considerForStats(dset):
continue
if self.datasetSizes[dset] > 10:
loadSize = loadSize + self.datasetSizes[dset]
loadTime = loadTime + self.dsetLoadTime(dset)
speed = 0
if loadTime > 0:
speed = loadSize/loadTime*(60*60*24)
return (speed, loadSize, stuck)
def getAverage(self,array):
if len(array) < 3: return 0
sortA = sorted(array)
diff = 100
prevMean = sortA[len(sortA)/2]
prevRms = sortA[len(sortA)-1] - sortA[0]
print sortA
while diff > 0.01:
ave = 0
aveSq = 0
nit = 0
for i in range(1, len(sortA)):
if abs(sortA[i] - prevMean) > 1.6*prevRms:
continue
ave = ave + sortA[i]
aveSq = aveSq + sortA[i]*sortA[i]
nit = nit + 1
ave = ave/nit
rms = math.sqrt(aveSq/nit - ave*ave)
diff = abs(ave - prevMean)/prevMean
prevMean = ave
prevRms = rms
return prevMean
def compare(self,item1, item2):
r1 = self.datasetRanks[item1]
r2 = self.datasetRanks[item2]
if r1 < r2:
return 1
elif r1 > r2:
return -1
else:
return 0
| sidnarayanan/IntelROCCS | Detox/python/siteProperties.py | Python | mit | 12,964 | 0.011571 |
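A toy usage sketch for SiteProperties above, with made-up numbers; the site name, dataset name, sizes and ranks are all assumptions, since the real values come from the site/dataset monitoring pipeline.

# Build one site, register a single dataset, and read back the derived numbers.
site = SiteProperties("T2_US_Example")
site.setSiteSize(1000)                       # site quota in GB (made up)
site.addDataset("/Demo/Sample/AOD",          # dataset name (made up)
                10.0,                        # rank
                200,                         # size in GB
                True, False, False, False,   # valid, partial, custodial, deprecated
                site.epochTime - 86400,      # request time
                site.epochTime,              # update time
                1, 1)                        # wasused, isdone
print site.spaceTaken(), site.spaceFree(), site.siteRank()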
# -*- coding: utf-8 -*-
#
# Copyright 2013 - Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pecan import rest
from pecan import expose
from pecan import request
from mistral.openstack.common import log as logging
from mistral.db import api as db_api
from mistral.services import scheduler
LOG = logging.getLogger(__name__)
class WorkbookDefinitionController(rest.RestController):
@expose()
def get(self, workbook_name):
LOG.debug("Fetch workbook definition [workbook_name=%s]" %
workbook_name)
return db_api.workbook_definition_get(workbook_name)
@expose(content_type="text/plain")
def put(self, workbook_name):
text = request.text
LOG.debug("Update workbook definition [workbook_name=%s, text=%s]" %
(workbook_name, text))
wb = db_api.workbook_definition_put(workbook_name, text)
scheduler.create_associated_triggers(wb)
return wb['definition']
| TimurNurlygayanov/mistral | mistral/api/controllers/v1/workbook_definition.py | Python | apache-2.0 | 1,511 | 0 |
from warnings import warn
from beaker.crypto.pbkdf2 import PBKDF2, strxor
from beaker.crypto.util import hmac, sha1, hmac_sha1, md5
from beaker import util
keyLength = None
if util.jython:
try:
from beaker.crypto.jcecrypto import getKeyLength, aesEncrypt
keyLength = getKeyLength()
except ImportError:
pass
else:
try:
from beaker.crypto.pycrypto import getKeyLength, aesEncrypt, aesDecrypt
keyLength = getKeyLength()
except ImportError:
pass
if not keyLength:
has_aes = False
else:
has_aes = True
if has_aes and keyLength < 32:
warn('Crypto implementation only supports key lengths up to %d bits. '
'Generated session cookies may be incompatible with other '
'environments' % (keyLength * 8))
def generateCryptoKeys(master_key, salt, iterations):
# NB: We XOR parts of the keystream into the randomly-generated parts, just
# in case os.urandom() isn't as random as it should be. Note that if
# os.urandom() returns truly random data, this will have no effect on the
# overall security.
keystream = PBKDF2(master_key, salt, iterations=iterations)
cipher_key = keystream.read(keyLength)
return cipher_key
| Arno-Nymous/pyload | module/lib/beaker/crypto/__init__.py | Python | gpl-3.0 | 1,233 | 0 |
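A hedged usage sketch of generateCryptoKeys() above. It assumes one of the AES backends imported at module load is available so that keyLength is set; the master key and iteration count are made up.

# Derive a fixed-length cipher key from a master key and a random salt.
import os
salt = os.urandom(16)
cipher_key = generateCryptoKeys('made-up-master-key', salt, 2)
print(len(cipher_key))  # equals keyLength when a crypto backend is present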
import discord, os, logging
from discord.ext import commands
from .utils import checks
from .utils.dataIO import dataIO
from .utils.chat_formatting import pagify, box
#The Tasty Jaffa
#Requested by Freud
def get_role(ctx, role_id):
roles = set(ctx.message.server.roles)
for role in roles:
if role.id == role_id:
return role
return None
class say:
def __init__(self, bot):
self.bot = bot
self.settings = dataIO.load_json("data/Tasty/say/settings.json")
print("Testing values in data/Tasty/say")
for server in self.bot.servers:
try:
self.settings[server.id]["ROLE"]
self.settings[server.id]["USERS"]
except:
self.settings[server.id] = {}
self.settings[server.id]["ROLE"] = None
self.settings[server.id]["USERS"] = []
@commands.group(name="setsay", pass_context=True, no_pm=True, invoke_without_command=True)
async def sayset(self, ctx):
"""The 'Say' command set
        add - Adds a user, giving them the ability to use the speak command
        list - Lists allowed users and the permitted role
        remove - Removes a user's ability to use the speak command
        role - Sets a permitted role whose members can use the speak command"""
if ctx.invoked_subcommand is None:
            await self.bot.send_message(ctx.message.channel, "```Please use the speak command with: \n add - Adds a **user** with the ability to use the speak command \n remove - Removes a **user**'s ability to use the speak command \n role - Adds a role whose members can use the speak command \n list - Lists permitted users and the permitted role```")
@sayset.command(name="list", pass_context=True)
@checks.admin_or_permissions()
    async def say_list(self,ctx):
        """Lists permitted users and the permitted role"""
names = []
for user_id in self.settings[ctx.message.server.id]["USERS"]:
names.append(discord.utils.get(self.bot.get_all_members(), id=user_id).name)
msg = ("+ Permited\n"
"{}\n\n"
"".format(", ".join(sorted(names))))
for page in pagify(msg, [" "], shorten_by=16):
await self.bot.say(box(page.lstrip(" "), lang="diff"))
#gets the name of the role and displays it
if self.settings[ctx.message.server.id]["ROLE"] is not None:
            await self.bot.send_message(ctx.message.channel, "Permitted Role: **{}**".format(get_role(ctx, self.settings[ctx.message.server.id]["ROLE"]).name))
else:
await self.bot.send_message(ctx.message.channel, "No role has permission")
@sayset.command(name="add", pass_context=True, no_pm=True)
@checks.admin_or_permissions()
    async def say_add (self, ctx, user: discord.Member):
        """Adds a [user], giving them the ability to use the speak command"""
self.settings[ctx.message.server.id]["USERS"].append(user.id)
self.save()
await self.bot.send_message(ctx.message.channel, "Done!")
@sayset.command(name="remove", pass_context=True, no_pm=True)
@checks.admin_or_permissions()
    async def say_remove (self, ctx, user: discord.Member):
        """Removes a [user]'s ability to use the speak command"""
try:
self.settings[ctx.message.server.id]["USERS"].remove(user.id)
self.save()
await self.bot.send_message(ctx.message.channel, "Done!")
except:
            await self.bot.send_message(ctx.message.channel, "Are you sure that {} had the permission in the first place?".format(user.mention))
@sayset.command(name="role", pass_context=True)
@checks.admin_or_permissions()
async def say_role(self, ctx, role_name:str):
"""Sets the permitted role"""
role = discord.utils.get(ctx.message.server.roles, name=role_name)
if role is not None:
self.settings[ctx.message.server.id]["ROLE"] = role.id
self.save()
await self.bot.send_message(ctx.message.channel, "Role added!")
else:
await self.bot.send_message(ctx.message.channel, "Role not found!")
@commands.command(name="speak", pass_context=True, no_pm =True)
async def bot_say(self, ctx, *, text):
"""The bot repeats what you tell it to"""
        if '@everyone' in ctx.message.content or '@here' in ctx.message.content:
await self.bot.send_message(ctx.message.channel, "Woh! {}, please don't do that".format(ctx.message.author.mention))
return
        #If there are no mention objects, @everyone/@here must still be tested for with a string check
if ctx.message.channel.permissions_for(ctx.message.server.me).manage_messages is not True:
await self.bot.say("This command requires the **Manage Messages** permission.")
return
#checks if they are allowed (role or permitted)
if ctx.message.author.id in self.settings[ctx.message.server.id]["USERS"] or get_role(ctx, self.settings[ctx.message.server.id]["ROLE"]) in ctx.message.author.roles:
await self.bot.delete_message(ctx.message)
await self.bot.send_message(ctx.message.channel, text)
else:
await self.bot.say("You need to be given access to this command")
def save(self):
dataIO.save_json("data/Tasty/say/settings.json", self.settings)
async def server_join(self, server):
self.settings[server.id]={
"ROLE":None,
"USERS":[],
}
self.save()
def check_folders(): #Creates a folder
if not os.path.exists("data/Tasty/say"):
print("Creating data/Tasty/say folder...")
os.makedirs("data/Tasty/say")
def check_files(): #Creates json files in the folder
if not dataIO.is_valid_json("data/Tasty/say/settings.json"):
print("Creating empty settings.json...")
dataIO.save_json("data/Tasty/say/settings.json", {})
def setup(bot):
check_folders()
check_files()
n = say(bot)
bot.add_listener(n.server_join, "on_server_join")
bot.add_cog(n)
| The-Tasty-Jaffa/Tasty-Jaffa-cogs | say/say.py | Python | gpl-3.0 | 6,218 | 0.008363 |
# coding: utf-8
from boto.s3.bucket import Bucket
from thumbor.utils import logger
from tornado.concurrent import return_future
import urllib2
import thumbor.loaders.http_loader as http_loader
from tc_aws.aws.connection import get_connection
def _get_bucket(url, root_path=None):
"""
Returns a tuple containing bucket name and bucket path.
url: A string of the format /bucket.name/file/path/in/bucket
"""
url_by_piece = url.lstrip("/").split("/")
bucket_name = url_by_piece[0]
if root_path is not None:
url_by_piece[0] = root_path
else:
url_by_piece = url_by_piece[1:]
bucket_path = "/".join(url_by_piece)
return bucket_name, bucket_path
def _normalize_url(url):
"""
:param url:
    :return: exactly the same url, since we only use the http loader if the url starts with the http prefix.
"""
return url
def _validate_bucket(context, bucket):
allowed_buckets = context.config.get('S3_ALLOWED_BUCKETS', default=None)
return not allowed_buckets or bucket in allowed_buckets
@return_future
def load(context, url, callback):
enable_http_loader = context.config.get('AWS_ENABLE_HTTP_LOADER', default=False)
if enable_http_loader and url.startswith('http'):
return http_loader.load_sync(context, url, callback, normalize_url_func=_normalize_url)
url = urllib2.unquote(url)
bucket = context.config.get('S3_LOADER_BUCKET', default=None)
if not bucket:
bucket, url = _get_bucket(url, root_path=context.config.S3_LOADER_ROOT_PATH)
if _validate_bucket(context, bucket):
bucket_loader = Bucket(
connection=get_connection(context),
name=bucket
)
file_key = None
try:
file_key = bucket_loader.get_key(url)
except Exception, e:
logger.warn("ERROR retrieving image from S3 {0}: {1}".format(url, str(e)))
if file_key:
callback(file_key.read())
return
callback(None)
| guilhermef/aws | tc_aws/loaders/s3_loader.py | Python | mit | 2,000 | 0.0025 |
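_get_bucket() above is a pure helper, so its behaviour can be shown directly. The bucket and object names below are made up; the second call shows how a root_path (as set via S3_LOADER_ROOT_PATH) replaces the bucket segment of the path.

# Split a request path into (bucket, key), with and without a root_path override.
print(_get_bucket("/my-bucket/images/photo.jpg"))
# -> ('my-bucket', 'images/photo.jpg')
print(_get_bucket("/my-bucket/images/photo.jpg", root_path="thumbs"))
# -> ('my-bucket', 'thumbs/images/photo.jpg')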
import mock
from mock import call
import time
from typing import Any, Dict, Union, SupportsInt, Text
import gcm
from django.test import TestCase
from django.conf import settings
from zerver.models import PushDeviceToken, UserProfile, Message
from zerver.models import get_user_profile_by_email, receives_online_notifications, \
receives_offline_notifications
from zerver.lib import push_notifications as apn
from zerver.lib.test_classes import (
ZulipTestCase,
)
class MockRedis(object):
data = {} # type: Dict[str, Any]
def hgetall(self, key):
# type: (str) -> Any
return self.data.get(key)
def exists(self, key):
# type: (str) -> bool
return key in self.data
def hmset(self, key, data):
# type: (str, Dict[Any, Any]) -> None
self.data[key] = data
def delete(self, key):
# type: (str) -> None
if self.exists(key):
del self.data[key]
def expire(self, *args, **kwargs):
# type: (*Any, **Any) -> None
pass
class PushNotificationTest(TestCase):
def setUp(self):
# type: () -> None
email = 'hamlet@zulip.com'
apn.connection = apn.get_connection('fake-cert', 'fake-key')
self.redis_client = apn.redis_client = MockRedis() # type: ignore
apn.dbx_connection = apn.get_connection('fake-cert', 'fake-key')
self.user_profile = get_user_profile_by_email(email)
self.tokens = [u'aaaa', u'bbbb']
for token in self.tokens:
PushDeviceToken.objects.create(
kind=PushDeviceToken.APNS,
token=apn.hex_to_b64(token),
user=self.user_profile,
ios_app_id=settings.ZULIP_IOS_APP_ID)
def tearDown(self):
# type: () -> None
for i in [100, 200]:
self.redis_client.delete(apn.get_apns_key(i))
class APNsMessageTest(PushNotificationTest):
@mock.patch('random.getrandbits', side_effect=[100, 200])
def test_apns_message(self, mock_getrandbits):
# type: (mock.MagicMock) -> None
apn.APNsMessage(self.user_profile.id, self.tokens, alert="test")
data = self.redis_client.hgetall(apn.get_apns_key(100))
self.assertEqual(data['token'], 'aaaa')
self.assertEqual(int(data['user_id']), self.user_profile.id)
data = self.redis_client.hgetall(apn.get_apns_key(200))
self.assertEqual(data['token'], 'bbbb')
self.assertEqual(int(data['user_id']), self.user_profile.id)
class ResponseListenerTest(PushNotificationTest):
def get_error_response(self, **kwargs):
# type: (**Any) -> Dict[str, SupportsInt]
er = {'identifier': 0, 'status': 0} # type: Dict[str, SupportsInt]
er.update({k: v for k, v in kwargs.items() if k in er})
return er
def get_cache_value(self):
# type: () -> Dict[str, Union[str, int]]
return {'token': 'aaaa', 'user_id': self.user_profile.id}
@mock.patch('logging.warn')
def test_cache_does_not_exist(self, mock_warn):
# type: (mock.MagicMock) -> None
err_rsp = self.get_error_response(identifier=100, status=1)
apn.response_listener(err_rsp)
msg = "APNs key, apns:100, doesn't not exist."
mock_warn.assert_called_once_with(msg)
@mock.patch('logging.warn')
def test_cache_exists(self, mock_warn):
# type: (mock.MagicMock) -> None
self.redis_client.hmset(apn.get_apns_key(100), self.get_cache_value())
err_rsp = self.get_error_response(identifier=100, status=1)
apn.response_listener(err_rsp)
b64_token = apn.hex_to_b64('aaaa')
errmsg = apn.ERROR_CODES[int(err_rsp['status'])]
msg = ("APNS: Failed to deliver APNS notification to %s, "
"reason: %s" % (b64_token, errmsg))
mock_warn.assert_called_once_with(msg)
@mock.patch('logging.warn')
def test_error_code_eight(self, mock_warn):
# type: (mock.MagicMock) -> None
self.redis_client.hmset(apn.get_apns_key(100), self.get_cache_value())
err_rsp = self.get_error_response(identifier=100, status=8)
b64_token = apn.hex_to_b64('aaaa')
self.assertEqual(PushDeviceToken.objects.filter(
user=self.user_profile, token=b64_token).count(), 1)
apn.response_listener(err_rsp)
self.assertEqual(mock_warn.call_count, 2)
self.assertEqual(PushDeviceToken.objects.filter(
user=self.user_profile, token=b64_token).count(), 0)
class TestPushApi(ZulipTestCase):
def test_push_api(self):
# type: () -> None
email = "cordelia@zulip.com"
user = get_user_profile_by_email(email)
self.login(email)
endpoints = [
('/json/users/me/apns_device_token', 'apple-token'),
('/json/users/me/android_gcm_reg_id', 'android-token'),
]
# Test error handling
for endpoint, _ in endpoints:
# Try adding/removing tokens that are too big...
broken_token = "x" * 5000 # too big
result = self.client_post(endpoint, {'token': broken_token})
self.assert_json_error(result, 'Empty or invalid length token')
result = self.client_delete(endpoint, {'token': broken_token})
self.assert_json_error(result, 'Empty or invalid length token')
# Try to remove a non-existent token...
result = self.client_delete(endpoint, {'token': 'non-existent token'})
self.assert_json_error(result, 'Token does not exist')
# Add tokens
for endpoint, token in endpoints:
# Test that we can push twice
result = self.client_post(endpoint, {'token': token})
self.assert_json_success(result)
result = self.client_post(endpoint, {'token': token})
self.assert_json_success(result)
tokens = list(PushDeviceToken.objects.filter(user=user, token=token))
self.assertEqual(len(tokens), 1)
self.assertEqual(tokens[0].token, token)
# User should have tokens for both devices now.
tokens = list(PushDeviceToken.objects.filter(user=user))
self.assertEqual(len(tokens), 2)
# Remove tokens
for endpoint, token in endpoints:
result = self.client_delete(endpoint, {'token': token})
self.assert_json_success(result)
tokens = list(PushDeviceToken.objects.filter(user=user, token=token))
self.assertEqual(len(tokens), 0)
class SendNotificationTest(PushNotificationTest):
@mock.patch('logging.warn')
@mock.patch('logging.info')
@mock.patch('zerver.lib.push_notifications._do_push_to_apns_service')
    def test_send_apple_push_notification(self, mock_send, mock_info, mock_warn):
# type: (mock.MagicMock, mock.MagicMock, mock.MagicMock) -> None
def test_send(user_id, message, alert):
# type: (int, Message, str) -> None
self.assertEqual(user_id, self.user_profile.id)
self.assertEqual(set(message.tokens), set(self.tokens))
mock_send.side_effect = test_send
apn.send_apple_push_notification_to_user(self.user_profile, "test alert")
self.assertEqual(mock_send.call_count, 1)
@mock.patch('apns.GatewayConnection.send_notification_multiple')
def test_do_push_to_apns_service(self, mock_push):
# type: (mock.MagicMock) -> None
msg = apn.APNsMessage(self.user_profile.id, self.tokens, alert="test")
def test_push(message):
# type: (Message) -> None
self.assertIs(message, msg.get_frame())
mock_push.side_effect = test_push
apn._do_push_to_apns_service(self.user_profile.id, msg, apn.connection)
@mock.patch('logging.warn')
@mock.patch('logging.info')
@mock.patch('apns.GatewayConnection.send_notification_multiple')
def test_connection_single_none(self, mock_push, mock_info, mock_warn):
# type: (mock.MagicMock, mock.MagicMock, mock.MagicMock) -> None
apn.connection = None
apn.send_apple_push_notification_to_user(self.user_profile, "test alert")
@mock.patch('logging.error')
@mock.patch('apns.GatewayConnection.send_notification_multiple')
def test_connection_both_none(self, mock_push, mock_error):
# type: (mock.MagicMock, mock.MagicMock) -> None
apn.connection = None
apn.dbx_connection = None
apn.send_apple_push_notification_to_user(self.user_profile, "test alert")
class APNsFeedbackTest(PushNotificationTest):
@mock.patch('logging.info')
@mock.patch('apns.FeedbackConnection.items')
def test_feedback(self, mock_items, mock_info):
# type: (mock.MagicMock, mock.MagicMock) -> None
update_time = apn.timestamp_to_datetime(int(time.time()) - 10000)
PushDeviceToken.objects.all().update(last_updated=update_time)
mock_items.return_value = [
('aaaa', int(time.time())),
]
self.assertEqual(PushDeviceToken.objects.all().count(), 2)
apn.check_apns_feedback()
self.assertEqual(PushDeviceToken.objects.all().count(), 1)
class GCMTest(PushNotificationTest):
def setUp(self):
# type: () -> None
super(GCMTest, self).setUp()
apn.gcm = gcm.GCM('fake key')
self.gcm_tokens = [u'1111', u'2222']
for token in self.gcm_tokens:
PushDeviceToken.objects.create(
kind=PushDeviceToken.GCM,
token=apn.hex_to_b64(token),
user=self.user_profile,
ios_app_id=None)
def get_gcm_data(self, **kwargs):
# type: (**Any) -> Dict[str, Any]
data = {
'key 1': 'Data 1',
'key 2': 'Data 2',
}
data.update(kwargs)
return data
class GCMNotSetTest(GCMTest):
@mock.patch('logging.error')
def test_gcm_is_none(self, mock_error):
# type: (mock.MagicMock) -> None
apn.gcm = None
apn.send_android_push_notification_to_user(self.user_profile, {})
mock_error.assert_called_with("Attempting to send a GCM push "
"notification, but no API key was "
"configured")
class GCMSuccessTest(GCMTest):
@mock.patch('logging.warning')
@mock.patch('logging.info')
@mock.patch('gcm.GCM.json_request')
def test_success(self, mock_send, mock_info, mock_warning):
# type: (mock.MagicMock, mock.MagicMock, mock.MagicMock) -> None
res = {}
res['success'] = {token: ind for ind, token in enumerate(self.gcm_tokens)}
mock_send.return_value = res
data = self.get_gcm_data()
apn.send_android_push_notification_to_user(self.user_profile, data)
self.assertEqual(mock_info.call_count, 2)
c1 = call("GCM: Sent 1111 as 0")
c2 = call("GCM: Sent 2222 as 1")
mock_info.assert_has_calls([c1, c2], any_order=True)
mock_warning.assert_not_called()
class GCMCanonicalTest(GCMTest):
@mock.patch('logging.warning')
@mock.patch('gcm.GCM.json_request')
def test_equal(self, mock_send, mock_warning):
# type: (mock.MagicMock, mock.MagicMock) -> None
res = {}
res['canonical'] = {1: 1}
mock_send.return_value = res
data = self.get_gcm_data()
apn.send_android_push_notification_to_user(self.user_profile, data)
mock_warning.assert_called_once_with("GCM: Got canonical ref but it "
"already matches our ID 1!")
@mock.patch('logging.warning')
@mock.patch('gcm.GCM.json_request')
def test_pushdevice_not_present(self, mock_send, mock_warning):
# type: (mock.MagicMock, mock.MagicMock) -> None
res = {}
t1 = apn.hex_to_b64(u'1111')
t2 = apn.hex_to_b64(u'3333')
res['canonical'] = {t1: t2}
mock_send.return_value = res
def get_count(hex_token):
# type: (Text) -> int
token = apn.hex_to_b64(hex_token)
return PushDeviceToken.objects.filter(
token=token, kind=PushDeviceToken.GCM).count()
self.assertEqual(get_count(u'1111'), 1)
self.assertEqual(get_count(u'3333'), 0)
data = self.get_gcm_data()
apn.send_android_push_notification_to_user(self.user_profile, data)
msg = ("GCM: Got canonical ref %s "
"replacing %s but new ID not "
"registered! Updating.")
mock_warning.assert_called_once_with(msg % (t2, t1))
self.assertEqual(get_count(u'1111'), 0)
self.assertEqual(get_count(u'3333'), 1)
@mock.patch('logging.info')
@mock.patch('gcm.GCM.json_request')
def test_pushdevice_different(self, mock_send, mock_info):
# type: (mock.MagicMock, mock.MagicMock) -> None
res = {}
old_token = apn.hex_to_b64(u'1111')
new_token = apn.hex_to_b64(u'2222')
res['canonical'] = {old_token: new_token}
mock_send.return_value = res
def get_count(hex_token):
# type: (Text) -> int
token = apn.hex_to_b64(hex_token)
return PushDeviceToken.objects.filter(
token=token, kind=PushDeviceToken.GCM).count()
self.assertEqual(get_count(u'1111'), 1)
self.assertEqual(get_count(u'2222'), 1)
data = self.get_gcm_data()
apn.send_android_push_notification_to_user(self.user_profile, data)
mock_info.assert_called_once_with(
"GCM: Got canonical ref %s, dropping %s" % (new_token, old_token))
self.assertEqual(get_count(u'1111'), 0)
self.assertEqual(get_count(u'2222'), 1)
class GCMNotRegisteredTest(GCMTest):
@mock.patch('logging.info')
@mock.patch('gcm.GCM.json_request')
def test_not_registered(self, mock_send, mock_info):
# type: (mock.MagicMock, mock.MagicMock) -> None
res = {}
token = apn.hex_to_b64(u'1111')
res['errors'] = {'NotRegistered': [token]}
mock_send.return_value = res
def get_count(hex_token):
# type: (Text) -> int
token = apn.hex_to_b64(hex_token)
return PushDeviceToken.objects.filter(
token=token, kind=PushDeviceToken.GCM).count()
self.assertEqual(get_count(u'1111'), 1)
data = self.get_gcm_data()
apn.send_android_push_notification_to_user(self.user_profile, data)
mock_info.assert_called_once_with("GCM: Removing %s" % (token,))
self.assertEqual(get_count(u'1111'), 0)
class GCMFailureTest(GCMTest):
@mock.patch('logging.warning')
@mock.patch('gcm.GCM.json_request')
def test_failure(self, mock_send, mock_warn):
# type: (mock.MagicMock, mock.MagicMock) -> None
res = {}
token = apn.hex_to_b64(u'1111')
res['errors'] = {'Failed': [token]}
mock_send.return_value = res
data = self.get_gcm_data()
apn.send_android_push_notification_to_user(self.user_profile, data)
c1 = call("GCM: Delivery to %s failed: Failed" % (token,))
mock_warn.assert_has_calls([c1], any_order=True)
class TestReceivesNotificationsFunctions(ZulipTestCase):
def setUp(self):
# type: () -> None
email = "cordelia@zulip.com"
self.user = get_user_profile_by_email(email)
def test_receivers_online_notifications_when_user_is_a_bot(self):
# type: () -> None
self.user.is_bot = True
self.user.enable_online_push_notifications = True
self.assertFalse(receives_online_notifications(self.user))
self.user.enable_online_push_notifications = False
self.assertFalse(receives_online_notifications(self.user))
def test_receivers_online_notifications_when_user_is_not_a_bot(self):
# type: () -> None
self.user.is_bot = False
self.user.enable_online_push_notifications = True
self.assertTrue(receives_online_notifications(self.user))
self.user.enable_online_push_notifications = False
self.assertFalse(receives_online_notifications(self.user))
def test_receivers_offline_notifications_when_user_is_a_bot(self):
# type: () -> None
self.user.is_bot = True
self.user.enable_offline_email_notifications = True
self.user.enable_offline_push_notifications = True
self.assertFalse(receives_offline_notifications(self.user))
self.user.enable_offline_email_notifications = False
self.user.enable_offline_push_notifications = False
self.assertFalse(receives_offline_notifications(self.user))
self.user.enable_offline_email_notifications = True
self.user.enable_offline_push_notifications = False
self.assertFalse(receives_offline_notifications(self.user))
self.user.enable_offline_email_notifications = False
self.user.enable_offline_push_notifications = True
self.assertFalse(receives_offline_notifications(self.user))
def test_receivers_offline_notifications_when_user_is_not_a_bot(self):
# type: () -> None
self.user.is_bot = False
self.user.enable_offline_email_notifications = True
self.user.enable_offline_push_notifications = True
self.assertTrue(receives_offline_notifications(self.user))
self.user.enable_offline_email_notifications = False
self.user.enable_offline_push_notifications = False
self.assertFalse(receives_offline_notifications(self.user))
self.user.enable_offline_email_notifications = True
self.user.enable_offline_push_notifications = False
self.assertTrue(receives_offline_notifications(self.user))
self.user.enable_offline_email_notifications = False
self.user.enable_offline_push_notifications = True
self.assertTrue(receives_offline_notifications(self.user))
| dawran6/zulip | zerver/tests/test_push_notifications.py | Python | apache-2.0 | 17,956 | 0.001337 |
from ccswm.statApi.models import *
from rest_framework import serializers
class EpisodeSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Episode
fields = ('id', 'url', 'season', 'episodeNumber', 'location')
class StarterSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Starter
fields = ('id', 'url', 'protein', 'proteinStyle', 'side', 'sideStyle')
class EntreeSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Entree
fields = ('id', 'url', 'protein', 'proteinStyle', 'side', 'sideStyle')
class DessertSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Dessert
fields = ('id', 'url', 'main', 'secondary')
class EntertainmentSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Entertainment
fields = ('id', 'url', 'description')
class CoupleMealSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = CoupleMeal
fields = ('id', 'url', 'starter', 'entree', 'dessert', 'entertainment')
class CoupleSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Couple
fields = ('id', 'url', 'nightNumber', 'ageRange', 'sexPref', 'mrtlStat', 'mrtlLength', 'theme', 'foodEth', 'episode', 'coupleMeal')
class ResultsSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Results
fields = ('id', 'url', 'couple', 'oppAVote', 'oppBVote', 'totalScore', 'outcome')
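# Hedged usage sketch, not part of the original module: one way these
# serializers are typically exposed through Django REST framework.  The
# viewset below is illustrative; only the models and serializers above are
# real.  A HyperlinkedModelSerializer needs the request in its serializer
# context to build the `url` fields, which a ModelViewSet supplies.
from rest_framework import viewsets
class ResultsViewSet(viewsets.ReadOnlyModelViewSet):
    queryset = Results.objects.all()
    serializer_class = ResultsSerializer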
| RussellRiesJr/CoupleComeStatWithMe | ccswm/statApi/serializers.py | Python | mit | 1,581 | 0.001898 |
from typing import Optional
from fastapi import FastAPI
from pydantic import BaseModel, EmailStr
app = FastAPI()
class UserBase(BaseModel):
username: str
email: EmailStr
full_name: Optional[str] = None
class UserIn(UserBase):
password: str
class UserOut(UserBase):
pass
class UserInDB(UserBase):
hashed_password: str
def fake_password_hasher(raw_password: str):
return "supersecret" + raw_password
def fake_save_user(user_in: UserIn):
hashed_password = fake_password_hasher(user_in.password)
user_in_db = UserInDB(**user_in.dict(), hashed_password=hashed_password)
print("User saved! ..not really")
return user_in_db
@app.post("/user/", response_model=UserOut)
async def create_user(user_in: UserIn):
user_saved = fake_save_user(user_in)
return user_saved
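# Hedged usage sketch, not part of the original tutorial: exercising the
# endpoint with FastAPI's TestClient to show that the response is filtered
# through UserOut, so neither `password` nor `hashed_password` is returned.
# The sample values are illustrative.
if __name__ == "__main__":
    from fastapi.testclient import TestClient
    client = TestClient(app)
    response = client.post(
        "/user/",
        json={"username": "alice", "email": "alice@example.com", "password": "secret"},
    )
    # Expected shape: {'username': 'alice', 'email': 'alice@example.com', 'full_name': None}
    print(response.json())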
| tiangolo/fastapi | docs_src/extra_models/tutorial002.py | Python | mit | 824 | 0 |
__version__ = '0.1'
from geholproxy import *
from geholexceptions import *
| Psycojoker/geholparser | src/gehol/__init__.py | Python | mit | 76 | 0 |
# Copyright 2007 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
"""Abstract Base Classes (ABCs) for collections, according to PEP 3119.
DON'T USE THIS MODULE DIRECTLY! The classes here should be imported
via collections; they are defined here only to alleviate certain
bootstrapping issues. Unit tests are in test_collections.
"""
from abc import ABCMeta, abstractmethod
import sys
__all__ = ["Hashable", "Iterable", "Iterator",
"Sized", "Container", "Callable",
"Set", "MutableSet",
"Mapping", "MutableMapping",
"MappingView", "KeysView", "ItemsView", "ValuesView",
"Sequence", "MutableSequence",
]
### ONE-TRICK PONIES ###
class Hashable:
__metaclass__ = ABCMeta
@abstractmethod
def __hash__(self):
return 0
@classmethod
def __subclasshook__(cls, C):
if cls is Hashable:
for B in C.__mro__:
if "__hash__" in B.__dict__:
if B.__dict__["__hash__"]:
return True
break
return NotImplemented
class Iterable:
__metaclass__ = ABCMeta
@abstractmethod
def __iter__(self):
while False:
yield None
@classmethod
def __subclasshook__(cls, C):
if cls is Iterable:
if any("__iter__" in B.__dict__ for B in C.__mro__):
return True
return NotImplemented
Iterable.register(str)
class Iterator(Iterable):
@abstractmethod
def __next__(self):
raise StopIteration
def __iter__(self):
return self
@classmethod
def __subclasshook__(cls, C):
if cls is Iterator:
if any("next" in B.__dict__ for B in C.__mro__):
return True
return NotImplemented
class Sized:
__metaclass__ = ABCMeta
@abstractmethod
def __len__(self):
return 0
@classmethod
def __subclasshook__(cls, C):
if cls is Sized:
if any("__len__" in B.__dict__ for B in C.__mro__):
return True
return NotImplemented
class Container:
__metaclass__ = ABCMeta
@abstractmethod
def __contains__(self, x):
return False
@classmethod
def __subclasshook__(cls, C):
if cls is Container:
if any("__contains__" in B.__dict__ for B in C.__mro__):
return True
return NotImplemented
class Callable:
__metaclass__ = ABCMeta
@abstractmethod
def __call__(self, *args, **kwds):
return False
@classmethod
def __subclasshook__(cls, C):
if cls is Callable:
if any("__call__" in B.__dict__ for B in C.__mro__):
return True
return NotImplemented
### SETS ###
class Set(Sized, Iterable, Container):
"""A set is a finite, iterable container.
This class provides concrete generic implementations of all
methods except for __contains__, __iter__ and __len__.
To override the comparisons (presumably for speed, as the
semantics are fixed), all you have to do is redefine __le__ and
then the other operations will automatically follow suit.
"""
def __le__(self, other):
if not isinstance(other, Set):
return NotImplemented
if len(self) > len(other):
return False
for elem in self:
if elem not in other:
return False
return True
def __lt__(self, other):
if not isinstance(other, Set):
return NotImplemented
return len(self) < len(other) and self.__le__(other)
def __gt__(self, other):
if not isinstance(other, Set):
return NotImplemented
return other < self
def __ge__(self, other):
if not isinstance(other, Set):
return NotImplemented
return other <= self
def __eq__(self, other):
if not isinstance(other, Set):
return NotImplemented
return len(self) == len(other) and self.__le__(other)
def __ne__(self, other):
return not (self == other)
@classmethod
def _from_iterable(cls, it):
'''Construct an instance of the class from any iterable input.
Must override this method if the class constructor signature
does not accept an iterable for an input.
'''
return cls(it)
def __and__(self, other):
if not isinstance(other, Iterable):
return NotImplemented
return self._from_iterable(value for value in other if value in self)
def isdisjoint(self, other):
for value in other:
if value in self:
return False
return True
def __or__(self, other):
if not isinstance(other, Iterable):
return NotImplemented
chain = (e for s in (self, other) for e in s)
return self._from_iterable(chain)
def __sub__(self, other):
if not isinstance(other, Set):
if not isinstance(other, Iterable):
return NotImplemented
other = self._from_iterable(other)
return self._from_iterable(value for value in self
if value not in other)
def __xor__(self, other):
if not isinstance(other, Set):
if not isinstance(other, Iterable):
return NotImplemented
other = self._from_iterable(other)
return (self - other) | (other - self)
# Sets are not hashable by default, but subclasses can change this
__hash__ = None
def _hash(self):
"""Compute the hash value of a set.
Note that we don't define __hash__: not all sets are hashable.
But if you define a hashable set type, its __hash__ should
call this function.
        This must be compatible with __eq__.
All sets ought to compare equal if they contain the same
elements, regardless of how they are implemented, and
regardless of the order of the elements; so there's not much
freedom for __eq__ or __hash__. We match the algorithm used
by the built-in frozenset type.
"""
MAX = sys.maxint
MASK = 2 * MAX + 1
n = len(self)
h = 1927868237 * (n + 1)
h &= MASK
for x in self:
hx = hash(x)
h ^= (hx ^ (hx << 16) ^ 89869747) * 3644798167
h &= MASK
h = h * 69069 + 907133923
h &= MASK
if h > MAX:
h -= MASK + 1
if h == -1:
h = 590923713
return h
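# Hedged illustration, not part of the original module: a minimal immutable
# set built on the ABCs above.  Set supplies the comparison operators and the
# set algebra, so only __contains__, __iter__ and __len__ are needed; __hash__
# delegates to the shared _hash() helper as recommended in its docstring.
class _FrozenSetSketch(Set, Hashable):
    def __init__(self, iterable=()):
        self._elements = list(set(iterable))
    def __contains__(self, value):
        return value in self._elements
    def __iter__(self):
        return iter(self._elements)
    def __len__(self):
        return len(self._elements)
    __hash__ = Set._hash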
Set.register(frozenset)
class MutableSet(Set):
@abstractmethod
def add(self, value):
"""Return True if it was added, False if already there."""
raise NotImplementedError
@abstractmethod
def discard(self, value):
"""Return True if it was deleted, False if not there."""
raise NotImplementedError
def remove(self, value):
"""Remove an element. If not a member, raise a KeyError."""
if value not in self:
raise KeyError(value)
self.discard(value)
def pop(self):
"""Return the popped value. Raise KeyError if empty."""
it = iter(self)
try:
value = it.__next__()
except StopIteration:
raise KeyError
self.discard(value)
return value
def clear(self):
"""This is slow (creates N new iterators!) but effective."""
try:
while True:
self.pop()
except KeyError:
pass
def __ior__(self, it):
for value in it:
self.add(value)
return self
def __iand__(self, c):
for value in self:
if value not in c:
self.discard(value)
return self
def __ixor__(self, it):
if not isinstance(it, Set):
it = self._from_iterable(it)
for value in it:
if value in self:
self.discard(value)
else:
self.add(value)
return self
def __isub__(self, it):
for value in it:
self.discard(value)
return self
MutableSet.register(set)
### MAPPINGS ###
class Mapping(Sized, Iterable, Container):
@abstractmethod
def __getitem__(self, key):
raise KeyError
def get(self, key, default=None):
try:
return self[key]
except KeyError:
return default
def __contains__(self, key):
try:
self[key]
except KeyError:
return False
else:
return True
def iterkeys(self):
return iter(self)
def itervalues(self):
for key in self:
yield self[key]
def iteritems(self):
for key in self:
yield (key, self[key])
def keys(self):
return list(self)
def items(self):
return [(key, self[key]) for key in self]
def values(self):
return [self[key] for key in self]
# Mappings are not hashable by default, but subclasses can change this
__hash__ = None
def __eq__(self, other):
return isinstance(other, Mapping) and \
dict(self.items()) == dict(other.items())
def __ne__(self, other):
return not (self == other)
class MappingView(Sized):
def __init__(self, mapping):
self._mapping = mapping
def __len__(self):
return len(self._mapping)
class KeysView(MappingView, Set):
def __contains__(self, key):
return key in self._mapping
def __iter__(self):
for key in self._mapping:
yield key
class ItemsView(MappingView, Set):
def __contains__(self, item):
key, value = item
try:
v = self._mapping[key]
except KeyError:
return False
else:
return v == value
def __iter__(self):
for key in self._mapping:
yield (key, self._mapping[key])
class ValuesView(MappingView):
def __contains__(self, value):
for key in self._mapping:
if value == self._mapping[key]:
return True
return False
def __iter__(self):
for key in self._mapping:
yield self._mapping[key]
class MutableMapping(Mapping):
@abstractmethod
def __setitem__(self, key, value):
raise KeyError
@abstractmethod
def __delitem__(self, key):
raise KeyError
__marker = object()
def pop(self, key, default=__marker):
try:
value = self[key]
except KeyError:
if default is self.__marker:
raise
return default
else:
del self[key]
return value
def popitem(self):
try:
key = next(iter(self))
except StopIteration:
raise KeyError
value = self[key]
del self[key]
return key, value
def clear(self):
try:
while True:
self.popitem()
except KeyError:
pass
def update(self, other=(), **kwds):
if isinstance(other, Mapping):
for key in other:
self[key] = other[key]
elif hasattr(other, "keys"):
for key in other.keys():
self[key] = other[key]
else:
for key, value in other:
self[key] = value
for key, value in kwds.items():
self[key] = value
def setdefault(self, key, default=None):
try:
return self[key]
except KeyError:
self[key] = default
return default
MutableMapping.register(dict)
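# Hedged illustration, not part of the original module: the smallest concrete
# MutableMapping.  Only the five abstract methods are implemented; get(),
# pop(), popitem(), setdefault(), update(), keys()/items()/values() and the
# "in" operator all come for free from the mixin methods defined above.
class _DictBackedMappingSketch(MutableMapping):
    def __init__(self):
        self._data = {}
    def __getitem__(self, key):
        return self._data[key]
    def __setitem__(self, key, value):
        self._data[key] = value
    def __delitem__(self, key):
        del self._data[key]
    def __iter__(self):
        return iter(self._data)
    def __len__(self):
        return len(self._data)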
### SEQUENCES ###
class Sequence(Sized, Iterable, Container):
"""All the operations on a read-only sequence.
Concrete subclasses must override __new__ or __init__,
__getitem__, and __len__.
"""
@abstractmethod
def __getitem__(self, index):
raise IndexError
def __iter__(self):
i = 0
try:
while True:
v = self[i]
yield v
i += 1
except IndexError:
return
def __contains__(self, value):
for v in self:
if v == value:
return True
return False
def __reversed__(self):
for i in reversed(range(len(self))):
yield self[i]
def index(self, value):
for i, v in enumerate(self):
if v == value:
return i
raise ValueError
def count(self, value):
return sum(1 for v in self if v == value)
Sequence.register(tuple)
Sequence.register(basestring)
Sequence.register(buffer)
class MutableSequence(Sequence):
@abstractmethod
def __setitem__(self, index, value):
raise IndexError
@abstractmethod
def __delitem__(self, index):
raise IndexError
@abstractmethod
def insert(self, index, value):
raise IndexError
def append(self, value):
self.insert(len(self), value)
def reverse(self):
n = len(self)
for i in range(n//2):
self[i], self[n-i-1] = self[n-i-1], self[i]
def extend(self, values):
for v in values:
self.append(v)
def pop(self, index=-1):
v = self[index]
del self[index]
return v
def remove(self, value):
del self[self.index(value)]
def __iadd__(self, values):
self.extend(values)
MutableSequence.register(list)
| tempbottle/restcommander | play-1.2.4/python/Lib/_abcoll.py | Python | apache-2.0 | 13,666 | 0.000951 |
# Authors:
# Jason Gerard DeRose <jderose@redhat.com>
# Pavel Zuna <pzuna@redhat.com>
#
# Copyright (C) 2008 Red Hat
# see file 'COPYING' for use and warranty information
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from ipaclient.frontend import MethodOverride
from ipalib import errors
from ipalib import Flag
from ipalib import util
from ipalib.plugable import Registry
from ipalib import _
from ipalib import x509
register = Registry()
@register(override=True, no_fail=True)
class user_del(MethodOverride):
def get_options(self):
for option in super(user_del, self).get_options():
yield option
yield Flag(
'preserve?',
include='cli',
doc=_('Delete a user, keeping the entry available for future use'),
)
yield Flag(
'no_preserve?',
include='cli',
doc=_('Delete a user'),
)
def forward(self, *keys, **options):
if self.api.env.context == 'cli':
no_preserve = options.pop('no_preserve', False)
preserve = options.pop('preserve', False)
if no_preserve and preserve:
raise errors.MutuallyExclusiveError(
reason=_("preserve and no-preserve cannot be both set"))
elif no_preserve:
options['preserve'] = False
elif preserve:
options['preserve'] = True
return super(user_del, self).forward(*keys, **options)
@register(override=True, no_fail=True)
class user_show(MethodOverride):
def forward(self, *keys, **options):
if 'out' in options:
util.check_writable_file(options['out'])
result = super(user_show, self).forward(*keys, **options)
if 'usercertificate' in result['result']:
certs = (x509.load_der_x509_certificate(c)
for c in result['result']['usercertificate'])
x509.write_certificate_list(certs, options['out'])
result['summary'] = (
_('Certificate(s) stored in file \'%(file)s\'')
% dict(file=options['out'])
)
return result
else:
raise errors.NoCertificateError(entry=keys[-1])
else:
return super(user_show, self).forward(*keys, **options)
| encukou/freeipa | ipaclient/plugins/user.py | Python | gpl-3.0 | 2,966 | 0 |
# -*- coding: utf-8 -*-
class Charset(object):
common_name = 'NotoSansOldItalic-Regular'
native_name = ''
def glyphs(self):
chars = []
chars.append(0x0000) #uniFEFF ????
chars.append(0x0020) #uni00A0 SPACE
chars.append(0x00A0) #uni00A0 NO-BREAK SPACE
chars.append(0x000D) #uni000D ????
chars.append(0xFEFF) #uniFEFF ZERO WIDTH NO-BREAK SPACE
chars.append(0x0000) #uniFEFF ????
chars.append(0x10301) #glyph00005 OLD ITALIC LETTER BE
chars.append(0x10302) #glyph00006 OLD ITALIC LETTER KE
chars.append(0x10303) #glyph00007 OLD ITALIC LETTER DE
chars.append(0x10304) #glyph00008 OLD ITALIC LETTER E
chars.append(0x10305) #glyph00009 OLD ITALIC LETTER VE
chars.append(0x10306) #glyph00010 OLD ITALIC LETTER ZE
chars.append(0x10307) #glyph00011 OLD ITALIC LETTER HE
chars.append(0x10308) #glyph00012 OLD ITALIC LETTER THE
chars.append(0x10309) #glyph00013 OLD ITALIC LETTER I
chars.append(0x1030A) #glyph00014 OLD ITALIC LETTER KA
chars.append(0x1030B) #glyph00015 OLD ITALIC LETTER EL
chars.append(0x1030C) #glyph00016 OLD ITALIC LETTER EM
chars.append(0x000D) #uni000D ????
chars.append(0x1030E) #glyph00018 OLD ITALIC LETTER ESH
chars.append(0x1030F) #glyph00019 OLD ITALIC LETTER O
chars.append(0x10310) #glyph00020 OLD ITALIC LETTER PE
chars.append(0x10311) #glyph00021 OLD ITALIC LETTER SHE
chars.append(0x10312) #glyph00022 OLD ITALIC LETTER KU
chars.append(0x10313) #glyph00023 OLD ITALIC LETTER ER
chars.append(0x10314) #glyph00024 OLD ITALIC LETTER ES
chars.append(0x10315) #glyph00025 OLD ITALIC LETTER TE
chars.append(0x10316) #glyph00026 OLD ITALIC LETTER U
chars.append(0x10317) #glyph00027 OLD ITALIC LETTER EKS
chars.append(0x10318) #glyph00028 OLD ITALIC LETTER PHE
chars.append(0x10319) #glyph00029 OLD ITALIC LETTER KHE
chars.append(0x1031A) #glyph00030 OLD ITALIC LETTER EF
chars.append(0x1031B) #glyph00031 OLD ITALIC LETTER ERS
chars.append(0x1031C) #glyph00032 OLD ITALIC LETTER CHE
chars.append(0x1031D) #glyph00033 OLD ITALIC LETTER II
chars.append(0x10300) #glyph00004 OLD ITALIC LETTER A
chars.append(0x0020) #uni00A0 SPACE
chars.append(0x10321) #glyph00036 OLD ITALIC NUMERAL FIVE
chars.append(0x10322) #glyph00037 OLD ITALIC NUMERAL TEN
chars.append(0x10323) #glyph00038 OLD ITALIC NUMERAL FIFTY
chars.append(0x1031E) #glyph00034 OLD ITALIC LETTER UU
chars.append(0x00A0) #uni00A0 NO-BREAK SPACE
chars.append(0x1030D) #glyph00017 OLD ITALIC LETTER EN
chars.append(0x10320) #glyph00035 OLD ITALIC NUMERAL ONE
chars.append(0xFEFF) #uniFEFF ZERO WIDTH NO-BREAK SPACE
return chars
| davelab6/pyfontaine | fontaine/charsets/noto_chars/notosansolditalic_regular.py | Python | gpl-3.0 | 2,939 | 0.015652 |
from __future__ import division, absolute_import, print_function
import re
import os
import sys
import warnings
import platform
import tempfile
from subprocess import Popen, PIPE, STDOUT
from numpy.distutils.fcompiler import FCompiler
from numpy.distutils.exec_command import exec_command
from numpy.distutils.misc_util import msvc_runtime_library
from numpy.distutils.compat import get_exception
compilers = ['GnuFCompiler', 'Gnu95FCompiler']
TARGET_R = re.compile(r"Target: ([a-zA-Z0-9_\-]*)")
# XXX: handle cross compilation
def is_win64():
return sys.platform == "win32" and platform.architecture()[0] == "64bit"
if is_win64():
#_EXTRAFLAGS = ["-fno-leading-underscore"]
_EXTRAFLAGS = []
else:
_EXTRAFLAGS = []
class GnuFCompiler(FCompiler):
compiler_type = 'gnu'
compiler_aliases = ('g77',)
description = 'GNU Fortran 77 compiler'
def gnu_version_match(self, version_string):
"""Handle the different versions of GNU fortran compilers"""
# Strip warning(s) that may be emitted by gfortran
while version_string.startswith('gfortran: warning'):
version_string = version_string[version_string.find('\n')+1:]
# Gfortran versions from after 2010 will output a simple string
# (usually "x.y", "x.y.z" or "x.y.z-q") for ``-dumpversion``; older
# gfortrans may still return long version strings (``-dumpversion`` was
# an alias for ``--version``)
if len(version_string) <= 20:
# Try to find a valid version string
m = re.search(r'([0-9.]+)', version_string)
if m:
# g77 provides a longer version string that starts with GNU
# Fortran
if version_string.startswith('GNU Fortran'):
return ('g77', m.group(1))
# gfortran only outputs a version string such as #.#.#, so check
# if the match is at the start of the string
elif m.start() == 0:
return ('gfortran', m.group(1))
else:
# Output probably from --version, try harder:
m = re.search(r'GNU Fortran\s+95.*?([0-9-.]+)', version_string)
if m:
return ('gfortran', m.group(1))
m = re.search(r'GNU Fortran.*?\-?([0-9-.]+)', version_string)
if m:
v = m.group(1)
if v.startswith('0') or v.startswith('2') or v.startswith('3'):
# the '0' is for early g77's
return ('g77', v)
else:
# at some point in the 4.x series, the ' 95' was dropped
# from the version string
return ('gfortran', v)
# If still nothing, raise an error to make the problem easy to find.
err = 'A valid Fortran version was not found in this string:\n'
raise ValueError(err + version_string)
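    # Illustrative (assumed) inputs and the tuples gnu_version_match returns:
    #   "GNU Fortran (GCC) 3.4.6"       -> ('g77', '3.4.6')
    #   "GNU Fortran 95 (GCC) 4.0.3"    -> ('gfortran', '4.0.3')
    #   "4.9.1"  (modern -dumpversion)  -> ('gfortran', '4.9.1')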
def version_match(self, version_string):
v = self.gnu_version_match(version_string)
if not v or v[0] != 'g77':
return None
return v[1]
possible_executables = ['g77', 'f77']
executables = {
'version_cmd' : [None, "-dumpversion"],
'compiler_f77' : [None, "-g", "-Wall", "-fno-second-underscore"],
'compiler_f90' : None, # Use --fcompiler=gnu95 for f90 codes
'compiler_fix' : None,
'linker_so' : [None, "-g", "-Wall"],
'archiver' : ["ar", "-cr"],
'ranlib' : ["ranlib"],
'linker_exe' : [None, "-g", "-Wall"]
}
module_dir_switch = None
module_include_switch = None
# Cygwin: f771: warning: -fPIC ignored for target (all code is
# position independent)
if os.name != 'nt' and sys.platform != 'cygwin':
pic_flags = ['-fPIC']
# use -mno-cygwin for g77 when Python is not Cygwin-Python
if sys.platform == 'win32':
for key in ['version_cmd', 'compiler_f77', 'linker_so', 'linker_exe']:
executables[key].append('-mno-cygwin')
g2c = 'g2c'
suggested_f90_compiler = 'gnu95'
def get_flags_linker_so(self):
opt = self.linker_so[1:]
if sys.platform == 'darwin':
target = os.environ.get('MACOSX_DEPLOYMENT_TARGET', None)
# If MACOSX_DEPLOYMENT_TARGET is set, we simply trust the value
# and leave it alone. But, distutils will complain if the
# environment's value is different from the one in the Python
            # Makefile used to build Python. We let distutils handle this
# error checking.
if not target:
# If MACOSX_DEPLOYMENT_TARGET is not set in the environment,
# we try to get it first from the Python Makefile and then we
# fall back to setting it to 10.3 to maximize the set of
# versions we can work with. This is a reasonable default
# even when using the official Python dist and those derived
# from it.
import distutils.sysconfig as sc
g = {}
try:
get_makefile_filename = sc.get_makefile_filename
except AttributeError:
pass # i.e. PyPy
else:
filename = get_makefile_filename()
sc.parse_makefile(filename, g)
target = g.get('MACOSX_DEPLOYMENT_TARGET', '10.3')
os.environ['MACOSX_DEPLOYMENT_TARGET'] = target
if target == '10.3':
s = 'Env. variable MACOSX_DEPLOYMENT_TARGET set to 10.3'
warnings.warn(s, stacklevel=2)
opt.extend(['-undefined', 'dynamic_lookup', '-bundle'])
else:
opt.append("-shared")
if sys.platform.startswith('sunos'):
# SunOS often has dynamically loaded symbols defined in the
# static library libg2c.a The linker doesn't like this. To
# ignore the problem, use the -mimpure-text flag. It isn't
# the safest thing, but seems to work. 'man gcc' says:
# ".. Instead of using -mimpure-text, you should compile all
# source code with -fpic or -fPIC."
opt.append('-mimpure-text')
return opt
def get_libgcc_dir(self):
status, output = exec_command(self.compiler_f77 +
['-print-libgcc-file-name'],
use_tee=0)
if not status:
return os.path.dirname(output)
return None
def get_library_dirs(self):
opt = []
if sys.platform[:5] != 'linux':
d = self.get_libgcc_dir()
if d:
# if windows and not cygwin, libg2c lies in a different folder
if sys.platform == 'win32' and not d.startswith('/usr/lib'):
d = os.path.normpath(d)
path = os.path.join(d, "lib%s.a" % self.g2c)
if not os.path.exists(path):
root = os.path.join(d, *((os.pardir,)*4))
d2 = os.path.abspath(os.path.join(root, 'lib'))
path = os.path.join(d2, "lib%s.a" % self.g2c)
if os.path.exists(path):
opt.append(d2)
opt.append(d)
return opt
def get_libraries(self):
opt = []
d = self.get_libgcc_dir()
if d is not None:
g2c = self.g2c + '-pic'
f = self.static_lib_format % (g2c, self.static_lib_extension)
if not os.path.isfile(os.path.join(d, f)):
g2c = self.g2c
else:
g2c = self.g2c
if g2c is not None:
opt.append(g2c)
c_compiler = self.c_compiler
if sys.platform == 'win32' and c_compiler and \
c_compiler.compiler_type == 'msvc':
# the following code is not needed (read: breaks) when using MinGW
# in case want to link F77 compiled code with MSVC
opt.append('gcc')
runtime_lib = msvc_runtime_library()
if runtime_lib:
opt.append(runtime_lib)
if sys.platform == 'darwin':
opt.append('cc_dynamic')
return opt
def get_flags_debug(self):
return ['-g']
def get_flags_opt(self):
v = self.get_version()
if v and v <= '3.3.3':
# With this compiler version building Fortran BLAS/LAPACK
# with -O3 caused failures in lib.lapack heevr,syevr tests.
opt = ['-O2']
else:
opt = ['-O3']
opt.append('-funroll-loops')
return opt
def _c_arch_flags(self):
""" Return detected arch flags from CFLAGS """
from distutils import sysconfig
try:
cflags = sysconfig.get_config_vars()['CFLAGS']
except KeyError:
return []
arch_re = re.compile(r"-arch\s+(\w+)")
arch_flags = []
for arch in arch_re.findall(cflags):
arch_flags += ['-arch', arch]
return arch_flags
def get_flags_arch(self):
return []
def runtime_library_dir_option(self, dir):
sep = ',' if sys.platform == 'darwin' else '='
return '-Wl,-rpath%s"%s"' % (sep, dir)
class Gnu95FCompiler(GnuFCompiler):
compiler_type = 'gnu95'
compiler_aliases = ('gfortran',)
description = 'GNU Fortran 95 compiler'
def version_match(self, version_string):
v = self.gnu_version_match(version_string)
if not v or v[0] != 'gfortran':
return None
v = v[1]
if v >= '4.':
# gcc-4 series releases do not support -mno-cygwin option
pass
else:
# use -mno-cygwin flag for gfortran when Python is not
# Cygwin-Python
if sys.platform == 'win32':
for key in ['version_cmd', 'compiler_f77', 'compiler_f90',
'compiler_fix', 'linker_so', 'linker_exe']:
self.executables[key].append('-mno-cygwin')
return v
possible_executables = ['gfortran', 'f95']
executables = {
'version_cmd' : ["<F90>", "-dumpversion"],
'compiler_f77' : [None, "-Wall", "-g", "-ffixed-form",
"-fno-second-underscore"] + _EXTRAFLAGS,
'compiler_f90' : [None, "-Wall", "-g",
"-fno-second-underscore"] + _EXTRAFLAGS,
'compiler_fix' : [None, "-Wall", "-g","-ffixed-form",
"-fno-second-underscore"] + _EXTRAFLAGS,
'linker_so' : ["<F90>", "-Wall", "-g"],
'archiver' : ["ar", "-cr"],
'ranlib' : ["ranlib"],
'linker_exe' : [None, "-Wall"]
}
module_dir_switch = '-J'
module_include_switch = '-I'
g2c = 'gfortran'
def _universal_flags(self, cmd):
"""Return a list of -arch flags for every supported architecture."""
if not sys.platform == 'darwin':
return []
arch_flags = []
# get arches the C compiler gets.
c_archs = self._c_arch_flags()
if "i386" in c_archs:
c_archs[c_archs.index("i386")] = "i686"
# check the arches the Fortran compiler supports, and compare with
# arch flags from C compiler
for arch in ["ppc", "i686", "x86_64", "ppc64"]:
if _can_target(cmd, arch) and arch in c_archs:
arch_flags.extend(["-arch", arch])
return arch_flags
def get_flags(self):
flags = GnuFCompiler.get_flags(self)
arch_flags = self._universal_flags(self.compiler_f90)
if arch_flags:
flags[:0] = arch_flags
return flags
def get_flags_linker_so(self):
flags = GnuFCompiler.get_flags_linker_so(self)
arch_flags = self._universal_flags(self.linker_so)
if arch_flags:
flags[:0] = arch_flags
return flags
def get_library_dirs(self):
opt = GnuFCompiler.get_library_dirs(self)
if sys.platform == 'win32':
c_compiler = self.c_compiler
if c_compiler and c_compiler.compiler_type == "msvc":
target = self.get_target()
if target:
d = os.path.normpath(self.get_libgcc_dir())
root = os.path.join(d, *((os.pardir,)*4))
path = os.path.join(root, "lib")
mingwdir = os.path.normpath(path)
if os.path.exists(os.path.join(mingwdir, "libmingwex.a")):
opt.append(mingwdir)
return opt
def get_libraries(self):
opt = GnuFCompiler.get_libraries(self)
if sys.platform == 'darwin':
opt.remove('cc_dynamic')
if sys.platform == 'win32':
c_compiler = self.c_compiler
if c_compiler and c_compiler.compiler_type == "msvc":
if "gcc" in opt:
i = opt.index("gcc")
opt.insert(i+1, "mingwex")
opt.insert(i+1, "mingw32")
# XXX: fix this mess, does not work for mingw
if is_win64():
c_compiler = self.c_compiler
if c_compiler and c_compiler.compiler_type == "msvc":
return []
else:
pass
return opt
def get_target(self):
status, output = exec_command(self.compiler_f77 +
['-v'],
use_tee=0)
if not status:
m = TARGET_R.search(output)
if m:
return m.group(1)
return ""
def get_flags_opt(self):
if is_win64():
return ['-O0']
else:
return GnuFCompiler.get_flags_opt(self)
def _can_target(cmd, arch):
"""Return true if the architecture supports the -arch flag"""
newcmd = cmd[:]
fid, filename = tempfile.mkstemp(suffix=".f")
os.close(fid)
try:
d = os.path.dirname(filename)
output = os.path.splitext(filename)[0] + ".o"
try:
newcmd.extend(["-arch", arch, "-c", filename])
p = Popen(newcmd, stderr=STDOUT, stdout=PIPE, cwd=d)
p.communicate()
return p.returncode == 0
finally:
if os.path.exists(output):
os.remove(output)
finally:
os.remove(filename)
return False
if __name__ == '__main__':
from distutils import log
log.set_verbosity(2)
compiler = GnuFCompiler()
compiler.customize()
print(compiler.get_version())
try:
compiler = Gnu95FCompiler()
compiler.customize()
print(compiler.get_version())
except Exception:
msg = get_exception()
print(msg)
| maniteja123/numpy | numpy/distutils/fcompiler/gnu.py | Python | bsd-3-clause | 14,957 | 0.001872 |
# Copyright (c) 2016 Stratoscale, Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import ddt
import mock
from oslo_config import cfg
from oslo_serialization import jsonutils
try:
from urllib import urlencode
except ImportError:
from urllib.parse import urlencode
import webob
from cinder.api.v3 import router as router_v3
from cinder import context
from cinder import objects
from cinder import test
from cinder.tests.unit.api.contrib import test_volume_manage as test_contrib
from cinder.tests.unit.api import fakes
from cinder.tests.unit import fake_constants as fake
CONF = cfg.CONF
def app():
# no auth, just let environ['cinder.context'] pass through
api = router_v3.APIRouter()
mapper = fakes.urlmap.URLMap()
mapper['/v3'] = api
return mapper
@ddt.ddt
@mock.patch('cinder.objects.service.Service.get_by_host_and_topic',
test_contrib.service_get)
@mock.patch('cinder.volume.volume_types.get_volume_type_by_name',
test_contrib.vt_get_volume_type_by_name)
@mock.patch('cinder.volume.volume_types.get_volume_type',
test_contrib.vt_get_volume_type)
class VolumeManageTest(test.TestCase):
"""Test cases for cinder/api/v3/volume_manage.py"""
def setUp(self):
super(VolumeManageTest, self).setUp()
self._admin_ctxt = context.RequestContext(fake.USER_ID,
fake.PROJECT_ID,
True)
def _get_resp_post(self, body, version="3.8"):
"""Helper to execute a POST manageable_volumes API call."""
req = webob.Request.blank('/v3/%s/manageable_volumes' %
fake.PROJECT_ID)
req.method = 'POST'
req.headers['Content-Type'] = 'application/json'
req.headers['OpenStack-API-Version'] = 'volume ' + version
req.environ['cinder.context'] = self._admin_ctxt
req.body = jsonutils.dump_as_bytes(body)
res = req.get_response(app())
return res
@mock.patch('cinder.volume.api.API.manage_existing',
wraps=test_contrib.api_manage)
@mock.patch(
'cinder.api.openstack.wsgi.Controller.validate_name_and_description')
def test_manage_volume_route(self, mock_validate, mock_api_manage):
"""Test call to manage volume.
There is currently no change between the API in contrib and the API in
v3, so here we simply check that the call is routed properly, rather
than copying all the tests.
"""
body = {'volume': {'host': 'host_ok', 'ref': 'fake_ref'}}
res = self._get_resp_post(body)
self.assertEqual(202, res.status_int, res)
def test_manage_volume_previous_version(self):
body = {'volume': {'host': 'host_ok', 'ref': 'fake_ref'}}
res = self._get_resp_post(body)
self.assertEqual(400, res.status_int, res)
def _get_resp_get(self, host, detailed, paging, version="3.8", **kwargs):
"""Helper to execute a GET os-volume-manage API call."""
params = {'host': host} if host else {}
params.update(kwargs)
if paging:
params.update({'marker': '1234', 'limit': 10,
'offset': 4, 'sort': 'reference:asc'})
query_string = "?%s" % urlencode(params)
detail = ""
if detailed:
detail = "/detail"
req = webob.Request.blank('/v3/%s/manageable_volumes%s%s' %
(fake.PROJECT_ID, detail, query_string))
req.method = 'GET'
req.headers['Content-Type'] = 'application/json'
req.headers['OpenStack-API-Version'] = 'volume ' + version
req.environ['cinder.context'] = self._admin_ctxt
res = req.get_response(app())
return res
@mock.patch('cinder.volume.api.API.get_manageable_volumes',
wraps=test_contrib.api_get_manageable_volumes)
def test_get_manageable_volumes_route(self, mock_api_manageable):
"""Test call to get manageable volumes.
There is currently no change between the API in contrib and the API in
v3, so here we simply check that the call is routed properly, rather
than copying all the tests.
"""
res = self._get_resp_get('fakehost', False, True)
self.assertEqual(200, res.status_int)
def test_get_manageable_volumes_previous_version(self):
res = self._get_resp_get('fakehost', False, True, version="3.7")
self.assertEqual(404, res.status_int)
@mock.patch('cinder.volume.api.API.get_manageable_volumes',
wraps=test_contrib.api_get_manageable_volumes)
def test_get_manageable_volumes_detail_route(self, mock_api_manageable):
"""Test call to get manageable volumes (detailed).
There is currently no change between the API in contrib and the API in
v3, so here we simply check that the call is routed properly, rather
than copying all the tests.
"""
res = self._get_resp_get('fakehost', True, False)
self.assertEqual(200, res.status_int)
def test_get_manageable_volumes_detail_previous_version(self):
res = self._get_resp_get('fakehost', True, False, version="3.7")
self.assertEqual(404, res.status_int)
@ddt.data((True, True, 'detail_list'), (True, False, 'summary_list'),
(False, True, 'detail_list'), (False, False, 'summary_list'))
@ddt.unpack
@mock.patch('cinder.objects.Service.is_up', True)
@mock.patch('cinder.volume.rpcapi.VolumeAPI._get_cctxt')
@mock.patch('cinder.objects.Service.get_by_id')
def test_get_manageable_detail(self, clustered, is_detail, view_method,
get_service_mock, get_cctxt_mock):
if clustered:
host = None
cluster_name = 'mycluster'
version = '3.17'
kwargs = {'cluster': cluster_name}
else:
host = 'fakehost'
cluster_name = None
version = '3.8'
kwargs = {}
service = objects.Service(disabled=False, host='fakehost',
cluster_name=cluster_name)
get_service_mock.return_value = service
volumes = [mock.sentinel.volume1, mock.sentinel.volume2]
get_cctxt_mock.return_value.call.return_value = volumes
view_data = {'manageable-volumes': [{'vol': str(v)} for v in volumes]}
view_path = ('cinder.api.views.manageable_volumes.ViewBuilder.' +
view_method)
with mock.patch(view_path, return_value=view_data) as detail_view_mock:
res = self._get_resp_get(host, is_detail, False, version=version,
**kwargs)
self.assertEqual(200, res.status_int)
get_cctxt_mock.assert_called_once_with(service.service_topic_queue,
version=('3.10', '3.0'))
get_cctxt_mock.return_value.call.assert_called_once_with(
mock.ANY, 'get_manageable_volumes', marker=None,
limit=CONF.osapi_max_limit, offset=0, sort_keys=['reference'],
sort_dirs=['desc'], want_objects=True)
detail_view_mock.assert_called_once_with(mock.ANY, volumes,
len(volumes))
get_service_mock.assert_called_once_with(
mock.ANY, None, host=host, binary='cinder-volume',
cluster_name=cluster_name)
@ddt.data('3.8', '3.17')
def test_get_manageable_missing_host(self, version):
res = self._get_resp_get(None, True, False, version=version)
self.assertEqual(400, res.status_int)
def test_get_manageable_both_host_cluster(self):
res = self._get_resp_get('host', True, False, version='3.17',
cluster='cluster')
self.assertEqual(400, res.status_int)
| ge0rgi/cinder | cinder/tests/unit/api/v3/test_volume_manage.py | Python | apache-2.0 | 8,439 | 0 |
"""
Tests for programs celery tasks.
"""
import json
import unittest
from celery.exceptions import MaxRetriesExceededError
import ddt
from django.conf import settings
from django.core.cache import cache
from django.test import override_settings, TestCase
from edx_rest_api_client.client import EdxRestApiClient
from edx_oauth2_provider.tests.factories import ClientFactory
import httpretty
import mock
from provider.constants import CONFIDENTIAL
from lms.djangoapps.certificates.api import MODES
from openedx.core.djangoapps.credentials.tests.mixins import CredentialsApiConfigMixin
from openedx.core.djangoapps.programs.tests import factories
from openedx.core.djangoapps.programs.tests.mixins import ProgramsApiConfigMixin
from openedx.core.djangoapps.programs.tasks.v1 import tasks
from openedx.core.djangolib.testing.utils import CacheIsolationTestCase
from student.tests.factories import UserFactory
TASKS_MODULE = 'openedx.core.djangoapps.programs.tasks.v1.tasks'
UTILS_MODULE = 'openedx.core.djangoapps.programs.utils'
@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms')
class GetApiClientTestCase(TestCase, ProgramsApiConfigMixin):
"""
Test the get_api_client function
"""
@mock.patch(TASKS_MODULE + '.JwtBuilder.build_token')
def test_get_api_client(self, mock_build_token):
"""
Ensure the function is making the right API calls based on inputs
"""
student = UserFactory()
ClientFactory.create(name='programs')
api_config = self.create_programs_config(
internal_service_url='http://foo',
api_version_number=99,
)
mock_build_token.return_value = 'test-token'
api_client = tasks.get_api_client(api_config, student)
self.assertEqual(api_client._store['base_url'], 'http://foo/api/v99/') # pylint: disable=protected-access
self.assertEqual(api_client._store['session'].auth.token, 'test-token') # pylint: disable=protected-access
@httpretty.activate
@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms')
class GetCompletedProgramsTestCase(ProgramsApiConfigMixin, CacheIsolationTestCase):
"""
Test the get_completed_programs function
"""
ENABLED_CACHES = ['default']
def setUp(self):
super(GetCompletedProgramsTestCase, self).setUp()
self.user = UserFactory()
self.programs_config = self.create_programs_config(cache_ttl=5)
ClientFactory(name=self.programs_config.OAUTH2_CLIENT_NAME, client_type=CONFIDENTIAL)
cache.clear()
def _mock_programs_api(self, data):
"""Helper for mocking out Programs API URLs."""
self.assertTrue(httpretty.is_enabled(), msg='httpretty must be enabled to mock Programs API calls.')
url = self.programs_config.internal_api_url.strip('/') + '/programs/'
body = json.dumps({'results': data})
httpretty.register_uri(httpretty.GET, url, body=body, content_type='application/json')
def _assert_num_requests(self, count):
"""DRY helper for verifying request counts."""
self.assertEqual(len(httpretty.httpretty.latest_requests), count)
@mock.patch(UTILS_MODULE + '.get_completed_courses')
def test_get_completed_programs(self, mock_get_completed_courses):
"""
Verify that completed programs are found, using the cache when possible.
"""
course_id = 'org/course/run'
data = [
factories.Program(
organizations=[factories.Organization()],
course_codes=[
factories.CourseCode(run_modes=[
factories.RunMode(course_key=course_id),
]),
]
),
]
self._mock_programs_api(data)
mock_get_completed_courses.return_value = [
{'course_id': course_id, 'mode': MODES.verified}
]
for _ in range(2):
result = tasks.get_completed_programs(self.user)
self.assertEqual(result, [data[0]['id']])
# Verify that only one request to programs was made (i.e., the cache was hit).
self._assert_num_requests(1)
@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms')
class GetAwardedCertificateProgramsTestCase(TestCase):
"""
Test the get_awarded_certificate_programs function
"""
def make_credential_result(self, **kwargs):
"""
Helper to make dummy results from the credentials API
"""
result = {
'id': 1,
'username': 'dummy-username',
'credential': {
'credential_id': None,
'program_id': None,
},
'status': 'dummy-status',
'uuid': 'dummy-uuid',
'certificate_url': 'http://credentials.edx.org/credentials/dummy-uuid/'
}
result.update(**kwargs)
return result
@mock.patch(TASKS_MODULE + '.get_user_credentials')
def test_get_awarded_certificate_programs(self, mock_get_user_credentials):
"""
Ensure the API is called and results handled correctly.
"""
student = UserFactory(username='test-username')
mock_get_user_credentials.return_value = [
self.make_credential_result(status='awarded', credential={'program_id': 1}),
self.make_credential_result(status='awarded', credential={'course_id': 2}),
self.make_credential_result(status='revoked', credential={'program_id': 3}),
]
result = tasks.get_awarded_certificate_programs(student)
self.assertEqual(mock_get_user_credentials.call_args[0], (student, ))
self.assertEqual(result, [1])
@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms')
class AwardProgramCertificateTestCase(TestCase):
"""
Test the award_program_certificate function
"""
@httpretty.activate
def test_award_program_certificate(self):
"""
Ensure the correct API call gets made
"""
test_username = 'test-username'
test_client = EdxRestApiClient('http://test-server', jwt='test-token')
httpretty.register_uri(
httpretty.POST,
'http://test-server/user_credentials/',
)
tasks.award_program_certificate(test_client, test_username, 123)
expected_body = {
'username': test_username,
'credential': {'program_id': 123},
'attributes': []
}
self.assertEqual(json.loads(httpretty.last_request().body), expected_body)
@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms')
@ddt.ddt
@mock.patch(TASKS_MODULE + '.award_program_certificate')
@mock.patch(TASKS_MODULE + '.get_awarded_certificate_programs')
@mock.patch(TASKS_MODULE + '.get_completed_programs')
@override_settings(CREDENTIALS_SERVICE_USERNAME='test-service-username')
class AwardProgramCertificatesTestCase(TestCase, ProgramsApiConfigMixin, CredentialsApiConfigMixin):
"""
Tests for the 'award_program_certificates' celery task.
"""
def setUp(self):
super(AwardProgramCertificatesTestCase, self).setUp()
self.create_programs_config()
self.create_credentials_config()
self.student = UserFactory.create(username='test-student')
ClientFactory.create(name='programs')
ClientFactory.create(name='credentials')
UserFactory.create(username=settings.CREDENTIALS_SERVICE_USERNAME) # pylint: disable=no-member
def test_completion_check(
self,
mock_get_completed_programs,
mock_get_awarded_certificate_programs, # pylint: disable=unused-argument
mock_award_program_certificate, # pylint: disable=unused-argument
):
"""
Checks that the Programs API is used correctly to determine completed
programs.
"""
tasks.award_program_certificates.delay(self.student.username).get()
mock_get_completed_programs.assert_called_once_with(self.student)
@ddt.data(
([1], [2, 3]),
([], [1, 2, 3]),
([1, 2, 3], []),
)
@ddt.unpack
def test_awarding_certs(
self,
already_awarded_program_ids,
expected_awarded_program_ids,
mock_get_completed_programs,
mock_get_awarded_certificate_programs,
mock_award_program_certificate,
):
"""
Checks that the Credentials API is used to award certificates for
the proper programs.
"""
mock_get_completed_programs.return_value = [1, 2, 3]
mock_get_awarded_certificate_programs.return_value = already_awarded_program_ids
tasks.award_program_certificates.delay(self.student.username).get()
actual_program_ids = [call[0][2] for call in mock_award_program_certificate.call_args_list]
self.assertEqual(actual_program_ids, expected_awarded_program_ids)
@ddt.data(
('programs', 'enable_certification'),
('credentials', 'enable_learner_issuance'),
)
@ddt.unpack
def test_retry_if_config_disabled(
self,
disabled_config_type,
disabled_config_attribute,
*mock_helpers
):
"""
Checks that the task is aborted if any relevant api configs are
disabled.
"""
getattr(self, 'create_{}_config'.format(disabled_config_type))(**{disabled_config_attribute: False})
with mock.patch(TASKS_MODULE + '.LOGGER.warning') as mock_warning:
with self.assertRaises(MaxRetriesExceededError):
tasks.award_program_certificates.delay(self.student.username).get()
self.assertTrue(mock_warning.called)
for mock_helper in mock_helpers:
self.assertFalse(mock_helper.called)
def test_abort_if_invalid_username(self, *mock_helpers):
"""
Checks that the task will be aborted and not retried if the username
passed was not found, and that an exception is logged.
"""
with mock.patch(TASKS_MODULE + '.LOGGER.exception') as mock_exception:
tasks.award_program_certificates.delay('nonexistent-username').get()
self.assertTrue(mock_exception.called)
for mock_helper in mock_helpers:
self.assertFalse(mock_helper.called)
def test_abort_if_no_completed_programs(
self,
mock_get_completed_programs,
mock_get_awarded_certificate_programs,
mock_award_program_certificate,
):
"""
Checks that the task will be aborted without further action if there
are no programs for which to award a certificate.
"""
mock_get_completed_programs.return_value = []
tasks.award_program_certificates.delay(self.student.username).get()
self.assertTrue(mock_get_completed_programs.called)
self.assertFalse(mock_get_awarded_certificate_programs.called)
self.assertFalse(mock_award_program_certificate.called)
def _make_side_effect(self, side_effects):
"""
DRY helper. Returns a side effect function for use with mocks that
will be called multiple times, permitting Exceptions to be raised
(or not) in a specified order.
See Also:
http://www.voidspace.org.uk/python/mock/examples.html#multiple-calls-with-different-effects
http://www.voidspace.org.uk/python/mock/mock.html#mock.Mock.side_effect
"""
def side_effect(*_a): # pylint: disable=missing-docstring
if side_effects:
exc = side_effects.pop(0)
if exc:
raise exc
return mock.DEFAULT
return side_effect
def test_continue_awarding_certs_if_error(
self,
mock_get_completed_programs,
mock_get_awarded_certificate_programs,
mock_award_program_certificate,
):
"""
Checks that a single failure to award one of several certificates
does not cause the entire task to fail. Also ensures that
successfully awarded certs are logged as INFO and exceptions
        that arise are also logged.
"""
mock_get_completed_programs.return_value = [1, 2]
mock_get_awarded_certificate_programs.side_effect = [[], [2]]
mock_award_program_certificate.side_effect = self._make_side_effect([Exception('boom'), None])
with mock.patch(TASKS_MODULE + '.LOGGER.info') as mock_info, \
mock.patch(TASKS_MODULE + '.LOGGER.exception') as mock_exception:
tasks.award_program_certificates.delay(self.student.username).get()
self.assertEqual(mock_award_program_certificate.call_count, 3)
mock_exception.assert_called_once_with(mock.ANY, 1, self.student.username)
mock_info.assert_any_call(mock.ANY, 1, self.student.username)
mock_info.assert_any_call(mock.ANY, 2, self.student.username)
def test_retry_on_programs_api_errors(
self,
mock_get_completed_programs,
*_mock_helpers # pylint: disable=unused-argument
):
"""
Ensures that any otherwise-unhandled errors that arise while trying
to get completed programs (e.g. network issues or other
transient API errors) will cause the task to be failed and queued for
retry.
"""
mock_get_completed_programs.side_effect = self._make_side_effect([Exception('boom'), None])
tasks.award_program_certificates.delay(self.student.username).get()
self.assertEqual(mock_get_completed_programs.call_count, 2)
def test_retry_on_credentials_api_errors(
self,
mock_get_completed_programs,
mock_get_awarded_certificate_programs,
mock_award_program_certificate,
):
"""
Ensures that any otherwise-unhandled errors that arise while trying
to get existing program credentials (e.g. network issues or other
transient API errors) will cause the task to be failed and queued for
retry.
"""
mock_get_completed_programs.return_value = [1, 2]
mock_get_awarded_certificate_programs.return_value = [1]
mock_get_awarded_certificate_programs.side_effect = self._make_side_effect([Exception('boom'), None])
tasks.award_program_certificates.delay(self.student.username).get()
self.assertEqual(mock_get_awarded_certificate_programs.call_count, 2)
self.assertEqual(mock_award_program_certificate.call_count, 1)
| TheMOOCAgency/edx-platform | openedx/core/djangoapps/programs/tasks/v1/tests/test_tasks.py | Python | agpl-3.0 | 14,729 | 0.00224 |
# Copyright (c) 2006-2010 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2010, Eucalyptus Systems, Inc.
# All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import boto
from boto import handler
from boto.resultset import ResultSet
from boto.exception import BotoClientError
from boto.s3.acl import Policy, CannedACLStrings, Grant
from boto.s3.key import Key
from boto.s3.prefix import Prefix
from boto.s3.deletemarker import DeleteMarker
from boto.s3.multipart import MultiPartUpload
from boto.s3.multipart import CompleteMultiPartUpload
from boto.s3.multidelete import MultiDeleteResult
from boto.s3.multidelete import Error
from boto.s3.bucketlistresultset import BucketListResultSet
from boto.s3.bucketlistresultset import VersionedBucketListResultSet
from boto.s3.bucketlistresultset import MultiPartUploadListResultSet
from boto.s3.lifecycle import Lifecycle
from boto.s3.bucketlogging import BucketLogging
import boto.jsonresponse
import boto.utils
import xml.sax
import xml.sax.saxutils
import StringIO
import urllib
import re
import base64
from collections import defaultdict
# as per http://goo.gl/BDuud (02/19/2011)
class S3WebsiteEndpointTranslate:
trans_region = defaultdict(lambda :'s3-website-us-east-1')
trans_region['eu-west-1'] = 's3-website-eu-west-1'
trans_region['us-west-1'] = 's3-website-us-west-1'
trans_region['us-west-2'] = 's3-website-us-west-2'
trans_region['sa-east-1'] = 's3-website-sa-east-1'
trans_region['ap-northeast-1'] = 's3-website-ap-northeast-1'
trans_region['ap-southeast-1'] = 's3-website-ap-southeast-1'
@classmethod
def translate_region(self, reg):
return self.trans_region[reg]
S3Permissions = ['READ', 'WRITE', 'READ_ACP', 'WRITE_ACP', 'FULL_CONTROL']
class Bucket(object):
LoggingGroup = 'http://acs.amazonaws.com/groups/s3/LogDelivery'
BucketPaymentBody = """<?xml version="1.0" encoding="UTF-8"?>
<RequestPaymentConfiguration xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
<Payer>%s</Payer>
</RequestPaymentConfiguration>"""
VersioningBody = """<?xml version="1.0" encoding="UTF-8"?>
<VersioningConfiguration xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
<Status>%s</Status>
<MfaDelete>%s</MfaDelete>
</VersioningConfiguration>"""
WebsiteBody = """<?xml version="1.0" encoding="UTF-8"?>
<WebsiteConfiguration xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
<IndexDocument><Suffix>%s</Suffix></IndexDocument>
%s
</WebsiteConfiguration>"""
WebsiteErrorFragment = """<ErrorDocument><Key>%s</Key></ErrorDocument>"""
VersionRE = '<Status>([A-Za-z]+)</Status>'
MFADeleteRE = '<MfaDelete>([A-Za-z]+)</MfaDelete>'
def __init__(self, connection=None, name=None, key_class=Key):
self.name = name
self.connection = connection
self.key_class = key_class
def __repr__(self):
return '<Bucket: %s>' % self.name
def __iter__(self):
return iter(BucketListResultSet(self))
def __contains__(self, key_name):
return not (self.get_key(key_name) is None)
def startElement(self, name, attrs, connection):
return None
def endElement(self, name, value, connection):
if name == 'Name':
self.name = value
elif name == 'CreationDate':
self.creation_date = value
else:
setattr(self, name, value)
def set_key_class(self, key_class):
"""
Set the Key class associated with this bucket. By default, this
would be the boto.s3.key.Key class but if you want to subclass that
for some reason this allows you to associate your new class with a
bucket so that when you call bucket.new_key() or when you get a listing
        of keys in the bucket you will get instances of your key class
rather than the default.
:type key_class: class
:param key_class: A subclass of Key that can be more specific
"""
self.key_class = key_class
def lookup(self, key_name, headers=None):
"""
Deprecated: Please use get_key method.
:type key_name: string
:param key_name: The name of the key to retrieve
:rtype: :class:`boto.s3.key.Key`
:returns: A Key object from this bucket.
"""
return self.get_key(key_name, headers=headers)
def get_key(self, key_name, headers=None, version_id=None):
"""
Check to see if a particular key exists within the bucket. This
        method uses a HEAD request to check for the existence of the key.
Returns: An instance of a Key object or None
:type key_name: string
:param key_name: The name of the key to retrieve
:rtype: :class:`boto.s3.key.Key`
:returns: A Key object from this bucket.
"""
if version_id:
query_args = 'versionId=%s' % version_id
else:
query_args = None
response = self.connection.make_request('HEAD', self.name, key_name,
headers=headers,
query_args=query_args)
# Allow any success status (2xx) - for example this lets us
# support Range gets, which return status 206:
if response.status/100 == 2:
response.read()
k = self.key_class(self)
provider = self.connection.provider
k.metadata = boto.utils.get_aws_metadata(response.msg, provider)
k.etag = response.getheader('etag')
k.content_type = response.getheader('content-type')
k.content_encoding = response.getheader('content-encoding')
k.last_modified = response.getheader('last-modified')
# the following machinations are a workaround to the fact that
# apache/fastcgi omits the content-length header on HEAD
# requests when the content-length is zero.
# See http://goo.gl/0Tdax for more details.
clen = response.getheader('content-length')
if clen:
k.size = int(response.getheader('content-length'))
else:
k.size = 0
k.cache_control = response.getheader('cache-control')
k.name = key_name
k.handle_version_headers(response)
k.handle_encryption_headers(response)
return k
else:
if response.status == 404:
response.read()
return None
else:
raise self.connection.provider.storage_response_error(
response.status, response.reason, '')
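    # Illustrative usage sketch (not part of the original boto source; assumes
    # an existing connection and a bucket you own):
    #
    #     conn = boto.connect_s3()
    #     bucket = conn.get_bucket('mybucket')
    #     key = bucket.get_key('photos/cat.jpg')
    #     if key is not None:
    #         print key.size, key.etag, key.last_modified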
def list(self, prefix='', delimiter='', marker='', headers=None):
"""
        List key objects within a bucket. This returns an instance of a
BucketListResultSet that automatically handles all of the result
paging, etc. from S3. You just need to keep iterating until
there are no more results.
Called with no arguments, this will return an iterator object across
all keys within the bucket.
The Key objects returned by the iterator are obtained by parsing
the results of a GET on the bucket, also known as the List Objects
request. The XML returned by this request contains only a subset
of the information about each key. Certain metadata fields such
as Content-Type and user metadata are not available in the XML.
Therefore, if you want these additional metadata fields you will
have to do a HEAD request on the Key in the bucket.
:type prefix: string
:param prefix: allows you to limit the listing to a particular
prefix. For example, if you call the method with
prefix='/foo/' then the iterator will only cycle
through the keys that begin with the string '/foo/'.
:type delimiter: string
:param delimiter: can be used in conjunction with the prefix
to allow you to organize and browse your keys
hierarchically. See:
http://docs.amazonwebservices.com/AmazonS3/2006-03-01/
for more details.
:type marker: string
:param marker: The "marker" of where you are in the result set
:rtype: :class:`boto.s3.bucketlistresultset.BucketListResultSet`
:return: an instance of a BucketListResultSet that handles paging, etc
"""
return BucketListResultSet(self, prefix, delimiter, marker, headers)
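    # Illustrative usage sketch (not part of the original boto source): iterate
    # over the keys below a prefix, using a delimiter to roll up "sub-directories".
    # The iterator yields Key instances and, for rolled-up names, Prefix instances.
    #
    #     for item in bucket.list(prefix='photos/', delimiter='/'):
    #         print item.name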
def list_versions(self, prefix='', delimiter='', key_marker='',
version_id_marker='', headers=None):
"""
        List version objects within a bucket. This returns an instance of a
VersionedBucketListResultSet that automatically handles all of the result
paging, etc. from S3. You just need to keep iterating until
there are no more results.
Called with no arguments, this will return an iterator object across
all keys within the bucket.
:type prefix: string
:param prefix: allows you to limit the listing to a particular
prefix. For example, if you call the method with
prefix='/foo/' then the iterator will only cycle
through the keys that begin with the string '/foo/'.
:type delimiter: string
:param delimiter: can be used in conjunction with the prefix
to allow you to organize and browse your keys
hierarchically. See:
http://docs.amazonwebservices.com/AmazonS3/2006-03-01/
for more details.
        :type key_marker: string
        :param key_marker: The "marker" of where you are in the result set
                           with respect to keys.
        :type version_id_marker: string
        :param version_id_marker: The "marker" of where you are in the result
                                  set with respect to version-id's.
        :rtype: :class:`boto.s3.bucketlistresultset.VersionedBucketListResultSet`
        :return: an instance of a VersionedBucketListResultSet that handles
                 paging, etc
"""
return VersionedBucketListResultSet(self, prefix, delimiter, key_marker,
version_id_marker, headers)
def list_multipart_uploads(self, key_marker='',
upload_id_marker='',
headers=None):
"""
List multipart upload objects within a bucket. This returns an
        instance of a MultiPartUploadListResultSet that automatically
handles all of the result paging, etc. from S3. You just need
to keep iterating until there are no more results.
        :type key_marker: string
        :param key_marker: The "marker" of where you are in the result set
                           with respect to keys.
        :type upload_id_marker: string
        :param upload_id_marker: The "marker" of where you are in the result
                                 set with respect to upload id's.
        :rtype: :class:`boto.s3.bucketlistresultset.MultiPartUploadListResultSet`
        :return: an instance of a MultiPartUploadListResultSet that handles
                 paging, etc
"""
return MultiPartUploadListResultSet(self, key_marker,
upload_id_marker,
headers)
def _get_all(self, element_map, initial_query_string='',
headers=None, **params):
l = []
for k, v in params.items():
k = k.replace('_', '-')
if k == 'maxkeys':
k = 'max-keys'
if isinstance(v, unicode):
v = v.encode('utf-8')
if v is not None and v != '':
l.append('%s=%s' % (urllib.quote(k), urllib.quote(str(v))))
if len(l):
s = initial_query_string + '&' + '&'.join(l)
else:
s = initial_query_string
response = self.connection.make_request('GET', self.name,
headers=headers,
query_args=s)
body = response.read()
boto.log.debug(body)
if response.status == 200:
rs = ResultSet(element_map)
h = handler.XmlHandler(rs, self)
xml.sax.parseString(body, h)
return rs
else:
raise self.connection.provider.storage_response_error(
response.status, response.reason, body)
def get_all_keys(self, headers=None, **params):
"""
A lower-level method for listing contents of a bucket.
This closely models the actual S3 API and requires you to manually
handle the paging of results. For a higher-level method
that handles the details of paging for you, you can use the list method.
:type max_keys: int
:param max_keys: The maximum number of keys to retrieve
:type prefix: string
:param prefix: The prefix of the keys you want to retrieve
:type marker: string
:param marker: The "marker" of where you are in the result set
:type delimiter: string
:param delimiter: If this optional, Unicode string parameter
is included with your request, then keys that
contain the same string between the prefix and
the first occurrence of the delimiter will be
rolled up into a single result element in the
CommonPrefixes collection. These rolled-up keys
are not returned elsewhere in the response.
:rtype: ResultSet
:return: The result from S3 listing the keys requested
"""
return self._get_all([('Contents', self.key_class),
('CommonPrefixes', Prefix)],
'', headers, **params)
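    # Illustrative paging sketch for this lower-level call (not part of the
    # original boto source; assumes the returned ResultSet exposes is_truncated,
    # as the docstring above implies manual paging):
    #
    #     rs = bucket.get_all_keys(max_keys=100)
    #     while True:
    #         for key in rs:
    #             print key.name
    #         if not rs.is_truncated:
    #             break
    #         rs = bucket.get_all_keys(max_keys=100, marker=rs[-1].name)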
def get_all_versions(self, headers=None, **params):
"""
A lower-level, version-aware method for listing contents of a bucket.
This closely models the actual S3 API and requires you to manually
handle the paging of results. For a higher-level method
that handles the details of paging for you, you can use the list method.
:type max_keys: int
:param max_keys: The maximum number of keys to retrieve
:type prefix: string
:param prefix: The prefix of the keys you want to retrieve
:type key_marker: string
:param key_marker: The "marker" of where you are in the result set
with respect to keys.
:type version_id_marker: string
:param version_id_marker: The "marker" of where you are in the result
set with respect to version-id's.
:type delimiter: string
:param delimiter: If this optional, Unicode string parameter
is included with your request, then keys that
contain the same string between the prefix and
the first occurrence of the delimiter will be
rolled up into a single result element in the
CommonPrefixes collection. These rolled-up keys
are not returned elsewhere in the response.
:rtype: ResultSet
:return: The result from S3 listing the keys requested
"""
return self._get_all([('Version', self.key_class),
('CommonPrefixes', Prefix),
('DeleteMarker', DeleteMarker)],
'versions', headers, **params)
def get_all_multipart_uploads(self, headers=None, **params):
"""
A lower-level, version-aware method for listing active
MultiPart uploads for a bucket. This closely models the
actual S3 API and requires you to manually handle the paging
of results. For a higher-level method that handles the
details of paging for you, you can use the list method.
:type max_uploads: int
:param max_uploads: The maximum number of uploads to retrieve.
Default value is 1000.
:type key_marker: string
:param key_marker: Together with upload_id_marker, this parameter
specifies the multipart upload after which listing
should begin. If upload_id_marker is not specified,
only the keys lexicographically greater than the
specified key_marker will be included in the list.
If upload_id_marker is specified, any multipart
uploads for a key equal to the key_marker might
also be included, provided those multipart uploads
have upload IDs lexicographically greater than the
specified upload_id_marker.
:type upload_id_marker: string
:param upload_id_marker: Together with key-marker, specifies
the multipart upload after which listing
should begin. If key_marker is not specified,
the upload_id_marker parameter is ignored.
Otherwise, any multipart uploads for a key
equal to the key_marker might be included
in the list only if they have an upload ID
lexicographically greater than the specified
upload_id_marker.
:rtype: ResultSet
:return: The result from S3 listing the uploads requested
"""
return self._get_all([('Upload', MultiPartUpload),
('CommonPrefixes', Prefix)],
'uploads', headers, **params)
def new_key(self, key_name=None):
"""
Creates a new key
:type key_name: string
:param key_name: The name of the key to create
:rtype: :class:`boto.s3.key.Key` or subclass
:returns: An instance of the newly created key object
"""
return self.key_class(self, key_name)
def generate_url(self, expires_in, method='GET', headers=None,
force_http=False, response_headers=None,
expires_in_absolute=False):
return self.connection.generate_url(expires_in, method, self.name,
headers=headers,
force_http=force_http,
response_headers=response_headers,
expires_in_absolute=expires_in_absolute)
def delete_keys(self, keys, quiet=False, mfa_token=None, headers=None):
"""
Deletes a set of keys using S3's Multi-object delete API. If a
VersionID is specified for that key then that version is removed.
Returns a MultiDeleteResult Object, which contains Deleted
and Error elements for each key you ask to delete.
:type keys: list
:param keys: A list of either key_names or (key_name, versionid) pairs
or a list of Key instances.
:type quiet: boolean
:param quiet: In quiet mode the response includes only keys where
the delete operation encountered an error. For a
successful deletion, the operation does not return
any information about the delete in the response body.
:type mfa_token: tuple or list of strings
:param mfa_token: A tuple or list consisting of the serial number
from the MFA device and the current value of
the six-digit token associated with the device.
This value is required anytime you are
deleting versioned objects from a bucket
that has the MFADelete option on the bucket.
:returns: An instance of MultiDeleteResult
"""
ikeys = iter(keys)
result = MultiDeleteResult(self)
provider = self.connection.provider
query_args = 'delete'
def delete_keys2(hdrs):
hdrs = hdrs or {}
data = u"""<?xml version="1.0" encoding="UTF-8"?>"""
data += u"<Delete>"
if quiet:
data += u"<Quiet>true</Quiet>"
count = 0
while count < 1000:
try:
key = ikeys.next()
except StopIteration:
break
if isinstance(key, basestring):
key_name = key
version_id = None
elif isinstance(key, tuple) and len(key) == 2:
key_name, version_id = key
elif (isinstance(key, Key) or isinstance(key, DeleteMarker)) and key.name:
key_name = key.name
version_id = key.version_id
else:
if isinstance(key, Prefix):
key_name = key.name
code = 'PrefixSkipped' # Don't delete Prefix
else:
key_name = repr(key) # try get a string
code = 'InvalidArgument' # other unknown type
message = 'Invalid. No delete action taken for this object.'
error = Error(key_name, code=code, message=message)
result.errors.append(error)
continue
count += 1
#key_name = key_name.decode('utf-8')
data += u"<Object><Key>%s</Key>" % xml.sax.saxutils.escape(key_name)
if version_id:
data += u"<VersionId>%s</VersionId>" % version_id
data += u"</Object>"
data += u"</Delete>"
if count <= 0:
return False # no more
data = data.encode('utf-8')
fp = StringIO.StringIO(data)
md5 = boto.utils.compute_md5(fp)
hdrs['Content-MD5'] = md5[1]
hdrs['Content-Type'] = 'text/xml'
if mfa_token:
hdrs[provider.mfa_header] = ' '.join(mfa_token)
response = self.connection.make_request('POST', self.name,
headers=hdrs,
query_args=query_args,
data=data)
body = response.read()
if response.status == 200:
h = handler.XmlHandler(result, self)
xml.sax.parseString(body, h)
return count >= 1000 # more?
else:
raise provider.storage_response_error(response.status,
response.reason,
body)
while delete_keys2(headers):
pass
return result
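    # Illustrative usage sketch (not part of the original boto source): delete
    # a mix of plain key names and (key, version-id) pairs in one request and
    # inspect any per-key failures reported by S3.
    #
    #     result = bucket.delete_keys(['a.txt', ('b.txt', 'some-version-id')])
    #     for error in result.errors:
    #         print error.key, error.code, error.message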
def delete_key(self, key_name, headers=None,
version_id=None, mfa_token=None):
"""
Deletes a key from the bucket. If a version_id is provided,
only that version of the key will be deleted.
:type key_name: string
:param key_name: The key name to delete
:type version_id: string
:param version_id: The version ID (optional)
:type mfa_token: tuple or list of strings
:param mfa_token: A tuple or list consisting of the serial number
from the MFA device and the current value of
the six-digit token associated with the device.
This value is required anytime you are
deleting versioned objects from a bucket
that has the MFADelete option on the bucket.
"""
provider = self.connection.provider
if version_id:
query_args = 'versionId=%s' % version_id
else:
query_args = None
if mfa_token:
if not headers:
headers = {}
headers[provider.mfa_header] = ' '.join(mfa_token)
response = self.connection.make_request('DELETE', self.name, key_name,
headers=headers,
query_args=query_args)
body = response.read()
if response.status != 204:
raise provider.storage_response_error(response.status,
response.reason, body)
def copy_key(self, new_key_name, src_bucket_name,
src_key_name, metadata=None, src_version_id=None,
storage_class='STANDARD', preserve_acl=False,
encrypt_key=False, headers=None, query_args=None):
"""
Create a new key in the bucket by copying another existing key.
:type new_key_name: string
:param new_key_name: The name of the new key
:type src_bucket_name: string
:param src_bucket_name: The name of the source bucket
:type src_key_name: string
:param src_key_name: The name of the source key
:type src_version_id: string
:param src_version_id: The version id for the key. This param
is optional. If not specified, the newest
version of the key will be copied.
:type metadata: dict
:param metadata: Metadata to be associated with new key.
If metadata is supplied, it will replace the
metadata of the source key being copied.
If no metadata is supplied, the source key's
metadata will be copied to the new key.
:type storage_class: string
:param storage_class: The storage class of the new key.
By default, the new key will use the
standard storage class. Possible values are:
STANDARD | REDUCED_REDUNDANCY
:type preserve_acl: bool
:param preserve_acl: If True, the ACL from the source key
will be copied to the destination
key. If False, the destination key
will have the default ACL.
Note that preserving the ACL in the
new key object will require two
additional API calls to S3, one to
retrieve the current ACL and one to
set that ACL on the new object. If
you don't care about the ACL, a value
of False will be significantly more
efficient.
:type encrypt_key: bool
:param encrypt_key: If True, the new copy of the object will
be encrypted on the server-side by S3 and
will be stored in an encrypted form while
at rest in S3.
:type headers: dict
:param headers: A dictionary of header name/value pairs.
:type query_args: string
:param query_args: A string of additional querystring arguments
to append to the request
:rtype: :class:`boto.s3.key.Key` or subclass
:returns: An instance of the newly created key object
"""
headers = headers or {}
provider = self.connection.provider
src_key_name = boto.utils.get_utf8_value(src_key_name)
if preserve_acl:
if self.name == src_bucket_name:
src_bucket = self
else:
src_bucket = self.connection.get_bucket(src_bucket_name)
acl = src_bucket.get_xml_acl(src_key_name)
if encrypt_key:
headers[provider.server_side_encryption_header] = 'AES256'
src = '%s/%s' % (src_bucket_name, urllib.quote(src_key_name))
if src_version_id:
src += '?versionId=%s' % src_version_id
headers[provider.copy_source_header] = str(src)
# make sure storage_class_header key exists before accessing it
if provider.storage_class_header and storage_class:
headers[provider.storage_class_header] = storage_class
if metadata:
headers[provider.metadata_directive_header] = 'REPLACE'
headers = boto.utils.merge_meta(headers, metadata, provider)
elif not query_args: # Can't use this header with multi-part copy.
headers[provider.metadata_directive_header] = 'COPY'
response = self.connection.make_request('PUT', self.name, new_key_name,
headers=headers,
query_args=query_args)
body = response.read()
if response.status == 200:
key = self.new_key(new_key_name)
h = handler.XmlHandler(key, self)
xml.sax.parseString(body, h)
if hasattr(key, 'Error'):
raise provider.storage_copy_error(key.Code, key.Message, body)
key.handle_version_headers(response)
if preserve_acl:
self.set_xml_acl(acl, new_key_name)
return key
else:
raise provider.storage_response_error(response.status,
response.reason, body)
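    # Illustrative usage sketch (not part of the original boto source): copy a
    # key from another bucket, preserving its ACL (two extra API calls) and
    # requesting server-side encryption for the copy.
    #
    #     new_key = bucket.copy_key('backup/report.csv', 'source-bucket',
    #                               'report.csv', preserve_acl=True,
    #                               encrypt_key=True)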
def set_canned_acl(self, acl_str, key_name='', headers=None,
version_id=None):
assert acl_str in CannedACLStrings
if headers:
headers[self.connection.provider.acl_header] = acl_str
else:
headers={self.connection.provider.acl_header: acl_str}
query_args = 'acl'
if version_id:
query_args += '&versionId=%s' % version_id
response = self.connection.make_request('PUT', self.name, key_name,
headers=headers, query_args=query_args)
body = response.read()
if response.status != 200:
raise self.connection.provider.storage_response_error(
response.status, response.reason, body)
def get_xml_acl(self, key_name='', headers=None, version_id=None):
query_args = 'acl'
if version_id:
query_args += '&versionId=%s' % version_id
response = self.connection.make_request('GET', self.name, key_name,
query_args=query_args,
headers=headers)
body = response.read()
if response.status != 200:
raise self.connection.provider.storage_response_error(
response.status, response.reason, body)
return body
def set_xml_acl(self, acl_str, key_name='', headers=None, version_id=None,
query_args='acl'):
if version_id:
query_args += '&versionId=%s' % version_id
response = self.connection.make_request('PUT', self.name, key_name,
data=acl_str.encode('ISO-8859-1'),
query_args=query_args,
headers=headers)
body = response.read()
if response.status != 200:
raise self.connection.provider.storage_response_error(
response.status, response.reason, body)
def set_acl(self, acl_or_str, key_name='', headers=None, version_id=None):
if isinstance(acl_or_str, Policy):
self.set_xml_acl(acl_or_str.to_xml(), key_name,
headers, version_id)
else:
self.set_canned_acl(acl_or_str, key_name,
headers, version_id)
def get_acl(self, key_name='', headers=None, version_id=None):
query_args = 'acl'
if version_id:
query_args += '&versionId=%s' % version_id
response = self.connection.make_request('GET', self.name, key_name,
query_args=query_args,
headers=headers)
body = response.read()
if response.status == 200:
policy = Policy(self)
h = handler.XmlHandler(policy, self)
xml.sax.parseString(body, h)
return policy
else:
raise self.connection.provider.storage_response_error(
response.status, response.reason, body)
def set_subresource(self, subresource, value, key_name = '', headers=None,
version_id=None):
"""
Set a subresource for a bucket or key.
:type subresource: string
:param subresource: The subresource to set.
:type value: string
:param value: The value of the subresource.
:type key_name: string
:param key_name: The key to operate on, or None to operate on the
bucket.
:type headers: dict
:param headers: Additional HTTP headers to include in the request.
        :type version_id: string
        :param version_id: Optional. The version id of the key to operate
                           on. If not specified, operate on the newest
                           version.
"""
if not subresource:
raise TypeError('set_subresource called with subresource=None')
query_args = subresource
if version_id:
query_args += '&versionId=%s' % version_id
response = self.connection.make_request('PUT', self.name, key_name,
data=value.encode('UTF-8'),
query_args=query_args,
headers=headers)
body = response.read()
if response.status != 200:
raise self.connection.provider.storage_response_error(
response.status, response.reason, body)
def get_subresource(self, subresource, key_name='', headers=None,
version_id=None):
"""
Get a subresource for a bucket or key.
:type subresource: string
:param subresource: The subresource to get.
:type key_name: string
:param key_name: The key to operate on, or None to operate on the
bucket.
:type headers: dict
:param headers: Additional HTTP headers to include in the request.
        :type version_id: string
        :param version_id: Optional. The version id of the key to operate
                           on. If not specified, operate on the newest
                           version.
:rtype: string
:returns: The value of the subresource.
"""
if not subresource:
raise TypeError('get_subresource called with subresource=None')
query_args = subresource
if version_id:
query_args += '&versionId=%s' % version_id
response = self.connection.make_request('GET', self.name, key_name,
query_args=query_args,
headers=headers)
body = response.read()
if response.status != 200:
raise self.connection.provider.storage_response_error(
response.status, response.reason, body)
return body
def make_public(self, recursive=False, headers=None):
self.set_canned_acl('public-read', headers=headers)
if recursive:
for key in self:
self.set_canned_acl('public-read', key.name, headers=headers)
def add_email_grant(self, permission, email_address,
recursive=False, headers=None):
"""
Convenience method that provides a quick way to add an email grant
to a bucket. This method retrieves the current ACL, creates a new
grant based on the parameters passed in, adds that grant to the ACL
and then PUT's the new ACL back to S3.
:type permission: string
:param permission: The permission being granted. Should be one of:
(READ, WRITE, READ_ACP, WRITE_ACP, FULL_CONTROL).
:type email_address: string
:param email_address: The email address associated with the AWS
                              account you are granting the permission to.
:type recursive: boolean
        :param recursive: A boolean value that controls whether the command
will apply the grant to all keys within the bucket
or not. The default value is False. By passing a
True value, the call will iterate through all keys
in the bucket and apply the same grant to each key.
CAUTION: If you have a lot of keys, this could take
a long time!
"""
if permission not in S3Permissions:
raise self.connection.provider.storage_permissions_error(
'Unknown Permission: %s' % permission)
policy = self.get_acl(headers=headers)
policy.acl.add_email_grant(permission, email_address)
self.set_acl(policy, headers=headers)
if recursive:
for key in self:
key.add_email_grant(permission, email_address, headers=headers)
def add_user_grant(self, permission, user_id, recursive=False,
headers=None, display_name=None):
"""
Convenience method that provides a quick way to add a canonical
user grant to a bucket. This method retrieves the current ACL,
creates a new grant based on the parameters passed in, adds that
grant to the ACL and then PUT's the new ACL back to S3.
:type permission: string
:param permission: The permission being granted. Should be one of:
(READ, WRITE, READ_ACP, WRITE_ACP, FULL_CONTROL).
:type user_id: string
:param user_id: The canonical user id associated with the AWS
                        account you are granting the permission to.
:type recursive: boolean
        :param recursive: A boolean value that controls whether the command
will apply the grant to all keys within the bucket
or not. The default value is False. By passing a
True value, the call will iterate through all keys
in the bucket and apply the same grant to each key.
CAUTION: If you have a lot of keys, this could take
a long time!
:type display_name: string
        :param display_name: An optional string containing the user's
Display Name. Only required on Walrus.
"""
if permission not in S3Permissions:
raise self.connection.provider.storage_permissions_error(
'Unknown Permission: %s' % permission)
policy = self.get_acl(headers=headers)
policy.acl.add_user_grant(permission, user_id,
display_name=display_name)
self.set_acl(policy, headers=headers)
if recursive:
for key in self:
key.add_user_grant(permission, user_id, headers=headers,
display_name=display_name)
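    # Illustrative usage sketch (not part of the original boto source): grant
    # READ on the bucket and on every key in it to another AWS account; the
    # canonical user id below is a placeholder.
    #
    #     bucket.add_user_grant('READ', 'example-canonical-user-id',
    #                           recursive=True)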
def list_grants(self, headers=None):
policy = self.get_acl(headers=headers)
return policy.acl.grants
def get_location(self):
"""
Returns the LocationConstraint for the bucket.
:rtype: str
:return: The LocationConstraint for the bucket or the empty
string if no constraint was specified when bucket
was created.
"""
response = self.connection.make_request('GET', self.name,
query_args='location')
body = response.read()
if response.status == 200:
rs = ResultSet(self)
h = handler.XmlHandler(rs, self)
xml.sax.parseString(body, h)
return rs.LocationConstraint
else:
raise self.connection.provider.storage_response_error(
response.status, response.reason, body)
def set_xml_logging(self, logging_str, headers=None):
"""
Set logging on a bucket directly to the given xml string.
:type logging_str: unicode string
:param logging_str: The XML for the bucketloggingstatus which will be set.
The string will be converted to utf-8 before it is sent.
Usually, you will obtain this XML from the BucketLogging
object.
:rtype: bool
:return: True if ok or raises an exception.
"""
body = logging_str.encode('utf-8')
response = self.connection.make_request('PUT', self.name, data=body,
query_args='logging', headers=headers)
body = response.read()
if response.status == 200:
return True
else:
raise self.connection.provider.storage_response_error(
response.status, response.reason, body)
def enable_logging(self, target_bucket, target_prefix='', grants=None, headers=None):
"""
Enable logging on a bucket.
:type target_bucket: bucket or string
:param target_bucket: The bucket to log to.
:type target_prefix: string
:param target_prefix: The prefix which should be prepended to the
generated log files written to the target_bucket.
:type grants: list of Grant objects
:param grants: A list of extra permissions which will be granted on
the log files which are created.
:rtype: bool
:return: True if ok or raises an exception.
"""
if isinstance(target_bucket, Bucket):
target_bucket = target_bucket.name
blogging = BucketLogging(target=target_bucket, prefix=target_prefix, grants=grants)
return self.set_xml_logging(blogging.to_xml(), headers=headers)
def disable_logging(self, headers=None):
"""
Disable logging on a bucket.
:rtype: bool
:return: True if ok or raises an exception.
"""
blogging = BucketLogging()
return self.set_xml_logging(blogging.to_xml(), headers=headers)
def get_logging_status(self, headers=None):
"""
Get the logging status for this bucket.
:rtype: :class:`boto.s3.bucketlogging.BucketLogging`
:return: A BucketLogging object for this bucket.
"""
response = self.connection.make_request('GET', self.name,
query_args='logging', headers=headers)
body = response.read()
if response.status == 200:
blogging = BucketLogging()
h = handler.XmlHandler(blogging, self)
xml.sax.parseString(body, h)
return blogging
else:
raise self.connection.provider.storage_response_error(
response.status, response.reason, body)
def set_as_logging_target(self, headers=None):
"""
Setup the current bucket as a logging target by granting the necessary
permissions to the LogDelivery group to write log files to this bucket.
"""
policy = self.get_acl(headers=headers)
g1 = Grant(permission='WRITE', type='Group', uri=self.LoggingGroup)
g2 = Grant(permission='READ_ACP', type='Group', uri=self.LoggingGroup)
policy.acl.add_grant(g1)
policy.acl.add_grant(g2)
self.set_acl(policy, headers=headers)
def get_request_payment(self, headers=None):
response = self.connection.make_request('GET', self.name,
query_args='requestPayment', headers=headers)
body = response.read()
if response.status == 200:
return body
else:
raise self.connection.provider.storage_response_error(
response.status, response.reason, body)
def set_request_payment(self, payer='BucketOwner', headers=None):
body = self.BucketPaymentBody % payer
response = self.connection.make_request('PUT', self.name, data=body,
query_args='requestPayment', headers=headers)
body = response.read()
if response.status == 200:
return True
else:
raise self.connection.provider.storage_response_error(
response.status, response.reason, body)
def configure_versioning(self, versioning, mfa_delete=False,
mfa_token=None, headers=None):
"""
Configure versioning for this bucket.
        .. note:: This feature is currently in beta.
:type versioning: bool
        :param versioning: A boolean indicating whether versioning is
enabled (True) or disabled (False).
:type mfa_delete: bool
:param mfa_delete: A boolean indicating whether the Multi-Factor
Authentication Delete feature is enabled (True)
or disabled (False). If mfa_delete is enabled
then all Delete operations will require the
token from your MFA device to be passed in
the request.
:type mfa_token: tuple or list of strings
:param mfa_token: A tuple or list consisting of the serial number
from the MFA device and the current value of
the six-digit token associated with the device.
This value is required when you are changing
the status of the MfaDelete property of
the bucket.
"""
if versioning:
ver = 'Enabled'
else:
ver = 'Suspended'
if mfa_delete:
mfa = 'Enabled'
else:
mfa = 'Disabled'
body = self.VersioningBody % (ver, mfa)
if mfa_token:
if not headers:
headers = {}
provider = self.connection.provider
headers[provider.mfa_header] = ' '.join(mfa_token)
response = self.connection.make_request('PUT', self.name, data=body,
query_args='versioning', headers=headers)
body = response.read()
if response.status == 200:
return True
else:
raise self.connection.provider.storage_response_error(
response.status, response.reason, body)
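    # Illustrative usage sketch (not part of the original boto source): enable
    # versioning together with MFA delete; the device serial and six-digit
    # token below are placeholders.
    #
    #     bucket.configure_versioning(True, mfa_delete=True,
    #                                 mfa_token=('mfa-device-serial', '123456'))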
def get_versioning_status(self, headers=None):
"""
Returns the current status of versioning on the bucket.
:rtype: dict
:returns: A dictionary containing a key named 'Versioning'
that can have a value of either Enabled, Disabled,
or Suspended. Also, if MFADelete has ever been enabled
on the bucket, the dictionary will contain a key
named 'MFADelete' which will have a value of either
Enabled or Suspended.
"""
response = self.connection.make_request('GET', self.name,
query_args='versioning', headers=headers)
body = response.read()
boto.log.debug(body)
if response.status == 200:
d = {}
ver = re.search(self.VersionRE, body)
if ver:
d['Versioning'] = ver.group(1)
mfa = re.search(self.MFADeleteRE, body)
if mfa:
d['MfaDelete'] = mfa.group(1)
return d
else:
raise self.connection.provider.storage_response_error(
response.status, response.reason, body)
def configure_lifecycle(self, lifecycle_config, headers=None):
"""
Configure lifecycle for this bucket.
:type lifecycle_config: :class:`boto.s3.lifecycle.Lifecycle`
:param lifecycle_config: The lifecycle configuration you want
to configure for this bucket.
"""
fp = StringIO.StringIO(lifecycle_config.to_xml())
md5 = boto.utils.compute_md5(fp)
if headers is None:
headers = {}
headers['Content-MD5'] = md5[1]
headers['Content-Type'] = 'text/xml'
response = self.connection.make_request('PUT', self.name,
data=fp.getvalue(),
query_args='lifecycle',
headers=headers)
body = response.read()
if response.status == 200:
return True
else:
raise self.connection.provider.storage_response_error(
response.status, response.reason, body)
def get_lifecycle_config(self, headers=None):
"""
Returns the current lifecycle configuration on the bucket.
:rtype: :class:`boto.s3.lifecycle.Lifecycle`
:returns: A LifecycleConfig object that describes all current
lifecycle rules in effect for the bucket.
"""
response = self.connection.make_request('GET', self.name,
query_args='lifecycle', headers=headers)
body = response.read()
boto.log.debug(body)
if response.status == 200:
lifecycle = Lifecycle()
h = handler.XmlHandler(lifecycle, self)
xml.sax.parseString(body, h)
return lifecycle
else:
raise self.connection.provider.storage_response_error(
response.status, response.reason, body)
def delete_lifecycle_configuration(self, headers=None):
"""
Removes all lifecycle configuration from the bucket.
"""
response = self.connection.make_request('DELETE', self.name,
query_args='lifecycle',
headers=headers)
body = response.read()
boto.log.debug(body)
if response.status == 204:
return True
else:
raise self.connection.provider.storage_response_error(
response.status, response.reason, body)
def configure_website(self, suffix, error_key='', headers=None):
"""
Configure this bucket to act as a website
:type suffix: str
:param suffix: Suffix that is appended to a request that is for a
"directory" on the website endpoint (e.g. if the suffix
is index.html and you make a request to
samplebucket/images/ the data that is returned will
be for the object with the key name images/index.html).
The suffix must not be empty and must not include a
slash character.
:type error_key: str
:param error_key: The object key name to use when a 4XX class
error occurs. This is optional.
"""
if error_key:
error_frag = self.WebsiteErrorFragment % error_key
else:
error_frag = ''
body = self.WebsiteBody % (suffix, error_frag)
response = self.connection.make_request('PUT', self.name, data=body,
query_args='website',
headers=headers)
body = response.read()
if response.status == 200:
return True
else:
raise self.connection.provider.storage_response_error(
response.status, response.reason, body)
def get_website_configuration(self, headers=None):
"""
Returns the current status of website configuration on the bucket.
:rtype: dict
:returns: A dictionary containing a Python representation
of the XML response from S3. The overall structure is:
* WebsiteConfiguration
* IndexDocument
              * Suffix : suffix that is appended to a request that
is for a "directory" on the website endpoint
* ErrorDocument
* Key : name of object to serve when an error occurs
"""
response = self.connection.make_request('GET', self.name,
query_args='website', headers=headers)
body = response.read()
boto.log.debug(body)
if response.status == 200:
e = boto.jsonresponse.Element()
h = boto.jsonresponse.XmlHandler(e, None)
h.parse(body)
return e
else:
raise self.connection.provider.storage_response_error(
response.status, response.reason, body)
def delete_website_configuration(self, headers=None):
"""
Removes all website configuration from the bucket.
"""
response = self.connection.make_request('DELETE', self.name,
query_args='website', headers=headers)
body = response.read()
boto.log.debug(body)
if response.status == 204:
return True
else:
raise self.connection.provider.storage_response_error(
response.status, response.reason, body)
def get_website_endpoint(self):
"""
        Returns the fully qualified hostname to use if you want to access this
bucket as a website. This doesn't validate whether the bucket has
been correctly configured as a website or not.
"""
l = [self.name]
l.append(S3WebsiteEndpointTranslate.translate_region(self.get_location()))
l.append('.'.join(self.connection.host.split('.')[-2:]))
return '.'.join(l)
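    # Illustrative usage sketch (not part of the original boto source):
    # configure the bucket as a website and print the endpoint to browse it.
    #
    #     bucket.configure_website('index.html', error_key='error.html')
    #     print bucket.get_website_endpoint()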
def get_policy(self, headers=None):
"""
Returns the JSON policy associated with the bucket. The policy
is returned as an uninterpreted JSON string.
"""
response = self.connection.make_request('GET', self.name,
query_args='policy', headers=headers)
body = response.read()
if response.status == 200:
return body
else:
raise self.connection.provider.storage_response_error(
response.status, response.reason, body)
def set_policy(self, policy, headers=None):
"""
Add or replace the JSON policy associated with the bucket.
:type policy: str
:param policy: The JSON policy as a string.
"""
response = self.connection.make_request('PUT', self.name,
data=policy,
query_args='policy',
headers=headers)
body = response.read()
if response.status >= 200 and response.status <= 204:
return True
else:
raise self.connection.provider.storage_response_error(
response.status, response.reason, body)
def delete_policy(self, headers=None):
response = self.connection.make_request('DELETE', self.name,
data='/?policy',
query_args='policy',
headers=headers)
body = response.read()
if response.status >= 200 and response.status <= 204:
return True
else:
raise self.connection.provider.storage_response_error(
response.status, response.reason, body)
def initiate_multipart_upload(self, key_name, headers=None,
reduced_redundancy=False,
metadata=None, encrypt_key=False):
"""
Start a multipart upload operation.
:type key_name: string
:param key_name: The name of the key that will ultimately result from
this multipart upload operation. This will be exactly
as the key appears in the bucket after the upload
process has been completed.
:type headers: dict
:param headers: Additional HTTP headers to send and store with the
resulting key in S3.
:type reduced_redundancy: boolean
:param reduced_redundancy: In multipart uploads, the storage class is
specified when initiating the upload,
not when uploading individual parts. So
if you want the resulting key to use the
reduced redundancy storage class set this
flag when you initiate the upload.
:type metadata: dict
:param metadata: Any metadata that you would like to set on the key
that results from the multipart upload.
:type encrypt_key: bool
:param encrypt_key: If True, the new copy of the object will
be encrypted on the server-side by S3 and
will be stored in an encrypted form while
at rest in S3.
"""
query_args = 'uploads'
provider = self.connection.provider
if headers is None:
headers = {}
if reduced_redundancy:
storage_class_header = provider.storage_class_header
if storage_class_header:
headers[storage_class_header] = 'REDUCED_REDUNDANCY'
# TODO: what if the provider doesn't support reduced redundancy?
# (see boto.s3.key.Key.set_contents_from_file)
if encrypt_key:
headers[provider.server_side_encryption_header] = 'AES256'
if metadata is None:
metadata = {}
headers = boto.utils.merge_meta(headers, metadata,
self.connection.provider)
response = self.connection.make_request('POST', self.name, key_name,
query_args=query_args,
headers=headers)
body = response.read()
boto.log.debug(body)
if response.status == 200:
resp = MultiPartUpload(self)
h = handler.XmlHandler(resp, self)
xml.sax.parseString(body, h)
return resp
else:
raise self.connection.provider.storage_response_error(
response.status, response.reason, body)
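    # Illustrative usage sketch (not part of the original boto source): the
    # returned MultiPartUpload object is then used to upload the parts and to
    # complete (or cancel) the whole upload.
    #
    #     mp = bucket.initiate_multipart_upload('big-file.bin')
    #     with open('part1.bin', 'rb') as fp:
    #         mp.upload_part_from_file(fp, part_num=1)
    #     mp.complete_upload()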
def complete_multipart_upload(self, key_name, upload_id,
xml_body, headers=None):
"""
Complete a multipart upload operation.
"""
query_args = 'uploadId=%s' % upload_id
if headers is None:
headers = {}
headers['Content-Type'] = 'text/xml'
response = self.connection.make_request('POST', self.name, key_name,
query_args=query_args,
headers=headers, data=xml_body)
contains_error = False
body = response.read()
# Some errors will be reported in the body of the response
# even though the HTTP response code is 200. This check
# does a quick and dirty peek in the body for an error element.
if body.find('<Error>') > 0:
contains_error = True
boto.log.debug(body)
if response.status == 200 and not contains_error:
resp = CompleteMultiPartUpload(self)
h = handler.XmlHandler(resp, self)
xml.sax.parseString(body, h)
return resp
else:
raise self.connection.provider.storage_response_error(
response.status, response.reason, body)
def cancel_multipart_upload(self, key_name, upload_id, headers=None):
query_args = 'uploadId=%s' % upload_id
response = self.connection.make_request('DELETE', self.name, key_name,
query_args=query_args,
headers=headers)
body = response.read()
boto.log.debug(body)
if response.status != 204:
raise self.connection.provider.storage_response_error(
response.status, response.reason, body)
def delete(self, headers=None):
return self.connection.delete_bucket(self.name, headers=headers)
| kumar303/rockit | vendor-local/boto/s3/bucket.py | Python | bsd-3-clause | 62,883 | 0.001749 |
"""Modern art."""
from galry import *
import numpy.random as rdn
figure(constrain_ratio=True, antialiasing=True)
# random positions
positions = .25 * rdn.randn(1000, 2)
# random colors
colors = rdn.rand(len(positions),4)
# TRIANGLES: three consecutive points = one triangle, no overlap
plot(primitive_type='TRIANGLES', position=positions, color=colors)
show()
| rossant/galry | examples/modern_art.py | Python | bsd-3-clause | 365 | 0.00274 |
from os import environ
AWS_ACCESS_KEY_ID = environ["AWS_ACCESS_KEY_ID"]
AWS_SECRET_ACCESS_KEY = environ["AWS_SECRET_ACCESS_KEY"]
BUCKET_NAME = "22board-captchas"
AWS_ROOT_URL = "https://s3-eu-west-1.amazonaws.com/{}/".format(BUCKET_NAME)
| volnt/22board | app/config.py | Python | mit | 240 | 0 |
import logging.config
logging.config.fileConfig("config/logging.conf")
logger = logging.getLogger("temp")
logger.info("Using temperature logger")
| cubiks/rpi_thermo_py | test/test_logging.py | Python | mit | 147 | 0 |
#!/usr/bin/env python3
import argparse
import os
import sys
from ucca.ioutil import get_passages_with_progress_bar, write_passage
desc = """Rename passages by a given mapping of IDs"""
def main(filename, input_filenames, outdir):
os.makedirs(outdir, exist_ok=True)
with open(filename, encoding="utf-8") as f:
pairs = [line.strip().split() for line in f]
old_to_new_id = {old_id: new_id for new_id, old_id in pairs}
for passage in get_passages_with_progress_bar(input_filenames, desc="Renaming"):
passage._ID = old_to_new_id[passage.ID]
write_passage(passage, outdir=outdir, verbose=False)
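# Illustrative example (not part of the original script): the mapping file has
# one "<NEW ID> <OLD ID>" pair per line, e.g.
#
#     504001 120
#     504002 121
#
# after which the script could be run as
#
#     python set_external_id_offline.py mapping.txt passages/*.xml -o renamed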
if __name__ == "__main__":
argument_parser = argparse.ArgumentParser(description=desc)
argument_parser.add_argument("filename", help="file with lines of the form <NEW ID> <OLD ID>")
argument_parser.add_argument("input_filenames", help="filename pattern or directory with input passages")
argument_parser.add_argument("-o", "--outdir", default=".", help="output directory")
main(**vars(argument_parser.parse_args()))
sys.exit(0)
| danielhers/ucca | scripts/set_external_id_offline.py | Python | gpl-3.0 | 1,091 | 0.003666 |
# encoding: utf-8
import os
import subprocess
import sys
from workflow import Workflow3 as Workflow, MATCH_SUBSTRING
from workflow.background import run_in_background
import brew_actions
import helpers
GITHUB_SLUG = 'fniephaus/alfred-homebrew'
def execute(wf, cmd_list):
brew_arch = helpers.get_brew_arch(wf)
new_env = helpers.initialise_path(brew_arch)
cmd, err = subprocess.Popen(cmd_list,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
env=new_env).communicate()
if err:
return err
return cmd
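# Illustrative usage sketch (not part of the original workflow source): every
# brew invocation below goes through execute(), which runs the command with a
# PATH adjusted for the detected Homebrew architecture, e.g.
#
#     output = execute(wf, ['brew', 'list', '--versions'])
#     for line in output.splitlines():
#         print(line)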
def get_all_formulae():
return execute(wf, ['brew', 'formulae']).splitlines()
def get_installed_formulae():
return execute(wf, ['brew', 'list', '--versions']).splitlines()
def get_pinned_formulae():
return execute(wf, ['brew', 'list', '--pinned', '--versions']).splitlines()
def get_outdated_formulae():
return execute(wf, ['brew', 'outdated', '--formula']).splitlines()
def get_info():
return execute(wf, ['brew', 'info'])
def get_commands(wf, query):
result = execute(wf, ['brew', 'commands']).splitlines()
commands = [x for x in result if ' ' not in x]
query_filter = query.split()
if len(query_filter) > 1:
return wf.filter(query_filter[1], commands, match_on=MATCH_SUBSTRING)
return commands
def get_all_services():
services_response = execute(wf, ['brew', 'services', 'list']).splitlines()
services_response.pop(0)
services = []
for serviceLine in services_response:
services.append({'name': serviceLine.split()[0], 'status': serviceLine.split()[1]})
return services
def filter_all_formulae(wf, query):
formulae = wf.cached_data('brew_all_formulae',
get_all_formulae,
max_age=3600)
query_filter = query.split()
if len(query_filter) > 1:
return wf.filter(query_filter[1], formulae, match_on=MATCH_SUBSTRING)
return formulae
def filter_installed_formulae(wf, query):
formulae = wf.cached_data('brew_installed_formulae',
get_installed_formulae,
max_age=3600)
query_filter = query.split()
if len(query_filter) > 1:
return wf.filter(query_filter[1], formulae, match_on=MATCH_SUBSTRING)
return formulae
def filter_pinned_formulae(wf, query):
formulae = wf.cached_data('brew_pinned_formulae',
get_pinned_formulae,
max_age=3600)
query_filter = query.split()
if len(query_filter) > 1:
return wf.filter(query_filter[1], formulae, match_on=MATCH_SUBSTRING)
return formulae
def filter_outdated_formulae(wf, query):
formulae = wf.cached_data('brew_outdated_formulae',
get_outdated_formulae,
max_age=3600)
query_filter = query.split()
if len(query_filter) > 1:
return wf.filter(query_filter[1], formulae, match_on=MATCH_SUBSTRING)
return formulae
def filter_all_services(wf, query):
services = wf.cached_data('brew_all_services',
get_all_services,
session=True)
query_filter = query.split()
if len(query_filter) > 1:
return wf.filter(query_filter[1], services, key=lambda x: x['name'], match_on=MATCH_SUBSTRING)
return services
def add_service_actions(wf, service_name):
wf.add_item('Run Service',
'Run the service formula without registering to launch at login (or boot).',
autocomplete='services %s run' % service_name,
arg='brew services run %s' % service_name,
valid=True,
icon=helpers.get_icon(wf, 'chevron-right'))
wf.add_item('Stop Service',
'Stop the service formula immediately and unregister it from launching at login (or boot).',
autocomplete='services %s stop' % service_name,
arg='brew services stop %s' % service_name,
valid=True,
icon=helpers.get_icon(wf, 'chevron-right'))
wf.add_item('Start Service',
'Start the service formula immediately and register it to launch at login (or boot).',
autocomplete='services %s start' % service_name,
arg='brew services start %s' % service_name,
valid=True,
icon=helpers.get_icon(wf, 'chevron-right'))
wf.add_item('Restart Service',
'Stop (if necessary) and start the service formula immediately and register it to launch '
'at login (or boot).',
autocomplete='services %s restart' % service_name,
arg='brew services restart %s' % service_name,
valid=True,
icon=helpers.get_icon(wf, 'chevron-right'))
def main(wf):
if wf.update_available:
wf.add_item('An update is available!',
autocomplete='workflow:update',
valid=False,
icon=helpers.get_icon(wf, 'cloud-download'))
# Check for brew installation
find_brew = helpers.brew_installed()
if not (find_brew['INTEL'] or find_brew['ARM']):
helpers.brew_installation_instructions(wf)
else:
# extract query
query = wf.args[0] if len(wf.args) else None
if (not query and
len(wf.cached_data('brew_outdated_formulae',
get_outdated_formulae,
max_age=3600)) > 0):
wf.add_item('Some of your formulae are outdated!',
autocomplete='outdated ',
valid=False,
icon=helpers.get_icon(wf, 'cloud-download'))
if query and query.startswith('install'):
for formula in filter_all_formulae(wf, query):
wf.add_item(formula, 'Install formula.',
arg='brew install %s' % formula,
valid=True,
icon=helpers.get_icon(wf, 'package'))
elif query and query.startswith('services'):
query_filter = query.split()
if len(query_filter) == 2 and query.endswith(' '):
service_name = query_filter[1]
add_service_actions(wf, service_name)
else:
services = filter_all_services(wf, query)
for service in services:
wf.add_item(service['name'], 'Select for action. Status: %s' % service['status'],
autocomplete='services %s ' % service['name'],
arg='',
valid=False,
icon=helpers.get_icon(wf, 'package'))
elif query and query.startswith('search'):
for formula in filter_all_formulae(wf, query):
wf.add_item(formula, 'Open formula on GitHub.',
arg='brew info --github %s' % formula,
valid=True,
icon=helpers.get_icon(wf, 'package'))
elif query and query.startswith('uninstall'):
for formula in filter_installed_formulae(wf, query):
name = formula.rsplit()[0]
wf.add_item(formula, 'Uninstall formula.',
arg='brew uninstall %s' % name,
valid=True,
icon=helpers.get_icon(wf, 'package'))
elif query and query.startswith('list'):
for formula in filter_installed_formulae(wf, query):
name = formula.rsplit()[0]
wf.add_item(formula, 'Open formula on GitHub.',
arg='brew info --github %s' % name,
valid=True,
icon=helpers.get_icon(wf, 'package'))
elif query and query.startswith('pin'):
for formula in filter_installed_formulae(wf, query):
name = formula.rsplit()[0]
wf.add_item(formula, 'Pin formula.',
arg='brew pin %s' % name,
valid=True,
icon=helpers.get_icon(wf, 'package'))
# delete cached file
wf.cache_data('brew_pinned_formulae', None)
elif query and query.startswith('unpin'):
for formula in filter_pinned_formulae(wf, query):
name = formula.rsplit()[0]
wf.add_item(formula, 'Unpin formula.',
arg='brew unpin %s' % name,
valid=True,
icon=helpers.get_icon(wf, 'package'))
# delete cached file
wf.cache_data('brew_pinned_formulae', None)
elif query and query.startswith('cat'):
for formula in filter_all_formulae(wf, query):
name = formula.rsplit()[0]
wf.add_item(formula, 'Display the source to this formula.',
arg='brew cat %s' % name,
valid=True,
icon=helpers.get_icon(wf, 'package'))
elif query and query.startswith('outdated'):
for formula in filter_outdated_formulae(wf, query):
name = formula.rsplit()[0]
wf.add_item(formula, 'Upgrade formula.',
arg='brew upgrade %s' % name,
valid=True,
icon=helpers.get_icon(wf, 'package'))
elif query and query.startswith('info'):
wf.add_item(get_info(),
autocomplete='',
icon=helpers.get_icon(wf, 'info'))
elif query and query.startswith('commands'):
for command in get_commands(wf, query):
wf.add_item(command, 'Run this command.',
arg='brew %s' % command,
valid=True,
icon=helpers.get_icon(wf, 'chevron-right'))
elif query and query.startswith('config'):
helpers.edit_settings(wf)
wf.add_item('`settings.json` has been opened.',
autocomplete='',
icon=helpers.get_icon(wf, 'info'))
else:
actions = brew_actions.ACTIONS
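            # offer an extra "Unpin" action only when pinned formulae exist (list cached for up to an hour)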
if len(wf.cached_data('brew_pinned_formulae',
get_pinned_formulae,
max_age=3600)) > 0:
actions.append({
'name': 'Unpin',
'description': 'Unpin formula.',
'autocomplete': 'unpin ',
'arg': '',
'valid': False,
})
# filter actions by query
if query:
actions = wf.filter(query, actions,
key=helpers.search_key_for_action,
match_on=MATCH_SUBSTRING)
if len(actions) > 0:
for action in actions:
wf.add_item(action['name'], action['description'],
uid=action['name'],
autocomplete=action['autocomplete'],
arg=action['arg'],
valid=action['valid'],
icon=helpers.get_icon(wf, 'chevron-right'))
else:
wf.add_item('No action found for "%s"' % query,
autocomplete='',
icon=helpers.get_icon(wf, 'info'))
if len(wf._items) == 0:
query_name = query[query.find(' ') + 1:]
wf.add_item('No formula found for "%s"' % query_name,
autocomplete='%s ' % query[:query.find(' ')],
icon=helpers.get_icon(wf, 'info'))
wf.send_feedback()
# refresh cache
cmd = ['/usr/bin/python', wf.workflowfile('brew_refresh.py')]
run_in_background('brew_refresh', cmd)
if __name__ == '__main__':
wf = Workflow(update_settings={'github_slug': GITHUB_SLUG})
sys.exit(wf.run(main))
| fniephaus/alfred-homebrew | src/brew.py | Python | mit | 12,427 | 0.000644 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
PyPI interface (see http://wiki.python.org/moin/PyPiXmlRpc)
"""
from datetime import datetime
import itertools
import re
import xmlrpclib
from django.template.defaultfilters import slugify
from package.models import Category, Package, Version
from pypi.versioning import highest_version
from celery.decorators import task
base_url = "http://pypi.python.org/pypi/"
PYPI = xmlrpclib.Server(base_url)
class Slurper(object):
""" Fetches data from PyPI """
def __init__(self, package):
self.package_name = package
self.dumb_category, created = Category.objects.get_or_create(
title='Python', slug='python')
self.dumb_category.save()
def get_latest_version_number(self, package_name, versions=None):
""" Returns the latest version number for a package """
if versions:
return highest_version(versions)
else:
return highest_version(PYPI.package_releases(package_name))
def get_or_create_package(self, package_name, version):
data = PYPI.release_data(package_name, version)
pypi_url = base_url + package_name
package, created = Package.objects.get_or_create(
title = data['name'],
slug = slugify(package_name),
category = self.dumb_category,
pypi_url = base_url + data['name']
)
package.repo_description = data['summary'] or data['description']
if not package.repo_url:
url = data.get("home_page", None) or data.get('project_url',"") or pypi_url
repo_pattern = '((?:http|https|git)://github.com/[^/]*/[^/]*)/{0,1}'
match = re.match(repo_pattern, url)
if match and match.group(1):
package.repo_url = match.group(1)
else:
# TODO do we want to assume this is a repo url?
# should there be more checking for repo patterns?
package.repo_url = url
package.save()
package.fetch_metadata()
return (package, created) | pythonchelle/opencomparison | apps/pypi/slurper.py | Python | mit | 2,191 | 0.010041 |
# Copyright 2004-2017 Tom Rothamel <pytom@bishoujo.us>
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# The blacklist of OpenGL cards. Fields are:
# - A substring of the Renderer.
# - A substring of the Version.
# - True to allow shader rendering.
# - True to allow fixed-function rendering.
# If both of the last two entries are false, GL refuses to
# start.
BLACKLIST = [
# Crashes for Mugenjohncel.
("S3 Graphics DeltaChrome", "1.4 20.00", False, False),
# A bug in Mesa 7.9 and 7.10 (before 7.10.3) causes the system to
# fail to initialize the GLSL compiler.
# https://bugs.freedesktop.org/show_bug.cgi?id=35603
("Mesa", "Mesa 7.9", False, True),
("Mesa", "Mesa 7.10.3", True, True),
("Mesa", "Mesa 7.10", False, True),
# Default to allowing everything.
("", "", True, True),
]
| kfcpaladin/sze-the-game | renpy/gl/glblacklist.py | Python | mit | 1,846 | 0 |
# -*- coding: utf-8 -*-
#
# Copyright 2012-2015 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import datetime
import json
import luigi
from luigi.contrib.esindex import CopyToIndex
class FakeDocuments(luigi.Task):
"""
Generates a local file containing 5 elements of data in JSON format.
"""
#: the date parameter.
date = luigi.DateParameter(default=datetime.date.today())
def run(self):
"""
Writes data in JSON format into the task's output target.
The data objects have the following attributes:
* `_id` is the default Elasticsearch id field,
* `text`: the text,
* `date`: the day when the data was created.
"""
today = datetime.date.today()
with self.output().open('w') as output:
for i in range(5):
output.write(json.dumps({'_id': i, 'text': 'Hi %s' % i,
'date': str(today)}))
output.write('\n')
def output(self):
"""
Returns the target output for this task.
In this case, a successful execution of this task will create a file on the local filesystem.
:return: the target output for this task.
:rtype: object (:py:class:`luigi.target.Target`)
"""
return luigi.LocalTarget(path='/tmp/_docs-%s.ldj' % self.date)
class IndexDocuments(CopyToIndex):
"""
This task loads JSON data contained in a :py:class:`luigi.target.Target` into an ElasticSearch index.
This task's input will the target returned by :py:meth:`~.FakeDocuments.output`.
This class uses :py:meth:`luigi.contrib.esindex.CopyToIndex.run`.
After running this task you can run:
.. code-block:: console
$ curl "localhost:9200/example_index/_search?pretty"
to see the indexed documents.
To see the update log, run
.. code-block:: console
$ curl "localhost:9200/update_log/_search?q=target_index:example_index&pretty"
To cleanup both indexes run:
.. code-block:: console
$ curl -XDELETE "localhost:9200/example_index"
$ curl -XDELETE "localhost:9200/update_log/_query?q=target_index:example_index"
"""
#: date task parameter (default = today)
date = luigi.DateParameter(default=datetime.date.today())
#: the name of the index in ElasticSearch to be updated.
index = 'example_index'
#: the name of the document type.
doc_type = 'greetings'
#: the host running the ElasticSearch service.
host = 'localhost'
#: the port used by the ElasticSearch service.
port = 9200
def requires(self):
"""
This task's dependencies:
* :py:class:`~.FakeDocuments`
:return: object (:py:class:`luigi.task.Task`)
"""
return FakeDocuments()
if __name__ == "__main__":
luigi.run(['IndexDocuments', '--local-scheduler'])
| rizzatti/luigi | examples/elasticsearch_index.py | Python | apache-2.0 | 3,410 | 0.00176 |
""" heritago URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
from heritages import views
from rest_framework.routers import DefaultRouter
router = DefaultRouter()
router.register(r"api/users", views.UserDetail)
router.register(r"api/users", views.Users)
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r"api/v1/heritages/", include("heritages.urls")),
url(r"^api/users/me$", views.UserDetail.as_view({"get": "get_me"})),
# url(r"api/v1/annotations/", views.AnnotationListView.as_view()),
# url(r"api/v1/annotations/(?P<pk>\d+)$", views.AnnotationView.as_view()),
# user auth urls
# url(r'^$', views.diary, name='home'),
# url(r'^login/$', views.login, name='login'),
# url(r'^auth/$', views.auth_view, name='auth_view'),
# url(r'^logout/$', views.logout, name='logout'),
# url(r'^invalid/$', views.invalid_login, name='invalid_login'),
# url(r'^register/$', views.register_user, name='register_user'),
# url(r'^profile/$', views.user_profile, name='user_profile'),
# url(r'^change_password/$', views.change_password , name='password-change'),
]
urlpatterns += router.urls
| TalatCikikci/heritago | heritago/heritago/urls.py | Python | mit | 1,796 | 0.000557 |
my_name='Zed A. Shaw'
my_age=35 # not a lie
my_height=74 #inches
my_weight=180 #lbs
my_eyes='Blue'
my_teeth='White'
my_hair='Brown'
print "Let's talk about %s." % my_name
print "He's %d inches tall." % my_height
print "He's %d pounds heavy." % my_weight
print "Actually that's not too heavy."
print "He's got %s eyes and %s hair." % (my_eyes,my_hair)
print "His teeth are usually %s depending on the coffee." % my_teeth
#this line is tricky, try to get it exactly right
print "If I add %d,%d,and %d I get %d." % (
my_age,my_height,my_weight,my_age+my_height+my_weight) | Jumpers/MysoftAutoTest | Step1-PythonBasic/Practices/yuxq/1-5/ex5.py | Python | apache-2.0 | 570 | 0.033333 |
##
# \namespace cross3d.classes
#
# \remarks [desc::commented]
#
# \author Mikeh
# \author Blur Studio
# \date 06/08/11
#
from fcurve import FCurve
from exceptions import Exceptions
from dispatch import Dispatch
from clipboard import Clipboard
from valuerange import ValueRange
from framerange import FrameRange
from filesequence import FileSequence
from timecode import Timecode
from flipbook import FlipBook
| blurstudio/cross3d | cross3d/classes/__init__.py | Python | mit | 434 | 0.016129 |
"""first commit
Revision ID: 97cd7f996752
Revises: 084658cb0aab
Create Date: 2017-05-20 06:49:09.431920
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '97cd7f996752'
down_revision = '084658cb0aab'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
| exleym/IWBT | alembic/versions/97cd7f996752_first_commit.py | Python | apache-2.0 | 552 | 0 |
# -*- coding: utf8 -*-
from __future__ import absolute_import
from django.conf.urls import url, include
from .views import MonitoringView
urlpatterns = [
url(r'^$', MonitoringView.as_view(), name="index"),
url(r'^info/', include('app.modules.monitoring_nodes.info.urls', namespace='info')),
url(r'^plugins/', include('app.modules.monitoring_nodes.plugins.urls', namespace="plugins")),
url(r'^nodes/', include('app.modules.monitoring_nodes.nodes.urls', namespace="nodes")),
url(r'^groups/', include('app.modules.monitoring_nodes.groups.urls', namespace="groups")),
url(r'^graphs/', include('app.modules.monitoring_nodes.graphs.urls', namespace="graphs")),
url(r'^configs/', include('app.modules.monitoring_nodes.configs.urls', namespace="configs")),
]
| netsuileo/sfu-cluster-dashboard | dashboard/app/modules/monitoring_nodes/urls.py | Python | lgpl-3.0 | 780 | 0.007692 |
# -*- coding: utf-8 -*-
import sys
import subprocess
import signal
import locale
from six import print_
from six.moves import urllib
import requests
class Color:
PURPLE = '\033[95m'
BLUE = '\033[94m'
GREEN = '\033[92m'
ORANGE = '\033[93m'
RED = '\033[91m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
RESET = '\033[0m'
c = Color
# from http://hg.python.org/cpython/rev/768722b2ae0a/
def restore_signals():
signals = ('SIGPIPE', 'SIGXFZ', 'SIGXFSZ')
for sig in signals:
if hasattr(signal, sig):
signal.signal(getattr(signal, sig), signal.SIG_DFL)
def run_and_print(command, cwd=None):
p = subprocess.Popen(command,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
bufsize=-1,
cwd=cwd,
preexec_fn=restore_signals,
universal_newlines=True)
chunks = []
encoding = locale.getdefaultlocale()[1] or 'ascii'
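    # stream the child's stdout line by line so progress is shown while the command runs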
try:
while True:
chunk = p.stdout.readline()
if chunk != '':
try:
getattr(sys.stdout, 'buffer', sys.stdout).write(chunk.encode(encoding))
sys.stdout.flush()
except UnicodeDecodeError:
pass
chunks.append(chunk)
else:
break
finally:
p.stdout.close()
p.wait()
return ''.join(chunks)
def post_results(data, devnull):
url = 'https://serverscope.io/api/trials.txt'
headers = {
'Content-Type': 'application/x-www-form-urlencoded',
'Accept': 'text/plain',
'User-Agent': 'serverscope.io benchmark tool'
}
response = requests.post(url, data=urllib.parse.urlencode(data), headers=headers)
print_(response.text)
def get_geo_info():
"""Return geo location information."""
print_(c.GREEN + 'Retrieving server location... ' + c.RESET)
try:
cmd = ['curl', '-s', 'http://geoip.nekudo.com/api/']
geo = subprocess.Popen(cmd,
stdout=subprocess.PIPE,
universal_newlines=True).communicate()[0]
except ValueError:
print_(c.RED + "geoip API error. Terminating..." + c.RESET)
sys.exit(1)
return geo
| selectnull/serverscope-benchmark | serverscope_benchmark/utils.py | Python | mit | 2,357 | 0.001273 |
from django.conf.urls.defaults import patterns
from django.conf import settings
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('',
( r'^$', 'statserver.stats.views.browse' ),
( r'^stats/addnode$', 'statserver.stats.views.addnode' ),
( r'^media/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.MEDIA_ROOT}),
)
| MiltosD/CEF-ELRC | misc/tools/statserver/urls.py | Python | bsd-3-clause | 427 | 0.032787 |
from __future__ import absolute_import
from .api import Request, Response
from .message import MessageSet
from .types import Int16, Int32, Int64, String, Array, Schema
class ProduceResponse_v0(Response):
API_KEY = 0
API_VERSION = 0
SCHEMA = Schema(
('topics', Array(
('topic', String('utf-8')),
('partitions', Array(
('partition', Int32),
('error_code', Int16),
('offset', Int64)))))
)
class ProduceResponse_v1(Response):
API_KEY = 0
API_VERSION = 1
SCHEMA = Schema(
('topics', Array(
('topic', String('utf-8')),
('partitions', Array(
('partition', Int32),
('error_code', Int16),
('offset', Int64))))),
('throttle_time_ms', Int32)
)
class ProduceResponse_v2(Response):
API_KEY = 0
API_VERSION = 2
SCHEMA = Schema(
('topics', Array(
('topic', String('utf-8')),
('partitions', Array(
('partition', Int32),
('error_code', Int16),
('offset', Int64),
('timestamp', Int64))))),
('throttle_time_ms', Int32)
)
class ProduceResponse_v3(Response):
API_KEY = 0
API_VERSION = 3
SCHEMA = ProduceResponse_v2.SCHEMA
class ProduceRequest_v0(Request):
API_KEY = 0
API_VERSION = 0
RESPONSE_TYPE = ProduceResponse_v0
SCHEMA = Schema(
('required_acks', Int16),
('timeout', Int32),
('topics', Array(
('topic', String('utf-8')),
('partitions', Array(
('partition', Int32),
('messages', MessageSet)))))
)
def expect_response(self):
if self.required_acks == 0: # pylint: disable=no-member
return False
return True
class ProduceRequest_v1(Request):
API_KEY = 0
API_VERSION = 1
RESPONSE_TYPE = ProduceResponse_v1
SCHEMA = ProduceRequest_v0.SCHEMA
def expect_response(self):
if self.required_acks == 0: # pylint: disable=no-member
return False
return True
class ProduceRequest_v2(Request):
API_KEY = 0
API_VERSION = 2
RESPONSE_TYPE = ProduceResponse_v2
SCHEMA = ProduceRequest_v1.SCHEMA
def expect_response(self):
if self.required_acks == 0: # pylint: disable=no-member
return False
return True
class ProduceRequest_v3(Request):
API_KEY = 0
API_VERSION = 3
RESPONSE_TYPE = ProduceResponse_v3
SCHEMA = Schema(
('transactional_id', String('utf-8')),
('required_acks', Int16),
('timeout', Int32),
('topics', Array(
('topic', String('utf-8')),
('partitions', Array(
('partition', Int32),
('messages', MessageSet)))))
)
def expect_response(self):
if self.required_acks == 0: # pylint: disable=no-member
return False
return True
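# Request/response classes indexed by API version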
ProduceRequest = [
ProduceRequest_v0, ProduceRequest_v1, ProduceRequest_v2,
ProduceRequest_v3
]
ProduceResponse = [
    ProduceResponse_v0, ProduceResponse_v1, ProduceResponse_v2,
    ProduceResponse_v3
]
| louisLouL/pair_trading | capstone_env/lib/python3.6/site-packages/kafka/protocol/produce.py | Python | mit | 3,232 | 0.001238 |
# -*- coding: utf-8 -*-
"""
* Copyright (C) 2010-2014 Loic BLOT <http://www.unix-experience.fr/>
*
* This program is free software you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""
import json
from django.shortcuts import render
from django.http import HttpResponse
from django.utils.translation import ugettext_lazy as _
import Cisco
def getSNMPMib(request):
if request.method == "GET" and "mib" in request.GET and "vendor" in request.GET:
# Only Cisco is supported at this time
if request.GET["vendor"] == "cisco" and request.GET["mib"] in Cisco.Mibs:
return HttpResponse(json.dumps(Cisco.Mibs[request.GET["mib"]]), content_type="application/json")
return HttpResponse(_('Err-Wrong-Request'))
def getPortMibValue(request):
if request.method == "GET" and "vendor" in request.GET and "device" in request.GET and "mib" in request.GET:
if request.GET["vendor"] == "cisco":
SwitchObj = Cisco.CiscoSwitch()
mib = request.GET["mib"]
if SwitchObj.setDevice(request.GET["device"]) and mib in Cisco.Mibs:
if "pid" in request.GET and SwitchObj.setPortId(request.GET["pid"]):
					# We don't call methods here, it's faster to use the dictionary
return HttpResponse(SwitchObj.snmpget(Cisco.Mibs[mib]))
else:
					# Invalidate the port ID
SwitchObj.setPortId("")
return HttpResponse(SwitchObj.snmpget(Cisco.Mibs[mib]))
return HttpResponse(_('Err-Wrong-Request'))
def setPortMibValue(request):
if request.method == "GET" and "vendor" in request.GET and "device" in request.GET and "mib" in request.GET and "value" in request.GET:
if request.GET["vendor"] == "cisco":
SwitchObj = Cisco.CiscoSwitch()
mib = request.GET["mib"]
if SwitchObj.setDevice(request.GET["device"]) and mib in Cisco.Mibs:
if "pid" in request.GET and SwitchObj.setPortId(request.GET["pid"]):
					# We don't call methods here, it's faster to use the dictionary
return HttpResponse(SwitchObj.snmpset(Cisco.Mibs[mib],request.GET["value"]))
else:
					# Invalidate the port ID
SwitchObj.setPortId("")
return HttpResponse(SwitchObj.snmpset(Cisco.Mibs[mib],request.GET["value"]))
return HttpResponse(_('Err-Wrong-Request'))
def saveDeviceConfig(request):
if request.method == "GET" and "vendor" in request.GET and "device" in request.GET:
if request.GET["vendor"] == "cisco":
SwitchObj = Cisco.CiscoSwitch()
if SwitchObj.setDevice(request.GET["device"]):
return HttpResponse(SwitchObj.saveDeviceConfig())
return HttpResponse(_('Err-Wrong-Request'))
| nerzhul/Z-Eye | service/WebApp/Z_Eye/engine/Switches/API/__init__.py | Python | gpl-2.0 | 3,146 | 0.020979 |
# -*- coding: utf-8 -*-
#
# ask-undrgz system of questions uses data from underguiz.
# Copyright (c) 2010, Nycholas de Oliveira e Oliveira <nycholas@gmail.com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# # Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# # Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# # Neither the name of the Nycholas de Oliveira e Oliveira nor the names of
# its contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# Django settings for ask_undrgz project.
import os
ROOT_PATH = os.path.realpath(os.path.dirname(__file__))
DEBUG = False
TEMPLATE_DEBUG = DEBUG
ADMINS = (
('Nycholas de Oliveira e Oliveira', 'nycholas@gmail.com'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': '', # Or path to database file if using sqlite3.
'USER': '', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# Twitter
TWITTER_USERNAME = 'ask_undrgz'
TWITTER_PASSWORD = 'XXX'
TWITTER_CONSUMER_KEY = 'XXX'
TWITTER_CONSUMER_SECRET = 'XXX'
TWITTER_OAUTH_TOKEN = 'XXX'
TWITTER_OAUTH_TOKEN_SECRET = 'XXX'
TWITTER_CALLBACK = 'http://ask-undrgz.appspot.com/_oauth/twitter/callback/'
if DEBUG:
TWITTER_CALLBACK = 'http://localhost:8080/_oauth/twitter/callback/'
ugettext = lambda s: s
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
LANGUAGES = (
('en', ugettext('English')),
('pt-BR', ugettext('Portuguese Brazil')),
)
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True
# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"
MEDIA_URL = ''
# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
ADMIN_MEDIA_PREFIX = '/media/'
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'ti*(j(^fvi!&1cu7#sw7mkhb=dgl5v_$1&v5=wom_l4y!x9j*@'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
# 'django.contrib.sessions.middleware.SessionMiddleware',
# 'django.middleware.csrf.CsrfViewMiddleware',
'django.middleware.locale.LocaleMiddleware',
# 'django.contrib.auth.middleware.AuthenticationMiddleware',
# 'django.contrib.messages.middleware.MessageMiddleware',
)
ROOT_URLCONF = 'ask_undrgz.urls'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
ROOT_PATH + '/templates',
)
INSTALLED_APPS = (
# 'django.contrib.auth',
'django.contrib.contenttypes',
# 'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
# Uncomment the next line to enable the admin:
# 'django.contrib.admin',
'ask_undrgz.question',
)
| nycholas/ask-undrgz | src/ask-undrgz/ask_undrgz/settings.py | Python | bsd-3-clause | 5,653 | 0.002653 |
#!/usr/bin/env python
# Copyright (c) 2008 Aldo Cortesi
# Copyright (c) 2011 Mounier Florian
# Copyright (c) 2012 dmpayton
# Copyright (c) 2014 Sean Vig
# Copyright (c) 2014 roger
# Copyright (c) 2014 Pedro Algarvio
# Copyright (c) 2014-2015 Tycho Andersen
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import sys
import textwrap
from setuptools import setup
from setuptools.command.install import install
class CheckCairoXcb(install):
def cairo_xcb_check(self):
try:
from cairocffi import cairo
cairo.cairo_xcb_surface_create
return True
except AttributeError:
return False
def finalize_options(self):
if not self.cairo_xcb_check():
print(textwrap.dedent("""
It looks like your cairocffi was not built with xcffib support. To fix this:
- Ensure a recent xcffib is installed (pip install 'xcffib>=0.3.2')
- The pip cache is cleared (remove ~/.cache/pip, if it exists)
- Reinstall cairocffi, either:
pip install --no-deps --ignore-installed cairocffi
or
pip uninstall cairocffi && pip install cairocffi
"""))
sys.exit(1)
install.finalize_options(self)
long_description = """
A pure-Python tiling window manager.
Features
========
* Simple, small and extensible. It's easy to write your own layouts,
widgets and commands.
* Configured in Python.
* Command shell that allows all aspects of
Qtile to be managed and inspected.
* Complete remote scriptability - write scripts to set up workspaces,
manipulate windows, update status bar widgets and more.
* Qtile's remote scriptability makes it one of the most thoroughly
  unit-tested window managers around.
"""
if '_cffi_backend' in sys.builtin_module_names:
import _cffi_backend
requires_cffi = "cffi==" + _cffi_backend.__version__
else:
requires_cffi = "cffi>=1.1.0"
# PyPy < 2.6 compatibility
if requires_cffi.startswith("cffi==0."):
cffi_args = dict(
zip_safe=False
)
else:
cffi_args = dict(cffi_modules=[
'libqtile/ffi_build.py:pango_ffi',
'libqtile/ffi_build.py:xcursors_ffi'
])
dependencies = ['xcffib>=0.3.2', 'cairocffi>=0.7', 'six>=1.4.1', requires_cffi]
if sys.version_info >= (3, 4):
pass
elif sys.version_info >= (3, 3):
dependencies.append('asyncio')
else:
dependencies.append('trollius')
setup(
name="qtile",
version="0.10.6",
description="A pure-Python tiling window manager.",
long_description=long_description,
classifiers=[
"Intended Audience :: End Users/Desktop",
"License :: OSI Approved :: MIT License",
"Development Status :: 3 - Alpha",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
"Operating System :: Unix",
"Topic :: Desktop Environment :: Window Managers",
],
keywords="qtile tiling window manager",
author="Aldo Cortesi",
author_email="aldo@nullcube.com",
maintainer="Tycho Andersen",
maintainer_email="tycho@tycho.ws",
url="http://qtile.org",
license="MIT",
install_requires=dependencies,
setup_requires=dependencies,
extras_require={
'ipython': ["ipykernel", "jupyter_console"],
},
packages=['libqtile',
'libqtile.interactive',
'libqtile.layout',
'libqtile.scripts',
'libqtile.widget',
'libqtile.resources'
],
package_data={'libqtile.resources': ['battery-icons/*.png']},
entry_points={
'console_scripts': [
'qtile = libqtile.scripts.qtile:main',
'qtile-run = libqtile.scripts.qtile_run:main',
'qtile-top = libqtile.scripts.qtile_top:main',
'qshell = libqtile.scripts.qshell:main',
]
},
scripts=[
'bin/iqshell',
],
data_files=[
('share/man/man1', ['resources/qtile.1',
'resources/qshell.1'])],
cmdclass={'install': CheckCairoXcb},
**cffi_args
)
| de-vri-es/qtile | setup.py | Python | mit | 5,466 | 0.000732 |
from osgeo import gdal, osr, ogr
import numpy as np
import scipy.misc
import logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("banner-generator")
def create_raster_from_band( red, green, blue, output_file):
logger.debug("Create big raster in output_file : %s"%output_file)
red_ds = gdal.Open(red)
nx = red_ds.GetRasterBand(1).XSize
ny = red_ds.GetRasterBand(1).YSize
dst_ds = gdal.GetDriverByName('GTiff').Create(output_file, ny, nx, 3, gdal.GDT_UInt16)
dst_ds.SetGeoTransform(red_ds.GetGeoTransform())
dst_ds.SetProjection(red_ds.GetProjection())
def write_band(band, index_band):
logger.debug("Write band : %s"%index_band)
band_ds = gdal.Open(band)
array = band_ds.GetRasterBand(1).ReadAsArray()
dst_ds.GetRasterBand(index_band).WriteArray(array)
write_band(red, 1)
write_band(blue, 2)
write_band(green, 3)
dst_ds.FlushCache()
dst_ds = None
logger.debug("Big raster is write in output_file : %s"%output_file)
def create_png_from_raster(raster_file, output_file, blue_clip=(0.,2500.), red_clip=(0.,2500.), green_clip=(0.,2500.)):
logger.debug("Create big png in output_file : %s"%output_file)
raster_ds = gdal.Open(raster_file)
bytes_max = 255.
if blue_clip[0] > blue_clip[1] :
logger.error("Maximum clip value should be higther than the Minimum clip value")
return False
if red_clip[0] > red_clip[1] :
logger.error("Maximum clip value should be higther than the Minimum clip value")
return False
if green_clip[0] > green_clip[1] :
logger.error("Maximum clip value should be higther than the Minimum clip value")
return False
def clip_array(band_index, clip):
array = np.array(raster_ds.GetRasterBand(band_index).ReadAsArray())
array = np.clip(array, clip[0], clip[1])
array = array - clip[0]
array = (np.float32(array)*bytes_max)/(clip[1]-clip[0])
array = array.astype(int)
return array
logger.debug("Prepare red color, clip raw value at %s, %s"%red_clip)
red_array = clip_array(1, red_clip)
logger.debug("Prepare green color, clip raw value at %s, %s"%green_clip)
green_array = clip_array(2, green_clip)
logger.debug("Prepare blue color, clip raw value at %s, %s"%blue_clip)
blue_array = clip_array(3, blue_clip)
rgb = np.zeros((len(red_array), len(red_array[0]), 3), dtype=np.uint8)
rgb[..., 0] = red_array
rgb[..., 1] = green_array
rgb[..., 2] = blue_array
logger.debug("Writing png file in %s"%output_file)
scipy.misc.imsave(output_file, rgb)
return True
def get_x_y_for_lon_lat(raster_file, lon, lat):
logger.debug("Compute x and y from lon lat")
logger.debug("Longitude : %s"%lon)
logger.debug("Latitude : %s"%lat)
sref = osr.SpatialReference()
sref.ImportFromEPSG(4326)
# create a geometry from coordinates
point = ogr.Geometry(ogr.wkbPoint)
point.AddPoint(lon, lat)
raster_ds = gdal.Open(raster_file)
dref = osr.SpatialReference()
dref.ImportFromWkt(raster_ds.GetProjection())
ct = osr.CoordinateTransformation(sref,dref)
point.Transform(ct)
point_x = point.GetX()
point_y = point.GetY()
logger.debug("Point value in raster proj")
logger.debug("Point x : %s"%point_x)
logger.debug("Point y : %s"%point_y)
ulx, xres, xskew, uly, yskew, yres = raster_ds.GetGeoTransform()
logger.debug("Upper left coordinate in proj")
logger.debug("Point x : %s"%ulx)
logger.debug("Point x : %s"%uly)
lrx = ulx + (raster_ds.RasterXSize * xres)
lry = uly + (raster_ds.RasterYSize * yres)
logger.debug("Lower rigth coordinate in proj")
logger.debug("Point x : %s"%lrx)
logger.debug("Point x : %s"%lry)
logger.debug("Raster resolution")
logger.debug("Res on X : %s"%xres)
logger.debug("Res on Y : %s"%yres)
point_x = (point_x- ulx)/xres
point_y = (point_y- uly)/yres
return (int(point_x), int(point_y) )
def extract_banner(img_path, x, y, size_x, size_y, out_path):
logger.debug("Extract banner")
y_min = int(y-size_y/2)
y_max = y_min+size_y
x_min = int(x-size_x/2)
x_max = x_min+size_x
logger.debug("Extract data from table")
logger.debug("Min x : %s"%x_min)
logger.debug("Max x : %s"%x_max)
logger.debug("Min y : %s"%y_min)
logger.debug("Max y : %s"%y_max)
img = scipy.misc.imread(img_path)
y_min = max(0, min(y_min, len(img)))
y_max = max(0, min(y_max, len(img)))
x_min = max(0, min(x_min, len(img[0])))
x_max = max(0, min(x_max, len(img[0])))
logger.debug("After clamp")
logger.debug("Min x : %s"%x_min)
logger.debug("Max x : %s"%x_max)
logger.debug("Min y : %s"%y_min)
logger.debug("Max y : %s"%y_max)
logger.debug("Image y: %s"%len(img))
logger.debug("Image x: %s"%len(img[0]))
if y_max == y_min:
logger.error("After clamp, image size is Null")
return False
if x_max == x_min:
logger.error("After clamp, image size is Null")
return False
rgb = np.zeros((y_max-y_min, x_max-x_min, 3), dtype=np.uint8)
rgb[..., 0] = img[y_min:y_max,x_min:x_max, 0]
rgb[..., 1] = img[y_min:y_max,x_min:x_max, 1]
rgb[..., 2] = img[y_min:y_max,x_min:x_max, 2]
logger.debug("Write banner in output file %s", out_path)
scipy.misc.imsave(out_path, rgb)
return True
if __name__ == '__main__':
logger.setLevel(logging.DEBUG)
tiff_file = "/tmp/out.tiff"
big_png_file = "/tmp/out_big.png"
banner_file = "/tmp/out.png"
# create_raster_from_band( '/tmp/tmp0_if50g9','/tmp/tmpz61ja8cq','/tmp/tmp7dl287r9', tiff_file)
# x, y = get_x_y_for_lon_lat(tiff_file, 1.433333, 43.6)
# create_png_from_raster(tiff_file, big_png_file, red_clip=(250., 2500.), blue_clip=(250., 2500.), green_clip=(250., 2500.))
# extract_banner(big_png_file, x, y,1400, 800, banner_file)
extract_banner(big_png_file, 0, 0,1400, 800, banner_file)
extract_banner(big_png_file, 10980, 10980,1400, 800, banner_file)
extract_banner(big_png_file, 20980, 20980,1400, 800, banner_file)
| yoms/sentinel-banner-generator | banner_generator.py | Python | apache-2.0 | 6,246 | 0.013289 |
import unittest
import day02.solution as solution
class TestDay02(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_part_one(self):
test_input = [
'abcdef',
'bababc',
'abbcde',
'abcccd',
'aabcdd',
'abcdee',
'ababab',
]
self.assertEqual('12', solution.part_one(test_input))
def test_part_two(self):
test_input = [
'abcde',
'fghij',
'klmno',
'pqrst',
'fguij',
'axcye',
'wvxyz',
]
self.assertEqual('fgij', solution.part_two(test_input))
def test_has_n_letters(self):
self.assertFalse(solution.has_n_letters('abcdef', 2))
self.assertTrue(solution.has_n_letters('bababc', 2))
self.assertTrue(solution.has_n_letters('abbcde', 2))
self.assertFalse(solution.has_n_letters('abcccd', 2))
self.assertTrue(solution.has_n_letters('aabcdd', 2))
self.assertTrue(solution.has_n_letters('abcdee', 2))
self.assertFalse(solution.has_n_letters('ababab', 2))
self.assertFalse(solution.has_n_letters('abcdef', 3))
self.assertTrue(solution.has_n_letters('bababc', 3))
self.assertFalse(solution.has_n_letters('abbcde', 3))
self.assertTrue(solution.has_n_letters('abcccd', 3))
self.assertFalse(solution.has_n_letters('aabcdd', 3))
self.assertFalse(solution.has_n_letters('abcdee', 3))
self.assertTrue(solution.has_n_letters('ababab', 3))
def test_find_difference_in_ids(self):
n_different, differing_letters, same_letters = solution.find_difference_in_ids('abcde', 'axcye')
self.assertEqual(2, n_different)
n_different, differing_letters, same_letters = solution.find_difference_in_ids('fghij', 'fguij')
self.assertEqual(1, n_different)
| T-R0D/JustForFun | aoc2018/day02/test/test_solution.py | Python | gpl-2.0 | 1,946 | 0.001542 |
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(name='agarnet',
packages=['agarnet'],
py_modules=['agarnet'],
version='0.2.4',
description='agar.io client and connection toolkit',
install_requires=['websocket-client>=0.32.0'],
author='Gjum',
author_email='code.gjum@gmail.com',
url='https://github.com/Gjum/agarnet',
license='GPLv3',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Education',
'Topic :: Games/Entertainment',
],
)
| Gjum/agarnet | setup.py | Python | gpl-3.0 | 1,095 | 0.001826 |
#!/usr/bin/env python
from __future__ import division
import numpy as np
import matplotlib.pyplot as plt
import scipy.io.netcdf as netcdf
import spoisson
import def_radius
from scipy import interpolate
from scipy.interpolate import interp1d
import glob
#plt.ion()
binprec = '>f4'
flag_conf = 2 # 0: samelson, 1: grooms 2: basilisk
#% ================== GRID =====================================
rSphere = 6370.e3
deg2m = 2*np.pi*rSphere/360.0
gg = 9.8
alphaT = 2e-4
si_x = 100
si_y = 100
#si_x = 720
#si_y = 720
if flag_conf == 0:
si_z = 33
elif flag_conf == 1:
si_z = 31
elif flag_conf == 2:
si_z = 30
si_x1 = si_x + 1
si_y1 = si_y + 1
# in m
if flag_conf == 0 :
Lx = 5000.0e3
Ly = 5000.0e3
elif flag_conf == 1:
Lx = 3000.0e3
Ly = 3000.0e3
elif flag_conf == 2:
Lx = 5000.0e3
Ly = 5000.0e3
dx = Lx/si_x;
dy = Ly/si_y;
xx = Lx*(np.arange(0,si_x) + 0.5)/(1.0*si_x)
yy = Ly*(np.arange(0,si_y) + 0.5)/(1.0*si_y)
xx1 = Lx*(np.arange(0,si_x+1) )/(1.0*si_x)
yy1 = Ly*(np.arange(0,si_y+1) )/(1.0*si_y)
xg,yg = np.meshgrid(xx,yy)
xu,yu = np.meshgrid(xx1[:-1],yy)
xv,yv = np.meshgrid(xx,yy1[:-1])
xc,yc = np.meshgrid(xx1,yy1)
dx1 = dx*np.ones((si_x))
dy1 = dy*np.ones((si_y))
if flag_conf == 0:
dz1 = np.array([ 37.96964884, 64.27943707, 53.47713828, 55.25052547,
57.14580417, 59.17549133, 61.35478616, 63.70082498,
66.23372436, 68.97643209, 71.95606828, 75.20511746,
78.76157761, 82.67134428, 86.99014783, 91.7853415 ,
97.14066982, 103.16058993, 109.97712612, 117.75970459,
126.72990561, 137.18292117, 149.52003956, 164.30348158,
182.34416842, 204.85766232, 233.75503719, 272.22827077,
326.05469227, 406.94121271, 543.09982806, 532.52164274,
217.48963743])
elif flag_conf == 1:
dz1 = np.zeros((si_z))
dz1[0:4] = 25.
dz1[4:8] = 50.
dz1[8:12] = 75.
dz1[12:16] = 100.
dz1[16:21] = 150.
dz1[21:26] = 200.
dz1[26:] = 250.
elif flag_conf == 2:
dz1 = 5000/si_z*np.ones((si_z))
# # 1 layer configuration
# si_z = 1
# dz1 = np.zeros((si_z))
# dz1[0] = 4000.0
zz = np.reshape(dz1.cumsum(),(si_z,1,1))
dz_fi2 = dz1/2.0
dz2 = np.array([dz_fi2[i] + dz_fi2[i+1] for i in range(len(dz_fi2)-1)])
dz2 = np.reshape(dz2[0:si_z-1],(si_z-1,1,1))
dx1.astype(binprec).tofile('dx.box')
dy1.astype(binprec).tofile('dy.box')
dz1.astype(binprec).tofile('dz.box')
# ==== physical parameters
if flag_conf == 0:
fMin = 3.78e-05
fMax = 1.3e-4
elif flag_conf == 1:
fMin = 4.5e-5
fMax = 1.0e-4
if flag_conf == 2:
fMin = 3.e-05
fMax = 1.3e-4
fmid = 0.5*(fMin + fMax)
beta = (fMax-fMin)/Ly
ff = np.linspace(fMin,fMax,si_y)
print('f_south = {0}; beta = {1}'.format(fMin,beta) )
#%==================== LAND ===================================
landh = np.zeros((si_y,si_x));
H = zz[-1].squeeze()
landh = -H + landh
# walls
landh[:,0] = 0.0
landh[-1,:] = 0.0
landh.astype(binprec).tofile('topog.box')
#%=============== Surface forcing ===================================
# -- temperature --
sst = np.zeros((si_y,si_x));
if flag_conf == 0:
  TS = 40.0 # should be 50, but I chose 40 because the flux is at the top of the Ekman layer
TN = 0.0
elif flag_conf == 1:
TS = 22.0
TN = 2.0
elif flag_conf == 2:
TS = 22.0
TN = 2.0
sst = (TN-TS)*yg/Ly + TS
#thetaClimFile
sst.astype(binprec).tofile('sstclim.box')
# relax time scale (grooms)
rho0 = 1023.0
Cp = 4000.0
tauThetaClimRelax = rho0*Cp*dz1[0]/35. # 35 Watts per square meter per degree Kelvin
# relax time scale (samelson 97)
#(gamma*U*D/L/dz[1]) = 5*6e-6/37 ~ 15 days
#tauThetaClimRelax = 1233333.0
# I (arbitrarily..) set it to 50 days
tauThetaClimRelax = 4320000
# -- wind --
windx = np.zeros((si_y,si_x));
if flag_conf == 0:
tauW = 0.4
elif flag_conf == 1:
tauW = 0.2
elif flag_conf == 2:
tauW = 0.4
windx = -tauW*np.sin(2*np.pi*yg/Ly )
windx = windx*ff.reshape(si_y,1)/fMin
windx.astype(binprec).tofile('windx.box')
#% ============== background density profile ===================
# linear stratification
dep_l = np.linspace(0,H,si_z)
temp_f = (TN-TS)*(dep_l/H) + TS
if si_z > 1:
# interpolate on the new vertical grid
func2 = interp1d(dep_l, temp_f)
temp_i = func2(zz)
else:
temp_i = 1.0*temp_f
temp_i = temp_i.reshape((si_z,1,1))
temp_i.astype(binprec).tofile('tref.box')
#sref.astype(binprec).tofile('sref.box')
#%=============== initial conditions ===================================
# ### ideal ###
# uvel = np.zeros((si_z,si_y,si_x));
# vvel = np.zeros((si_z,si_y,si_x));
# theta = np.zeros((si_z,si_y,si_x));
# eta = np.zeros((si_y,si_x));
# theta = theta + 4.0
# #theta = theta + temp_i - TN
# #theta = theta*(1-yg/Ly) + TN
# uvel.astype(binprec).tofile('uinit.box')
# vvel.astype(binprec).tofile('vinit.box')
# theta.astype(binprec).tofile('tinit.box')
# eta.astype(binprec).tofile('einit.box')
#### from PG ###
dir0 = './data_input/'
if flag_conf == 0:
file1 = 'var_proj_s.nc'
f1 = netcdf.netcdf_file(dir0 + file1,'r')
uvel = f1.variables['u' ][:,:,:].copy()
vvel = f1.variables['v' ][:,:,:].copy()
theta = f1.variables['ti'][:,:,:].copy()
elif flag_conf == 2:
# PG scales
#L = 5000e3 # m
H = 5000 # m
beta = 2.0e-11 # 1/m/s
N2 = 1e-6 # (1/s**2)
Bs = N2*H
Thetas = Bs/gg/alphaT # 1/g alpha
Us = N2*H**2/(beta*Lx**2)
fnot = 3e-5
gg = 9.80665 # nemo value
ff = fnot + beta*yg # should be at u and v points
fmid = fnot + 0.5*Ly*beta
fileb = 'b*'
fileu = 'u*'
allfilesb = sorted(glob.glob(dir0 + fileb));
allfilesu = sorted(glob.glob(dir0 + fileu));
# dimensions
b = np.fromfile(allfilesb[0],'f4')
N = int(b[0])
N1 = N + 1
nl2 = int(len(b)/N1**2)
nl = nl2 - 2
b = np.fromfile(allfilesb[-1],'f4').reshape(nl2,N1,N1).transpose(0,2,1)
uv = np.fromfile(allfilesu[-1],'f4').reshape(2*nl2,N1,N1).transpose(0,2,1)
theta = Thetas*(b[1:-1,1:,1:] - b.min()) + 2.0
uvel = Us*uv[2:-2:2,1:,1:]
vvel = Us*uv[3:-2:2,1:,1:]
si_zpg,si_ypg,si_xpg = theta.shape
dxpg = dx*si_x/si_xpg
# compute pressure for SSH
dudy = np.diff(uvel,1,1)/dxpg
dvdx = np.diff(vvel,1,2)/dxpg
vort = dvdx[0,:-1,:] - dudy[0,:,:-1]
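  # invert the vorticity for a streamfunction (assuming spoisson solves del^2 psi = vort on a unit grid),
  # then rescale by dx^2 and convert to SSH via geostrophy: eta = f * psi / g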
psi = spoisson.sol(vort[:])
psi = psi.reshape((si_xpg-1,si_ypg-1))
psi = psi*dxpg*dxpg*fmid/gg
eta = np.zeros((si_ypg,si_xpg))
eta[:-1,:-1] = psi
# old grid
xx = np.linspace(0,1,si_xpg)
yy = np.linspace(0,1,si_ypg)
xog,yog = np.meshgrid(xx,yy)
xn = np.linspace(0,1,si_x)
yn = np.linspace(0,1,si_y)
xng,yng = np.meshgrid(xn,yn)
uvel_n = np.zeros((si_z,si_y,si_x))
vvel_n = np.zeros((si_z,si_y,si_x))
theta_n = np.zeros((si_z,si_y,si_x))
eta_n = np.zeros((si_y,si_x))
for nz in range(0,si_z):
fint = interpolate.interp2d(xx, yy,uvel[nz,:,:], kind='cubic')
uvel_n[nz,:,:] = fint(xn,yn)
fint = interpolate.interp2d(xx, yy,vvel[nz,:,:], kind='cubic')
vvel_n[nz,:,:] = fint(xn,yn)
fint = interpolate.interp2d(xx, yy,theta[nz,:,:], kind='cubic')
theta_n[nz,:,:] = fint(xn,yn)
fint = interpolate.interp2d(xx, yy,eta, kind='cubic')
eta_n = fint(xn,yn)
#np.savetxt('upg.dat',uvel_n[0,:,:])
#np.savetxt('sstpg.dat',theta_n[0,:,:])
uvel_n.astype(binprec).tofile('uinit.box')
vvel_n.astype(binprec).tofile('vinit.box')
theta_n.astype(binprec).tofile('tinit.box')
eta_n.astype(binprec).tofile('einit.box')
#---------------------
# ------ RBCS --------
#---------------------
tmask = np.ones((si_z,si_y,si_x))
tmask.astype(binprec).tofile('tmask.box')
# relax to initial conditions
theta_n.astype(binprec).tofile('trelax.box')
# compute relaxation length scale
N2 = -gg*alphaT*np.diff(theta_n,axis=0)/dz2
N2_min = 1e-7
N2 = np.where(N2<N2_min, N2_min, N2)
gp = N2*dz2
lmax = 500e3
filt_len = np.zeros((si_y,si_x))
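# relaxation length: 10 x the first baroclinic deformation radius, capped at lmax (shortened near the boundaries below)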
for nx in range(0,si_x):
for ny in range(0,si_y):
rd = def_radius.cal_rad(dz1,gp[:,ny,nx],ff[ny,nx])
filt_len[ny,nx] = np.min([10*rd[1],lmax])
# relaxation near the boundaries
def shape(x,sigma):
return (1-np.exp(-x**2/(2*sigma**2)))
dist = 500e3
filt_bdy = lmax*shape(xg,dist)*shape(xg-Lx,dist)*shape(yg,dist)*shape(yg-Lx,dist)
filt_len = np.where(filt_len<filt_bdy, filt_len, filt_bdy)
filt_len.astype(binprec).tofile('filter_length.box')
# # temporary
# tinit = np.random.rand(si_z,si_y,si_x)
# tinit.astype(binprec).tofile('tinit.box')
# trelax = np.zeros((si_z,si_y,si_x))
# trelax.astype(binprec).tofile('trelax.box')
| bderembl/mitgcm_configs | test_pg_hr/input/mygendata.py | Python | mit | 8,360 | 0.035167 |
__author__ = 'ktisha'
def foo(x):
return x
class A():
@staticmethod
@accepts(int, int)
def my_<caret>method():
print "Smth" | IllusionRom-deprecated/android_platform_tools_idea | python/testData/quickFixes/PyMakeMethodStaticQuickFixTest/decoWithParams_after.py | Python | apache-2.0 | 148 | 0.040541 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
from PyQt4.QtGui import QLabel
class StatusBar(QLabel):
def __init__(self, status, parent=None):
super(StatusBar, self).__init__(parent)
self.setToolTip("game status bar")
self.setText(status)
def setStatus(self, status):
self.setText(status)
if __name__ == '__main__':
import sys
from PyQt4.QtGui import QApplication
app = QApplication(sys.argv)
widget = StatusBar("R: {} T: {}\tPhase: {}".format(1, 2, 'Move'))
widget.show()
sys.exit(app.exec_())
| aelkikhia/pyduel_engine | pyduel_gui/widgets/game_status_widget.py | Python | apache-2.0 | 560 | 0.003571 |
# Copyright 2013 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log
from tempest import clients
from tempest.common.utils import data_utils
from tempest import config
from tempest.scenario import manager
from tempest import test
CONF = config.CONF
LOG = log.getLogger(__name__)
class TestSecurityGroupsBasicOps(manager.NetworkScenarioTest):
"""The test suite for security groups
This test suite assumes that Nova has been configured to
boot VM's with Neutron-managed networking, and attempts to
verify cross tenant connectivity as follows
ssh:
in order to overcome "ip namespace", each tenant has an "access point"
VM with floating-ip open to incoming ssh connection allowing network
commands (ping/ssh) to be executed from within the
tenant-network-namespace
Tempest host performs key-based authentication to the ssh server via
floating IP address
connectivity test is done by pinging destination server via source server
ssh connection.
success - ping returns
failure - ping_timeout reached
multi-node:
Multi-Node mode is enabled when CONF.compute.min_compute_nodes > 1.
Tests connectivity between servers on different compute nodes.
When enabled, test will boot each new server to different
compute nodes.
setup:
for primary tenant:
1. create a network&subnet
2. create a router (if public router isn't configured)
3. connect tenant network to public network via router
4. create an access point:
a. a security group open to incoming ssh connection
b. a VM with a floating ip
5. create a general empty security group (same as "default", but
without rules allowing in-tenant traffic)
tests:
1. _verify_network_details
2. _verify_mac_addr: for each access point verify that
(subnet, fix_ip, mac address) are as defined in the port list
3. _test_in_tenant_block: test that in-tenant traffic is disabled
without rules allowing it
4. _test_in_tenant_allow: test that in-tenant traffic is enabled
once an appropriate rule has been created
5. _test_cross_tenant_block: test that cross-tenant traffic is disabled
without a rule allowing it on destination tenant
6. _test_cross_tenant_allow:
* test that cross-tenant traffic is enabled once an appropriate
rule has been created on destination tenant.
* test that reverse traffic is still blocked
* test than reverse traffic is enabled once an appropriate rule has
been created on source tenant
7._test_port_update_new_security_group:
* test that traffic is blocked with default security group
* test that traffic is enabled after updating port with new security
group having appropriate rule
8. _test_multiple_security_groups: test multiple security groups can be
associated with the vm
assumptions:
1. alt_tenant/user existed and is different from primary_tenant/user
2. Public network is defined and reachable from the Tempest host
3. Public router can either be:
* defined, in which case all tenants networks can connect directly
to it, and cross tenant check will be done on the private IP of the
destination tenant
or
* not defined (empty string), in which case each tenant will have
its own router connected to the public network
"""
credentials = ['primary', 'alt', 'admin']
class TenantProperties(object):
"""helper class to save tenant details
id
credentials
network
subnet
security groups
servers
access point
"""
def __init__(self, credentials):
self.manager = clients.Manager(credentials)
# Credentials from manager are filled with both names and IDs
self.creds = self.manager.credentials
self.network = None
self.subnet = None
self.router = None
self.security_groups = {}
self.servers = list()
def set_network(self, network, subnet, router):
self.network = network
self.subnet = subnet
self.router = router
@classmethod
def skip_checks(cls):
super(TestSecurityGroupsBasicOps, cls).skip_checks()
if CONF.baremetal.driver_enabled:
msg = ('Not currently supported by baremetal.')
raise cls.skipException(msg)
if CONF.network.port_vnic_type in ['direct', 'macvtap']:
msg = ('Not currently supported when using vnic_type'
' direct or macvtap')
raise cls.skipException(msg)
if not (CONF.network.project_networks_reachable or
CONF.network.public_network_id):
msg = ('Either project_networks_reachable must be "true", or '
'public_network_id must be defined.')
raise cls.skipException(msg)
if not test.is_extension_enabled('security-group', 'network'):
msg = "security-group extension not enabled."
raise cls.skipException(msg)
@classmethod
def setup_credentials(cls):
# Create no network resources for these tests.
cls.set_network_resources()
super(TestSecurityGroupsBasicOps, cls).setup_credentials()
# TODO(mnewby) Consider looking up entities as needed instead
# of storing them as collections on the class.
# Credentials from the manager are filled with both IDs and Names
cls.alt_creds = cls.alt_manager.credentials
@classmethod
def resource_setup(cls):
super(TestSecurityGroupsBasicOps, cls).resource_setup()
cls.multi_node = CONF.compute.min_compute_nodes > 1 and \
test.is_scheduler_filter_enabled("DifferentHostFilter")
if cls.multi_node:
LOG.info("Working in Multi Node mode")
else:
LOG.info("Working in Single Node mode")
cls.floating_ips = {}
cls.tenants = {}
creds = cls.manager.credentials
cls.primary_tenant = cls.TenantProperties(creds)
cls.alt_tenant = cls.TenantProperties(cls.alt_creds)
for tenant in [cls.primary_tenant, cls.alt_tenant]:
cls.tenants[tenant.creds.tenant_id] = tenant
cls.floating_ip_access = not CONF.network.public_router_id
# work-around for broken probe port
cls.floating_ip_access = False
def setUp(self):
"""Set up a single tenant with an accessible server.
If multi-host is enabled, save created server uuids.
"""
self.servers = []
super(TestSecurityGroupsBasicOps, self).setUp()
self._deploy_tenant(self.primary_tenant)
self._verify_network_details(self.primary_tenant)
self._verify_mac_addr(self.primary_tenant)
def _create_tenant_keypairs(self, tenant):
keypair = self.create_keypair(tenant.manager.keypairs_client)
tenant.keypair = keypair
def _create_tenant_security_groups(self, tenant):
access_sg = self._create_empty_security_group(
namestart='secgroup_access-',
tenant_id=tenant.creds.tenant_id,
client=tenant.manager.security_groups_client
)
# don't use default secgroup since it allows in-project traffic
def_sg = self._create_empty_security_group(
namestart='secgroup_general-',
tenant_id=tenant.creds.tenant_id,
client=tenant.manager.security_groups_client
)
tenant.security_groups.update(access=access_sg, default=def_sg)
ssh_rule = dict(
protocol='tcp',
port_range_min=22,
port_range_max=22,
direction='ingress',
)
sec_group_rules_client = tenant.manager.security_group_rules_client
self._create_security_group_rule(
secgroup=access_sg,
sec_group_rules_client=sec_group_rules_client,
**ssh_rule)
def _verify_network_details(self, tenant):
        # Check that we see the newly created network/subnet/router in
        # the results of list_[networks,routers,subnets]
        # Check that the (router, subnet) pair exists in port_list
seen_nets = self._list_networks()
seen_names = [n['name'] for n in seen_nets]
seen_ids = [n['id'] for n in seen_nets]
self.assertIn(tenant.network.name, seen_names)
self.assertIn(tenant.network.id, seen_ids)
seen_subnets = [(n['id'], n['cidr'], n['network_id'])
for n in self._list_subnets()]
mysubnet = (tenant.subnet.id, tenant.subnet.cidr, tenant.network.id)
self.assertIn(mysubnet, seen_subnets)
seen_routers = self._list_routers()
seen_router_ids = [n['id'] for n in seen_routers]
seen_router_names = [n['name'] for n in seen_routers]
self.assertIn(tenant.router.name, seen_router_names)
self.assertIn(tenant.router.id, seen_router_ids)
myport = (tenant.router.id, tenant.subnet.id)
router_ports = [(i['device_id'], i['fixed_ips'][0]['subnet_id']) for i
in self._list_ports()
if self._is_router_port(i)]
self.assertIn(myport, router_ports)
def _is_router_port(self, port):
"""Return True if port is a router interface."""
# NOTE(armando-migliaccio): match device owner for both centralized
# and distributed routers; 'device_owner' is "" by default.
return port['device_owner'].startswith('network:router_interface')
def _create_server(self, name, tenant, security_groups=None, **kwargs):
"""Creates a server and assigns it to security group.
        If multi-host is enabled, ensures servers are created on different
        compute nodes by storing created servers' ids and using different_host
        as scheduler_hints on creation.
Validates servers are created as requested, using admin client.
"""
if security_groups is None:
security_groups = [tenant.security_groups['default']]
security_groups_names = [{'name': s['name']} for s in security_groups]
if self.multi_node:
kwargs["scheduler_hints"] = {'different_host': self.servers}
server = self.create_server(
name=name,
networks=[{'uuid': tenant.network.id}],
key_name=tenant.keypair['name'],
security_groups=security_groups_names,
wait_until='ACTIVE',
clients=tenant.manager,
**kwargs)
self.assertEqual(
sorted([s['name'] for s in security_groups]),
sorted([s['name'] for s in server['security_groups']]))
# Verify servers are on different compute nodes
if self.multi_node:
adm_get_server = self.admin_manager.servers_client.show_server
new_host = adm_get_server(server["id"])["server"][
"OS-EXT-SRV-ATTR:host"]
host_list = [adm_get_server(s)["server"]["OS-EXT-SRV-ATTR:host"]
for s in self.servers]
self.assertNotIn(new_host, host_list,
message="Failed to boot servers on different "
"Compute nodes.")
self.servers.append(server["id"])
return server
def _create_tenant_servers(self, tenant, num=1):
for i in range(num):
name = 'server-{tenant}-gen-{num}'.format(
tenant=tenant.creds.tenant_name,
num=i
)
name = data_utils.rand_name(name)
server = self._create_server(name, tenant)
tenant.servers.append(server)
def _set_access_point(self, tenant):
# creates a server in a secgroup with rule allowing external ssh
# in order to access project internal network
# workaround ip namespace
secgroups = tenant.security_groups.values()
name = 'server-{tenant}-access_point'.format(
tenant=tenant.creds.tenant_name)
name = data_utils.rand_name(name)
server = self._create_server(name, tenant,
security_groups=secgroups)
tenant.access_point = server
self._assign_floating_ips(tenant, server)
def _assign_floating_ips(self, tenant, server):
public_network_id = CONF.network.public_network_id
floating_ip = self.create_floating_ip(
server, public_network_id,
client=tenant.manager.floating_ips_client)
self.floating_ips.setdefault(server['id'], floating_ip)
def _create_tenant_network(self, tenant):
network, subnet, router = self.create_networks(
networks_client=tenant.manager.networks_client,
routers_client=tenant.manager.routers_client,
subnets_client=tenant.manager.subnets_client)
tenant.set_network(network, subnet, router)
def _deploy_tenant(self, tenant_or_id):
"""creates:
network
subnet
router (if public not defined)
access security group
access-point server
"""
if not isinstance(tenant_or_id, self.TenantProperties):
tenant = self.tenants[tenant_or_id]
else:
tenant = tenant_or_id
self._create_tenant_keypairs(tenant)
self._create_tenant_network(tenant)
self._create_tenant_security_groups(tenant)
self._set_access_point(tenant)
def _get_server_ip(self, server, floating=False):
"""returns the ip (floating/internal) of a server"""
if floating:
server_ip = self.floating_ips[server['id']].floating_ip_address
else:
server_ip = None
network_name = self.tenants[server['tenant_id']].network.name
if network_name in server['addresses']:
server_ip = server['addresses'][network_name][0]['addr']
return server_ip
def _connect_to_access_point(self, tenant):
"""create ssh connection to tenant access point"""
access_point_ssh = \
self.floating_ips[tenant.access_point['id']].floating_ip_address
private_key = tenant.keypair['private_key']
access_point_ssh = self.get_remote_client(
access_point_ssh, private_key=private_key)
return access_point_ssh
def _check_connectivity(self, access_point, ip, should_succeed=True):
if should_succeed:
msg = "Timed out waiting for %s to become reachable" % ip
else:
msg = "%s is reachable" % ip
self.assertTrue(self._check_remote_connectivity(access_point, ip,
should_succeed), msg)
def _test_in_tenant_block(self, tenant):
access_point_ssh = self._connect_to_access_point(tenant)
for server in tenant.servers:
self._check_connectivity(access_point=access_point_ssh,
ip=self._get_server_ip(server),
should_succeed=False)
def _test_in_tenant_allow(self, tenant):
ruleset = dict(
protocol='icmp',
remote_group_id=tenant.security_groups['default'].id,
direction='ingress'
)
self._create_security_group_rule(
secgroup=tenant.security_groups['default'],
security_groups_client=tenant.manager.security_groups_client,
**ruleset
)
access_point_ssh = self._connect_to_access_point(tenant)
for server in tenant.servers:
self._check_connectivity(access_point=access_point_ssh,
ip=self._get_server_ip(server))
def _test_cross_tenant_block(self, source_tenant, dest_tenant):
# if public router isn't defined, then dest_tenant access is via
# floating-ip
access_point_ssh = self._connect_to_access_point(source_tenant)
ip = self._get_server_ip(dest_tenant.access_point,
floating=self.floating_ip_access)
self._check_connectivity(access_point=access_point_ssh, ip=ip,
should_succeed=False)
def _test_cross_tenant_allow(self, source_tenant, dest_tenant):
"""check for each direction:
creating rule for tenant incoming traffic enables only 1way traffic
"""
ruleset = dict(
protocol='icmp',
direction='ingress'
)
sec_group_rules_client = (
dest_tenant.manager.security_group_rules_client)
self._create_security_group_rule(
secgroup=dest_tenant.security_groups['default'],
sec_group_rules_client=sec_group_rules_client,
**ruleset
)
access_point_ssh = self._connect_to_access_point(source_tenant)
ip = self._get_server_ip(dest_tenant.access_point,
floating=self.floating_ip_access)
self._check_connectivity(access_point_ssh, ip)
# test that reverse traffic is still blocked
self._test_cross_tenant_block(dest_tenant, source_tenant)
# allow reverse traffic and check
sec_group_rules_client = (
source_tenant.manager.security_group_rules_client)
self._create_security_group_rule(
secgroup=source_tenant.security_groups['default'],
sec_group_rules_client=sec_group_rules_client,
**ruleset
)
access_point_ssh_2 = self._connect_to_access_point(dest_tenant)
ip = self._get_server_ip(source_tenant.access_point,
floating=self.floating_ip_access)
self._check_connectivity(access_point_ssh_2, ip)
def _verify_mac_addr(self, tenant):
"""Verify that VM has the same ip, mac as listed in port"""
access_point_ssh = self._connect_to_access_point(tenant)
mac_addr = access_point_ssh.get_mac_address()
mac_addr = mac_addr.strip().lower()
# Get the fixed_ips and mac_address fields of all ports. Select
# only those two columns to reduce the size of the response.
port_list = self._list_ports(fields=['fixed_ips', 'mac_address'])
port_detail_list = [
(port['fixed_ips'][0]['subnet_id'],
port['fixed_ips'][0]['ip_address'],
port['mac_address'].lower())
for port in port_list if port['fixed_ips']
]
server_ip = self._get_server_ip(tenant.access_point)
subnet_id = tenant.subnet.id
self.assertIn((subnet_id, server_ip, mac_addr), port_detail_list)
@test.idempotent_id('e79f879e-debb-440c-a7e4-efeda05b6848')
@test.services('compute', 'network')
def test_cross_tenant_traffic(self):
if not self.credentials_provider.is_multi_tenant():
raise self.skipException("No secondary tenant defined")
try:
# deploy new project
self._deploy_tenant(self.alt_tenant)
self._verify_network_details(self.alt_tenant)
self._verify_mac_addr(self.alt_tenant)
# cross tenant check
source_tenant = self.primary_tenant
dest_tenant = self.alt_tenant
self._test_cross_tenant_block(source_tenant, dest_tenant)
self._test_cross_tenant_allow(source_tenant, dest_tenant)
except Exception:
for tenant in self.tenants.values():
self._log_console_output(servers=tenant.servers)
raise
@test.idempotent_id('63163892-bbf6-4249-aa12-d5ea1f8f421b')
@test.services('compute', 'network')
def test_in_tenant_traffic(self):
try:
self._create_tenant_servers(self.primary_tenant, num=1)
# in-tenant check
self._test_in_tenant_block(self.primary_tenant)
self._test_in_tenant_allow(self.primary_tenant)
except Exception:
for tenant in self.tenants.values():
self._log_console_output(servers=tenant.servers)
raise
@test.idempotent_id('f4d556d7-1526-42ad-bafb-6bebf48568f6')
@test.services('compute', 'network')
def test_port_update_new_security_group(self):
"""Verifies the traffic after updating the vm port
With new security group having appropriate rule.
"""
new_tenant = self.primary_tenant
# Create empty security group and add icmp rule in it
new_sg = self._create_empty_security_group(
namestart='secgroup_new-',
tenant_id=new_tenant.creds.tenant_id,
client=new_tenant.manager.security_groups_client)
icmp_rule = dict(
protocol='icmp',
direction='ingress',
)
sec_group_rules_client = new_tenant.manager.security_group_rules_client
self._create_security_group_rule(
secgroup=new_sg,
sec_group_rules_client=sec_group_rules_client,
**icmp_rule)
new_tenant.security_groups.update(new_sg=new_sg)
# Create server with default security group
name = 'server-{tenant}-gen-1'.format(
tenant=new_tenant.creds.tenant_name
)
name = data_utils.rand_name(name)
server = self._create_server(name, new_tenant)
# Check connectivity failure with default security group
try:
access_point_ssh = self._connect_to_access_point(new_tenant)
self._check_connectivity(access_point=access_point_ssh,
ip=self._get_server_ip(server),
should_succeed=False)
server_id = server['id']
port_id = self._list_ports(device_id=server_id)[0]['id']
# update port with new security group and check connectivity
self.ports_client.update_port(port_id, security_groups=[
new_tenant.security_groups['new_sg'].id])
self._check_connectivity(
access_point=access_point_ssh,
ip=self._get_server_ip(server))
except Exception:
for tenant in self.tenants.values():
self._log_console_output(servers=tenant.servers)
raise
@test.idempotent_id('d2f77418-fcc4-439d-b935-72eca704e293')
@test.services('compute', 'network')
def test_multiple_security_groups(self):
"""Verify multiple security groups and checks that rules
provided in the both the groups is applied onto VM
"""
tenant = self.primary_tenant
ip = self._get_server_ip(tenant.access_point,
floating=self.floating_ip_access)
ssh_login = CONF.validation.image_ssh_user
private_key = tenant.keypair['private_key']
self.check_vm_connectivity(ip,
should_connect=False)
ruleset = dict(
protocol='icmp',
direction='ingress'
)
self._create_security_group_rule(
secgroup=tenant.security_groups['default'],
**ruleset
)
        # NOTE: The VM now has 2 security groups: one with an ssh rule
        # (already added in the setUp() method) and the other with an icmp
        # rule (added in the above step). check_vm_connectivity tests that:
        # - the vm ping test is successful
        # - ssh to the vm is successful
self.check_vm_connectivity(ip,
username=ssh_login,
private_key=private_key,
should_connect=True)
@test.requires_ext(service='network', extension='port-security')
@test.idempotent_id('7c811dcc-263b-49a3-92d2-1b4d8405f50c')
@test.services('compute', 'network')
def test_port_security_disable_security_group(self):
"""Verify the default security group rules is disabled."""
new_tenant = self.primary_tenant
# Create server
name = 'server-{tenant}-gen-1'.format(
tenant=new_tenant.creds.tenant_name
)
name = data_utils.rand_name(name)
server = self._create_server(name, new_tenant)
access_point_ssh = self._connect_to_access_point(new_tenant)
server_id = server['id']
port_id = self._list_ports(device_id=server_id)[0]['id']
# Flip the port's port security and check connectivity
try:
self.ports_client.update_port(port_id,
port_security_enabled=True,
security_groups=[])
self._check_connectivity(access_point=access_point_ssh,
ip=self._get_server_ip(server),
should_succeed=False)
self.ports_client.update_port(port_id,
port_security_enabled=False,
security_groups=[])
self._check_connectivity(
access_point=access_point_ssh,
ip=self._get_server_ip(server))
except Exception:
for tenant in self.tenants.values():
self._log_console_output(servers=tenant.servers)
raise
| bigswitch/tempest | tempest/scenario/test_security_groups_basic_ops.py | Python | apache-2.0 | 26,295 | 0 |
# -*- coding: utf-8 -*-
"""Get credentials from file or environment variables"""
from functools import reduce
def dpath(dict_, path):
"""Dig into dictionary by string path. e.g.
dpath({'a': {'b': {'c': 'd'}}}, 'a.b') -> {'c': 'd'}
"""
from operator import getitem
paths = path.split('.')
return reduce(
getitem,
paths,
dict_
)
def credentials(filename=None, root=None):
"""Get credentials from JSON file or environment variables.
JSON file should have credentials in the form of:
{
"username": "myusername",
"password": "supersecret",
"api_key": "myapikey"
}
If filename not provided, fall back on environment variables:
- T1_API_USERNAME
- T1_API_PASSWORD
- T1_API_KEY
:param filename: str filename of JSON file containing credentials.
:param root: str path to get to credentials object. For instance, in object:
{
"credentials": {
"api": {
"username": "myusername",
"password": "supersecret",
"api_key": "myapikey"
}
}
}
"root" is "credentials.api"
:return: dict[str]str
:raise: TypeError: no JSON file or envvars
"""
if filename is not None:
import json
with open(filename, 'rb') as f:
conf = json.load(f)
if root is not None:
conf = dpath(conf, root)
else:
import os
try:
conf = {
'username': os.environ['T1_API_USERNAME'],
'password': os.environ['T1_API_PASSWORD'],
'api_key': os.environ['T1_API_KEY'],
}
except KeyError:
raise TypeError('Must either supply JSON file of credentials'
' or set environment variables '
'T1_API_{USERNAME,PASSWORD,KEY}')
return conf
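# Illustrative usage sketch (not part of the original module): assuming a JSON
# file "creds.json" shaped like the docstring above, with the credentials
# nested under "credentials.api":
#
#     conf = credentials('creds.json', root='credentials.api')
#     conf = credentials()   # or fall back on the T1_API_* environment variables
#     print(conf['username'])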
| leiforion/t1-python | terminalone/utils/credentials.py | Python | bsd-3-clause | 1,971 | 0.000507 |
"""Grab the tips from Options.xml
$Id: extopts.py,v 1.1 2007/01/14 14:07:31 stephen Exp $
Originally ROX-Filer/src/po/tips.py by Thomas Leonard.
"""
from xml.sax import *
from xml.sax.handler import ContentHandler
import os, sys
class Handler(ContentHandler):
data = ""
def startElement(self, tag, attrs):
for x in ['title', 'label', 'end', 'unit']:
if attrs.has_key(x):
self.trans(attrs[x])
self.data = ""
def characters(self, data):
self.data = self.data + data
def endElement(self, tag):
data = self.data.strip()
if data:
self.trans(data)
self.data = ""
def trans(self, data):
data = '\\n'.join(data.split('\n'))
if data:
out.write('_("%s")\n' % data.replace('"', '\\"'))
ifname='Options.xml'
ofname='Options_strings'
if len(sys.argv)>2:
ifname=sys.argv[1]
ofname=sys.argv[2]
elif len(sys.argv)==2:
ifname=sys.argv[1]
print "Extracting translatable bits from %s..." % os.path.basename(ifname)
out = open(ofname, 'wb')
parse(ifname, Handler())
out.close()
| nilsonmorales/Badass | Packages_POU/pre-paquetes/20140430/20140430.pet/usr/share/local/apps/VideoThumbnail/extopts.py | Python | gpl-3.0 | 1,009 | 0.038652 |
import numpy as np
np.set_printoptions(precision=6, suppress=True, linewidth=320)
from numpy import where, zeros, ones, mod, conj, array, dot, angle, arange, r_, linalg, Inf, complex128  # , complex256
from numpy.linalg import solve
# inv is applied to a slice of the sparse admittance matrix and .toarray() is
# called on the result, so the sparse inverse is assumed here
from scipy.sparse.linalg import inv
# Set the complex precision to use
complex_type = complex128
def calc_W(n, npqpv, C, W):
"""
Calculation of the inverse coefficients W.
@param n: Order of the coefficients
@param npqpv: number of pq and pv nodes
@param C: Structure of voltage coefficients (Ncoeff x nbus elements)
@param W: Structure of inverse voltage coefficients (Ncoeff x nbus elements)
@return: Array of inverse voltage coefficients for the order n
"""
if n == 0:
res = ones(npqpv, dtype=complex_type)
else:
l = arange(n)
res = -(W[l, :] * C[n - l, :]).sum(axis=0)
res /= conj(C[0, :])
return res
def pade_approximation(n, an, s=1):
"""
Computes the n/2 pade approximant of the series an at the approximation
point s
Arguments:
an: coefficient matrix, (number of coefficients, number of series)
n: order of the series
s: point of approximation
Returns:
pade approximation at s
"""
nn = int(n / 2)
if mod(nn, 2) == 0:
nn -= 1
L = nn
M = nn
an = np.ndarray.flatten(an)
rhs = an[L + 1:L + M + 1]
C = zeros((L, M), dtype=complex_type)
for i in range(L):
k = i + 1
C[i, :] = an[L - M + k:L + k]
try:
b = solve(C, -rhs) # bn to b1
except:
return 0, zeros(L + 1, dtype=complex_type), zeros(L + 1, dtype=complex_type)
b = r_[1, b[::-1]] # b0 = 1
a = zeros(L + 1, dtype=complex_type)
a[0] = an[0]
for i in range(L):
val = complex_type(0)
k = i + 1
for j in range(k + 1):
val += an[k - j] * b[j]
a[i + 1] = val
p = complex_type(0)
q = complex_type(0)
for i in range(L + 1):
p += a[i] * s ** i
q += b[i] * s ** i
return p / q, a, b
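# Illustrative usage sketch (not part of the original module): the [3/3] Pade
# approximant of exp(x) built from its first Taylor coefficients 1/k!,
# evaluated at s=1, comes out close to e = 2.71828...
#
#     from math import factorial
#     an = array([1.0 / factorial(k) for k in range(8)], dtype=complex_type)
#     val, a, b = pade_approximation(8, an, s=1)
#     # val ~= 2.7183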
# @jit(cache=True)
def helmz(Vbus, Sbus, Ibus, Ybus, pq, pv, ref, pqpv, tol=1e-9, max_ter=5):
"""
Args:
admittances: Circuit complete admittance matrix
slackIndices: Indices of the slack buses (although most likely only one works)
coefficientCount: Number of voltage coefficients to evaluate (Must be an odd number)
powerInjections: Array of power injections matching the admittance matrix size
voltageSetPoints: Array of voltage set points matching the admittance matrix size
types: Array of bus types matching the admittance matrix size. types: {1-> PQ, 2-> PV, 3-> Slack}
Output:
Voltages vector
"""
# reduced impedance matrix
Zred = inv(Ybus[pqpv, :][:, pqpv]).toarray()
# slack currents
Ivd = -Ybus[pqpv, :][:, ref].dot(Vbus[ref])
# slack voltages influence
Ck = Zred.dot(Ivd)
npqpv = len(pqpv)
Vcoeff = zeros((0, npqpv), dtype=complex_type)
Wcoeff = zeros((0, npqpv), dtype=complex_type)
row = zeros((1, npqpv), dtype=complex_type)
for n in range(max_ter):
# reserve memory
Vcoeff = r_[Vcoeff, row.copy()]
Wcoeff = r_[Wcoeff, row.copy()]
if n == 0:
I = Ivd
else:
I = conj(Sbus[pqpv]) * Wcoeff[n-1, :]
# solve the voltage coefficients
Vcoeff[n, :] = Zred.dot(I)
# compute the inverse voltage coefficients
Wcoeff[n, :] = calc_W(n=n, npqpv=npqpv, C=Vcoeff, W=Wcoeff)
# compose the final voltage
voltage = Vbus.copy()
for i, ii in enumerate(pqpv):
voltage[ii], _, _ = pade_approximation(n, Vcoeff[:, i])
# evaluate F(x)
Scalc = voltage * conj(Ybus * voltage - Ibus)
mis = Scalc - Sbus # complex power mismatch
normF = linalg.norm(r_[mis[pv].real, mis[pq].real, mis[pq].imag], Inf)
print('Vcoeff:\n', Vcoeff)
print('V:\n', abs(Vcoeff.sum(axis=0)))
return voltage, normF
if __name__ == "__main__":
from GridCal.Engine.calculation_engine import *
grid = MultiCircuit()
grid.load_file('lynn5buspq.xlsx')
grid.compile()
circuit = grid.circuits[0]
print('\nYbus:\n', circuit.power_flow_input.Ybus.todense())
print('\nYseries:\n', circuit.power_flow_input.Yseries.todense())
print('\nYshunt:\n', circuit.power_flow_input.Yshunt)
print('\nSbus:\n', circuit.power_flow_input.Sbus)
print('\nIbus:\n', circuit.power_flow_input.Ibus)
print('\nVbus:\n', circuit.power_flow_input.Vbus)
print('\ntypes:\n', circuit.power_flow_input.types)
print('\npq:\n', circuit.power_flow_input.pq)
print('\npv:\n', circuit.power_flow_input.pv)
print('\nvd:\n', circuit.power_flow_input.ref)
import time
print('HELM-Z')
start_time = time.time()
cmax = 40
V1, err = helmz(Vbus=circuit.power_flow_input.Vbus,
Sbus=circuit.power_flow_input.Sbus,
Ibus=circuit.power_flow_input.Ibus,
Ybus=circuit.power_flow_input.Yseries,
pq=circuit.power_flow_input.pq,
pv=circuit.power_flow_input.pv,
ref=circuit.power_flow_input.ref,
pqpv=circuit.power_flow_input.pqpv,
max_ter=cmax)
print("--- %s seconds ---" % (time.time() - start_time))
# print_coeffs(C, W, R, X, H)
print('V module:\t', abs(V1))
print('V angle: \t', angle(V1))
print('error: \t', err)
# check the HELM solution: v against the NR power flow
print('\nNR')
options = PowerFlowOptions(SolverType.NR, verbose=False, robust=False, tolerance=1e-9)
power_flow = PowerFlow(grid, options)
start_time = time.time()
power_flow.run()
print("--- %s seconds ---" % (time.time() - start_time))
vnr = circuit.power_flow_results.voltage
print('V module:\t', abs(vnr))
print('V angle: \t', angle(vnr))
print('error: \t', circuit.power_flow_results.error)
# check
print('\ndiff:\t', V1 - vnr)
| SanPen/GridCal | src/research/power_flow/helm/old/helm_z_pq.py | Python | lgpl-3.0 | 6,030 | 0.002819 |
import platform
import socket
import sys
import os
from mule_local.JobGeneration import *
from mule.JobPlatformResources import *
from . import JobPlatformAutodetect
# Underscore defines symbols to be private
_job_id = None
def get_platform_autodetect():
"""
Returns
-------
bool
True if current platform matches, otherwise False
"""
return JobPlatformAutodetect.autodetect()
def get_platform_id():
"""
Return platform ID
Returns
-------
string
unique ID of platform
"""
return "himmuc"
def get_platform_resources():
"""
Return information about hardware
"""
h = JobPlatformResources()
h.num_cores_per_node = 4
# Number of nodes per job are limited
h.num_nodes = 40
h.num_cores_per_socket = 4
h.max_wallclock_seconds = 8*60*60
return h
def jobscript_setup(jg : JobGeneration):
"""
Setup data to generate job script
"""
global _job_id
_job_id = jg.runtime.getUniqueID(jg.compile, jg.unique_id_filter)
return
def jobscript_get_header(jg : JobGeneration):
"""
    These headers typically contain information on e.g. job execution, number of compute nodes, etc.
Returns
-------
string
multiline text for scripts
"""
global _job_id
p = jg.parallelization
time_str = p.get_max_wallclock_seconds_hh_mm_ss()
#
# See https://www.lrz.de/services/compute/linux-cluster/batch_parallel/example_jobs/
#
content = """#! /bin/bash
#SBATCH -o """+jg.p_job_stdout_filepath+"""
#SBATCH -e """+jg.p_job_stderr_filepath+"""
#SBATCH -D """+jg.p_job_dirpath+"""
#SBATCH -J """+_job_id+"""
#SBATCH --get-user-env
#SBATCH --nodes="""+str(p.num_nodes)+"""
#SBATCH --ntasks-per-node="""+str(p.num_ranks_per_node)+"""
# the above is a good match for the
# CooLMUC2 architecture.
#SBATCH --mail-type=end
#SBATCH --mail-user=schreiberx@gmail.com
#SBATCH --export=NONE
#SBATCH --time="""+time_str+"""
#SBATCH --partition=odr
"""
content += "\n"
content += "module load mpi\n"
if False:
if p.force_turbo_off:
content += """# Try to avoid slowing down CPUs
#SBATCH --cpu-freq=Performance
"""
content += """
source /etc/profile.d/modules.sh
"""
if jg.compile.threading != 'off':
content += """
export OMP_NUM_THREADS="""+str(p.num_threads_per_rank)+"""
"""
if p.core_oversubscription:
raise Exception("Not supported with this script!")
if p.core_affinity != None:
content += "\necho \"Affnity: "+str(p.core_affinity)+"\"\n"
if p.core_affinity == 'compact':
content += "\nexport OMP_PROC_BIND=close\n"
elif p.core_affinity == 'scatter':
content += "\nexport OMP_PROC_BIND=spread\n"
else:
raise Exception("Affinity '"+str(p.core_affinity)+"' not supported")
return content
def jobscript_get_exec_prefix(jg : JobGeneration):
"""
Prefix before executable
Returns
-------
string
multiline text for scripts
"""
content = ""
content += jg.runtime.get_jobscript_plan_exec_prefix(jg.compile, jg.runtime)
return content
def jobscript_get_exec_command(jg : JobGeneration):
"""
Prefix to executable command
Returns
-------
string
multiline text for scripts
"""
p = jg.parallelization
mpiexec = ''
#
# Only use MPI exec if we are allowed to do so
# We shouldn't use mpiexec for validation scripts
#
if not p.mpiexec_disabled:
mpiexec = "mpiexec -n "+str(p.num_ranks)
content = """
# mpiexec ... would be here without a line break
EXEC=\""""+jg.compile.getProgramPath()+"""\"
PARAMS=\""""+jg.runtime.getRuntimeOptions()+"""\"
echo \"${EXEC} ${PARAMS}\"
"""+mpiexec+""" $EXEC $PARAMS || exit 1
"""
return content
def jobscript_get_exec_suffix(jg : JobGeneration):
"""
Suffix before executable
Returns
-------
string
multiline text for scripts
"""
content = ""
content += jg.runtime.get_jobscript_plan_exec_suffix(jg.compile, jg.runtime)
return content
def jobscript_get_footer(jg : JobGeneration):
"""
Footer at very end of job script
Returns
-------
string
multiline text for scripts
"""
content = ""
return content
def jobscript_get_compile_command(jg : JobGeneration):
"""
Compile command(s)
This is separated here to put it either
* into the job script (handy for workstations)
or
* into a separate compile file (handy for clusters)
Returns
-------
string
multiline text with compile command to generate executable
"""
content = """
SCONS="scons """+jg.compile.getSConsParams()+' -j 4"'+"""
echo "$SCONS"
$SCONS || exit 1
"""
return content
| schreiberx/sweet | mule/platforms/50_himmuc/JobPlatform.py | Python | mit | 4,801 | 0.016038 |
# ====================================================================
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ====================================================================
from unittest import TestCase
from lucene import \
IndexWriter, Term, SimpleAnalyzer, PerFieldAnalyzerWrapper, \
RAMDirectory, Document, Field, IndexSearcher, TermQuery, \
QueryParser, Analyzer, StringReader, Token, JavaError, \
Version
from lia.analysis.keyword.KeywordAnalyzer import KeywordAnalyzer
from lia.analysis.keyword.SimpleKeywordAnalyzer import SimpleKeywordAnalyzer
class KeywordAnalyzerTest(TestCase):
def setUp(self):
self.directory = RAMDirectory()
writer = IndexWriter(self.directory, SimpleAnalyzer(), True,
IndexWriter.MaxFieldLength.UNLIMITED)
doc = Document()
doc.add(Field("partnum", "Q36",
Field.Store.YES, Field.Index.NOT_ANALYZED))
doc.add(Field("description", "Illidium Space Modulator",
Field.Store.YES, Field.Index.ANALYZED))
writer.addDocument(doc)
writer.close()
self.searcher = IndexSearcher(self.directory, True)
def testTermQuery(self):
query = TermQuery(Term("partnum", "Q36"))
scoreDocs = self.searcher.search(query, 50).scoreDocs
self.assertEqual(1, len(scoreDocs))
def testBasicQueryParser(self):
analyzer = SimpleAnalyzer()
query = QueryParser(Version.LUCENE_CURRENT, "description",
analyzer).parse("partnum:Q36 AND SPACE")
scoreDocs = self.searcher.search(query, 50).scoreDocs
self.assertEqual("+partnum:q +space", query.toString("description"),
"note Q36 -> q")
self.assertEqual(0, len(scoreDocs), "doc not found :(")
def testPerFieldAnalyzer(self):
analyzer = PerFieldAnalyzerWrapper(SimpleAnalyzer())
analyzer.addAnalyzer("partnum", KeywordAnalyzer())
query = QueryParser(Version.LUCENE_CURRENT, "description",
analyzer).parse("partnum:Q36 AND SPACE")
scoreDocs = self.searcher.search(query, 50).scoreDocs
#self.assertEqual("+partnum:Q36 +space", query.toString("description"))
self.assertEqual(1, len(scoreDocs), "doc found!")
| fnp/pylucene | samples/LuceneInAction/lia/analysis/keyword/KeywordAnalyzerTest.py | Python | apache-2.0 | 2,858 | 0.00035 |
import sys
import os
import unittest
try:
from _pydev_bundle import pydev_monkey
except:
sys.path.append(os.path.dirname(os.path.dirname(__file__)))
from _pydev_bundle import pydev_monkey
from pydevd import SetupHolder
from _pydev_bundle.pydev_monkey import pydev_src_dir
class TestCase(unittest.TestCase):
def test_monkey(self):
original = SetupHolder.setup
try:
SetupHolder.setup = {'client':'127.0.0.1', 'port': '0'}
check='''C:\\bin\\python.exe -u -c "
connect(\\"127.0.0.1\\")
"'''
sys.original_argv = []
self.assertEqual(
'"C:\\bin\\python.exe" "-u" "-c" "import sys; '
'sys.path.append(r\'%s\'); '
'import pydevd; pydevd.settrace(host=\'127.0.0.1\', port=0, suspend=False, '
'trace_only_current_thread=False, patch_multiprocessing=True); '
'\nconnect(\\"127.0.0.1\\")\n"' % pydev_src_dir,
pydev_monkey.patch_arg_str_win(check)
)
finally:
SetupHolder.setup = original
def test_str_to_args_windows(self):
self.assertEqual(['a', 'b'], pydev_monkey.str_to_args_windows('a "b"'))
def test_monkey_patch_args_indc(self):
original = SetupHolder.setup
try:
SetupHolder.setup = {'client':'127.0.0.1', 'port': '0'}
check=['C:\\bin\\python.exe', '-u', '-c', 'connect(\\"127.0.0.1\\")']
sys.original_argv = []
self.assertEqual(pydev_monkey.patch_args(check), [
'C:\\bin\\python.exe',
'-u',
'-c',
(
'import sys; sys.path.append(r\'%s\'); import pydevd; '
'pydevd.settrace(host=\'127.0.0.1\', port=0, suspend=False, trace_only_current_thread=False, patch_multiprocessing=True); '
'connect(\\"127.0.0.1\\")'
) % pydev_src_dir
])
finally:
SetupHolder.setup = original
def test_monkey_patch_args_module(self):
original = SetupHolder.setup
try:
SetupHolder.setup = {'client':'127.0.0.1', 'port': '0'}
check=['C:\\bin\\python.exe', '-m', 'test']
sys.original_argv = ['pydevd', '--multiprocess']
if sys.platform == 'win32':
self.assertEqual(pydev_monkey.patch_args(check), [
'C:\\bin\\python.exe',
'"pydevd"',
'"--module"',
'"--multiprocess"',
'test',
])
else:
self.assertEqual(pydev_monkey.patch_args(check), [
'C:\\bin\\python.exe',
'pydevd',
'--module',
'--multiprocess',
'test',
])
finally:
SetupHolder.setup = original
def test_monkey_patch_args_no_indc(self):
original = SetupHolder.setup
try:
SetupHolder.setup = {'client':'127.0.0.1', 'port': '0'}
check=['C:\\bin\\python.exe', 'connect(\\"127.0.0.1\\")']
sys.original_argv = ['my', 'original', 'argv']
if sys.platform == 'win32':
self.assertEqual(pydev_monkey.patch_args(check), [
'C:\\bin\\python.exe', '"my"', '"original"', '"argv"', 'connect(\\"127.0.0.1\\")'])
else:
self.assertEqual(pydev_monkey.patch_args(check), [
'C:\\bin\\python.exe', 'my', 'original', 'argv', 'connect(\\"127.0.0.1\\")'])
finally:
SetupHolder.setup = original
def test_monkey_patch_args_no_indc_with_pydevd(self):
original = SetupHolder.setup
try:
SetupHolder.setup = {'client':'127.0.0.1', 'port': '0'}
check=['C:\\bin\\python.exe', 'pydevd.py', 'connect(\\"127.0.0.1\\")', 'bar']
sys.original_argv = ['my', 'original', 'argv']
self.assertEqual(pydev_monkey.patch_args(check), [
'C:\\bin\\python.exe', 'pydevd.py', 'connect(\\"127.0.0.1\\")', 'bar'])
finally:
SetupHolder.setup = original
def test_monkey_patch_args_no_indc_without_pydevd(self):
original = SetupHolder.setup
try:
SetupHolder.setup = {'client':'127.0.0.1', 'port': '0'}
check=['C:\\bin\\python.exe', 'target.py', 'connect(\\"127.0.0.1\\")', 'bar']
sys.original_argv = ['pydevd.py', '--a=1', 'b', '--c=2', '--file', 'ignore_this.py']
if sys.platform == 'win32':
self.assertEqual(pydev_monkey.patch_args(check), [
'C:\\bin\\python.exe',
'"pydevd.py"',
'"--a=1"',
'"b"',
'"--c=2"',
'"--file"',
'target.py',
'connect(\\"127.0.0.1\\")',
'bar',
])
else:
self.assertEqual(pydev_monkey.patch_args(check), [
'C:\\bin\\python.exe',
'pydevd.py',
'--a=1',
'b',
'--c=2',
'--file',
'target.py',
'connect(\\"127.0.0.1\\")',
'bar',
])
finally:
SetupHolder.setup = original
if __name__ == '__main__':
unittest.main() | Soya93/Extract-Refactoring | python/helpers/pydev/tests_pydevd_python/test_pydev_monkey.py | Python | apache-2.0 | 5,544 | 0.00487 |
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
__version__ = '0.1'
from .proxy import Proxy
| mardiros/apium | apium/__init__.py | Python | bsd-3-clause | 122 | 0 |
# -*- coding: utf-8 -*-
u"""
Created on 2015-8-8
@author: cheng.li
"""
from PyFin.Analysis.TechnicalAnalysis.StatelessTechnicalAnalysers import SecuritySignValueHolder
from PyFin.Analysis.TechnicalAnalysis.StatelessTechnicalAnalysers import SecurityAverageValueHolder
from PyFin.Analysis.TechnicalAnalysis.StatelessTechnicalAnalysers import SecurityXAverageValueHolder
from PyFin.Analysis.TechnicalAnalysis.StatelessTechnicalAnalysers import SecurityMACDValueHolder
from PyFin.Analysis.TechnicalAnalysis.StatelessTechnicalAnalysers import SecurityExpValueHolder
from PyFin.Analysis.TechnicalAnalysis.StatelessTechnicalAnalysers import SecurityLogValueHolder
from PyFin.Analysis.TechnicalAnalysis.StatelessTechnicalAnalysers import SecuritySqrtValueHolder
from PyFin.Analysis.TechnicalAnalysis.StatelessTechnicalAnalysers import SecurityPowValueHolder
from PyFin.Analysis.TechnicalAnalysis.StatelessTechnicalAnalysers import SecurityAbsValueHolder
from PyFin.Analysis.TechnicalAnalysis.StatelessTechnicalAnalysers import SecurityAcosValueHolder
from PyFin.Analysis.TechnicalAnalysis.StatelessTechnicalAnalysers import SecurityAcoshValueHolder
from PyFin.Analysis.TechnicalAnalysis.StatelessTechnicalAnalysers import SecurityAsinValueHolder
from PyFin.Analysis.TechnicalAnalysis.StatelessTechnicalAnalysers import SecurityAsinhValueHolder
from PyFin.Analysis.TechnicalAnalysis.StatelessTechnicalAnalysers import SecurityNormInvValueHolder
from PyFin.Analysis.TechnicalAnalysis.StatelessTechnicalAnalysers import SecurityCeilValueHolder
from PyFin.Analysis.TechnicalAnalysis.StatelessTechnicalAnalysers import SecurityFloorValueHolder
from PyFin.Analysis.TechnicalAnalysis.StatelessTechnicalAnalysers import SecurityRoundValueHolder
from PyFin.Analysis.TechnicalAnalysis.StatelessTechnicalAnalysers import SecurityDiffValueHolder
from PyFin.Analysis.TechnicalAnalysis.StatelessTechnicalAnalysers import SecuritySimpleReturnValueHolder
from PyFin.Analysis.TechnicalAnalysis.StatelessTechnicalAnalysers import SecurityLogReturnValueHolder
from PyFin.Analysis.TechnicalAnalysis.StatelessTechnicalAnalysers import SecurityMaximumValueHolder
from PyFin.Analysis.TechnicalAnalysis.StatelessTechnicalAnalysers import SecurityMinimumValueHolder
from PyFin.Analysis.TechnicalAnalysis.StatefulTechnicalAnalysers import SecurityMovingAverage
from PyFin.Analysis.TechnicalAnalysis.StatefulTechnicalAnalysers import SecurityMovingDecay
from PyFin.Analysis.TechnicalAnalysis.StatefulTechnicalAnalysers import SecurityMovingMax
from PyFin.Analysis.TechnicalAnalysis.StatefulTechnicalAnalysers import SecurityMovingArgMax
from PyFin.Analysis.TechnicalAnalysis.StatefulTechnicalAnalysers import SecurityMovingMin
from PyFin.Analysis.TechnicalAnalysis.StatefulTechnicalAnalysers import SecurityMovingArgMin
from PyFin.Analysis.TechnicalAnalysis.StatefulTechnicalAnalysers import SecurityMovingRank
from PyFin.Analysis.TechnicalAnalysis.StatefulTechnicalAnalysers import SecurityMovingQuantile
from PyFin.Analysis.TechnicalAnalysis.StatefulTechnicalAnalysers import SecurityMovingAllTrue
from PyFin.Analysis.TechnicalAnalysis.StatefulTechnicalAnalysers import SecurityMovingAnyTrue
from PyFin.Analysis.TechnicalAnalysis.StatefulTechnicalAnalysers import SecurityMovingSum
from PyFin.Analysis.TechnicalAnalysis.StatefulTechnicalAnalysers import SecurityMovingVariance
from PyFin.Analysis.TechnicalAnalysis.StatefulTechnicalAnalysers import SecurityMovingStandardDeviation
from PyFin.Analysis.TechnicalAnalysis.StatefulTechnicalAnalysers import SecurityMovingCountedPositive
from PyFin.Analysis.TechnicalAnalysis.StatefulTechnicalAnalysers import SecurityMovingPositiveAverage
from PyFin.Analysis.TechnicalAnalysis.StatefulTechnicalAnalysers import SecurityMovingCountedNegative
from PyFin.Analysis.TechnicalAnalysis.StatefulTechnicalAnalysers import SecurityMovingNegativeAverage
from PyFin.Analysis.TechnicalAnalysis.StatefulTechnicalAnalysers import SecurityMovingPositiveDifferenceAverage
from PyFin.Analysis.TechnicalAnalysis.StatefulTechnicalAnalysers import SecurityMovingNegativeDifferenceAverage
from PyFin.Analysis.TechnicalAnalysis.StatefulTechnicalAnalysers import SecurityMovingRSI
from PyFin.Analysis.TechnicalAnalysis.StatefulTechnicalAnalysers import SecurityMovingLogReturn
from PyFin.Analysis.TechnicalAnalysis.StatefulTechnicalAnalysers import SecurityMovingCorrelation
__all__ = ['SecuritySignValueHolder',
'SecurityAverageValueHolder',
'SecurityXAverageValueHolder',
'SecurityMACDValueHolder',
'SecurityExpValueHolder',
'SecurityLogValueHolder',
'SecuritySqrtValueHolder',
'SecurityPowValueHolder',
'SecurityAbsValueHolder',
'SecurityAcosValueHolder',
'SecurityAcoshValueHolder',
'SecurityAsinValueHolder',
'SecurityAsinhValueHolder',
'SecurityNormInvValueHolder',
'SecurityCeilValueHolder',
'SecurityFloorValueHolder',
'SecurityRoundValueHolder',
'SecurityDiffValueHolder',
'SecuritySimpleReturnValueHolder',
'SecurityLogReturnValueHolder',
'SecurityMaximumValueHolder',
'SecurityMinimumValueHolder',
'SecurityMovingAverage',
'SecurityMovingDecay',
'SecurityMovingMax',
'SecurityMovingArgMax',
'SecurityMovingMin',
'SecurityMovingArgMin',
'SecurityMovingRank',
'SecurityMovingQuantile',
'SecurityMovingAllTrue',
'SecurityMovingAnyTrue',
'SecurityMovingSum',
'SecurityMovingVariance',
'SecurityMovingStandardDeviation',
'SecurityMovingCountedPositive',
'SecurityMovingPositiveAverage',
'SecurityMovingCountedNegative',
'SecurityMovingNegativeAverage',
'SecurityMovingPositiveDifferenceAverage',
'SecurityMovingNegativeDifferenceAverage',
'SecurityMovingRSI',
'SecurityMovingLogReturn',
'SecurityMovingCorrelation']
| wegamekinglc/Finance-Python | PyFin/Analysis/TechnicalAnalysis/__init__.py | Python | mit | 6,089 | 0.007226 |
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log
import six
import webob.dec
import webob.exc
from manila.api.openstack import wsgi
from manila.i18n import _
from manila import utils
from manila.wsgi import common as base_wsgi
LOG = log.getLogger(__name__)
class FaultWrapper(base_wsgi.Middleware):
"""Calls down the middleware stack, making exceptions into faults."""
_status_to_type = {}
@staticmethod
def status_to_type(status):
if not FaultWrapper._status_to_type:
for clazz in utils.walk_class_hierarchy(webob.exc.HTTPError):
FaultWrapper._status_to_type[clazz.code] = clazz
return FaultWrapper._status_to_type.get(
status, webob.exc.HTTPInternalServerError)()
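    # Illustrative note (not part of the original module): status_to_type maps
    # an HTTP status code onto the matching webob exception instance, e.g.
    #
    #     FaultWrapper.status_to_type(404)  # -> webob.exc.HTTPNotFound()
    #     FaultWrapper.status_to_type(599)  # unknown -> HTTPInternalServerError()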
def _error(self, inner, req):
if isinstance(inner, UnicodeDecodeError):
msg = _("Error decoding your request. Either the URL or the "
"request body contained characters that could not be "
"decoded by Manila.")
return wsgi.Fault(webob.exc.HTTPBadRequest(explanation=msg))
LOG.exception("Caught error: %s", inner)
safe = getattr(inner, 'safe', False)
headers = getattr(inner, 'headers', None)
status = getattr(inner, 'code', 500)
if status is None:
status = 500
msg_dict = dict(url=req.url, status=status)
LOG.info("%(url)s returned with HTTP %(status)d", msg_dict)
outer = self.status_to_type(status)
if headers:
outer.headers = headers
# NOTE(johannes): We leave the explanation empty here on
# purpose. It could possibly have sensitive information
# that should not be returned back to the user. See
# bugs 868360 and 874472
# NOTE(eglynn): However, it would be over-conservative and
# inconsistent with the EC2 API to hide every exception,
# including those that are safe to expose, see bug 1021373
if safe:
outer.explanation = '%s: %s' % (inner.__class__.__name__,
six.text_type(inner))
return wsgi.Fault(outer)
@webob.dec.wsgify(RequestClass=wsgi.Request)
def __call__(self, req):
try:
return req.get_response(self.application)
except Exception as ex:
return self._error(ex, req)
| bswartz/manila | manila/api/middleware/fault.py | Python | apache-2.0 | 3,089 | 0 |
from statistics import mean
from datetime import datetime, timedelta
import calendar
from typing import NamedTuple
from typing import Iterable, Tuple, Dict
from energy_shaper import group_into_profiled_intervals
from .dayanalysis import Usage, get_daily_charges
class MonthUsage(NamedTuple):
""" Represents a usage period """
days: int
peak: float
shoulder: float
offpeak: float
total: float
demand: float
def __repr__(self) -> str:
return f"<MonthUsage {self.days} days {self.total}>"
def get_monthly_charges(
records: Iterable[Tuple[datetime, datetime, float]],
retailer: str = "ergon",
tariff: str = "T14",
fy: str = "2016",
) -> Dict[Tuple[int, int], MonthUsage]:
""" Get summated monthly charges
    :param records: Iterable of tuples in the form (billing_start, billing_end, usage)
    :param retailer: Retailer config to get the peak time periods from
    :param tariff: Name of tariff from config
    :param fy: Financial year of the tariff rates
"""
months: dict = {}
billing = list(group_into_profiled_intervals(records, interval_m=30))
for reading in billing:
# Dates are end of billing period so first interval is previous day
day = reading.end - timedelta(hours=0.5)
month = (day.year, day.month)
if month not in months:
months[month] = []
dailies = get_daily_charges(records, retailer, tariff, fy)
for day in dailies:
month = (day.year, day.month)
months[month].append(dailies[day])
months_summary = {}
for month in months:
daily_data = months[month]
demand = average_peak_demand(daily_data)
u = [sum(x) for x in zip(*daily_data)]
num_days = calendar.monthrange(month[0], month[1])[1]
summary = MonthUsage(num_days, u[0], u[1], u[2], u[3], demand)
months_summary[month] = summary
return months_summary
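# Illustrative usage sketch (not part of the original module); the half-hourly
# interval boundaries and usage values below are made up:
#
#     records = [
#         (datetime(2016, 1, 1, 0, 0), datetime(2016, 1, 1, 0, 30), 0.5),
#         (datetime(2016, 1, 1, 0, 30), datetime(2016, 1, 1, 1, 0), 0.4),
#     ]
#     summary = get_monthly_charges(records, retailer='ergon', tariff='T14')
#     # summary[(2016, 1)] is a MonthUsage namedtuple for January 2016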
def average_daily_peak_demand(peak_usage: float, peak_hrs: float = 6.5) -> float:
""" Calculate the average daily peak demand in kW
:param peak_usage: Usage during peak window in kWh
:param peak_hrs: Length of peak window in hours
"""
return peak_usage / peak_hrs
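# Example (not part of the original module): 13 kWh used across the default
# 6.5 hour peak window averages out to 13 / 6.5 = 2.0 kW of demand:
#
#     average_daily_peak_demand(13.0)        # -> 2.0
#     average_daily_peak_demand(13.0, 6.5)   # -> 2.0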
def average_peak_demand(daily_summary: Iterable[Usage]) -> float:
""" Get the average peak demand for a set of daily usage stats
"""
# Sort and get top 4 demand days
top_four_days = []
for i, day in enumerate(
sorted(daily_summary, key=lambda tup: (tup[0], tup[1]), reverse=True)
):
if i < 4:
if day.peak:
demand = day.peak
else:
demand = day.shoulder
avg_peak_demand = average_daily_peak_demand(demand)
top_four_days.append(avg_peak_demand)
if top_four_days:
return mean(top_four_days)
else:
return 0
| aguinane/qld-tariffs | qldtariffs/monthanalysis.py | Python | mit | 2,897 | 0.000345 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Isaku Yamahata <yamahata at private email ne jp>
# <yamahata at valinux co jp>
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova import flags
from nova import log as logging
from nova import utils
from nova.network import linux_net
from nova.openstack.common import cfg
from ryu.app.client import OFPClient
LOG = logging.getLogger(__name__)
ryu_linux_net_opt = cfg.StrOpt('linuxnet_ovs_ryu_api_host',
default='127.0.0.1:8080',
help='Openflow Ryu REST API host:port')
FLAGS = flags.FLAGS
FLAGS.register_opt(ryu_linux_net_opt)
def _get_datapath_id(bridge_name):
out, _err = utils.execute('ovs-vsctl', 'get', 'Bridge',
bridge_name, 'datapath_id', run_as_root=True)
return out.strip().strip('"')
def _get_port_no(dev):
out, _err = utils.execute('ovs-vsctl', 'get', 'Interface', dev,
'ofport', run_as_root=True)
return int(out.strip())
class LinuxOVSRyuInterfaceDriver(linux_net.LinuxOVSInterfaceDriver):
def __init__(self):
super(LinuxOVSRyuInterfaceDriver, self).__init__()
LOG.debug('ryu rest host %s', FLAGS.linuxnet_ovs_ryu_api_host)
self.ryu_client = OFPClient(FLAGS.linuxnet_ovs_ryu_api_host)
self.datapath_id = _get_datapath_id(
FLAGS.linuxnet_ovs_integration_bridge)
if linux_net.binary_name == 'nova-network':
for tables in [linux_net.iptables_manager.ipv4,
linux_net.iptables_manager.ipv6]:
tables['filter'].add_rule('FORWARD',
'--in-interface gw-+ --out-interface gw-+ -j DROP')
linux_net.iptables_manager.apply()
def plug(self, network, mac_address, gateway=True):
LOG.debug("network %s mac_adress %s gateway %s",
network, mac_address, gateway)
ret = super(LinuxOVSRyuInterfaceDriver, self).plug(
network, mac_address, gateway)
port_no = _get_port_no(self.get_dev(network))
self.ryu_client.create_port(network['uuid'], self.datapath_id, port_no)
return ret
| sileht/deb-openstack-quantum | quantum/plugins/ryu/nova/linux_net.py | Python | apache-2.0 | 2,786 | 0.000359 |
from distutils.core import setup
import py2exe
import os, sys
from glob import glob
import PyQt5
data_files=[('',['C:/Python34/DLLs/sqlite3.dll','C:/Python34/Lib/site-packages/PyQt5/icuuc53.dll','C:/Python34/Lib/site-packages/PyQt5/icudt53.dll','C:/Python34/Lib/site-packages/PyQt5/icuin53.dll','C:/Python34/Lib/site-packages/PyQt5/Qt5Gui.dll','C:/Python34/Lib/site-packages/PyQt5/Qt5Core.dll','C:/Python34/Lib/site-packages/PyQt5/Qt5Widgets.dll']),
('data',['data/configure','data/model.sqlite','data/loading.jpg']),
('platforms',['C:/Python34/Lib/site-packages/PyQt5/plugins/platforms/qminimal.dll','C:/Python34/Lib/site-packages/PyQt5/plugins/platforms/qoffscreen.dll','C:/Python34/Lib/site-packages/PyQt5/plugins/platforms/qwindows.dll'])
]
qt_platform_plugins = [("platforms", glob(PyQt5.__path__[0] + r'\plugins\platforms\*.*'))]
data_files.extend(qt_platform_plugins)
msvc_dlls = [('.', glob(r'''C:/Windows/System32/msvc?100.dll'''))]
data_files.extend(msvc_dlls)
setup(
windows = ["ChemDB.py"],
zipfile = None,
data_files = data_files,
options = {
'py2exe': {
'includes' : ['sip','PyQt5.QtCore','PyQt5.QtGui',"sqlite3",'xlrd','xlwt',"_sqlite3","PyQt5"],
}
},
) | dedichan/ChemDB | setup_win.py | Python | gpl-3.0 | 1,223 | 0.035159 |
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import numpy as np
import six
import paddle.fluid.core as core
import paddle.fluid.proto.framework_pb2 as framework_pb2
def get_all_op_protos():
"""
Get all registered op proto from PaddlePaddle C++ end.
:return: A list of registered OpProto.
"""
protostrs = core.get_all_op_protos()
ret_values = []
for pbstr in protostrs:
op_proto = framework_pb2.OpProto.FromString(six.binary_type(pbstr))
ret_values.append(op_proto)
return ret_values
def is_str(s):
return isinstance(s, six.string_types)
class OpDescCreationMethod(object):
"""
Convert the user's input(only keyword arguments are supported) to OpDesc
based on the OpProto.
:param op_proto: The OpProto object.
:type op_proto: op_proto_pb2.OpProto
"""
def __init__(self, op_proto):
if not isinstance(op_proto, framework_pb2.OpProto):
raise TypeError(
"Type of op_proto should be OpProto in PaddlePaddle.")
self.__op_proto__ = op_proto
def __call__(self, *args, **kwargs):
"""
Convert user's input to OpDesc. Only keyword arguments are supported.
:return: The OpDesc based on user input.
:rtype: op_desc_pb2.OpDesc
"""
if len(args) != 0:
raise ValueError("Only keyword arguments are supported.")
op_desc = framework_pb2.OpDesc()
for input_parameter in self.__op_proto__.inputs:
input_arguments = kwargs.get(input_parameter.name, [])
if is_str(input_arguments):
input_arguments = [input_arguments]
if not input_parameter.duplicable and len(input_arguments) > 1:
raise ValueError(
"Input %s expects only one input, but %d are given." %
(input_parameter.name, len(input_arguments)))
ipt = op_desc.inputs.add()
ipt.parameter = input_parameter.name
ipt.arguments.extend(input_arguments)
for output_parameter in self.__op_proto__.outputs:
output_arguments = kwargs.get(output_parameter.name, [])
if is_str(output_arguments):
output_arguments = [output_arguments]
if not output_parameter.duplicable and len(output_arguments) > 1:
raise ValueError(
"Output %s expects only one output, but %d are given." %
(output_parameter.name, len(output_arguments)))
out = op_desc.outputs.add()
out.parameter = output_parameter.name
out.arguments.extend(output_arguments)
# Types
op_desc.type = self.__op_proto__.type
# Attrs
for attr in self.__op_proto__.attrs:
if attr.generated:
continue
user_defined_attr = kwargs.get(attr.name, None)
if user_defined_attr is not None:
new_attr = op_desc.attrs.add()
new_attr.name = attr.name
new_attr.type = attr.type
if isinstance(user_defined_attr, np.ndarray):
user_defined_attr = user_defined_attr.tolist()
if attr.type == framework_pb2.INT:
new_attr.i = user_defined_attr
elif attr.type == framework_pb2.FLOAT:
new_attr.f = user_defined_attr
elif attr.type == framework_pb2.STRING:
new_attr.s = user_defined_attr
elif attr.type == framework_pb2.BOOLEAN:
new_attr.b = user_defined_attr
elif attr.type == framework_pb2.INTS:
new_attr.ints.extend(user_defined_attr)
elif attr.type == framework_pb2.FLOATS:
new_attr.floats.extend(user_defined_attr)
elif attr.type == framework_pb2.STRINGS:
new_attr.strings.extend(user_defined_attr)
elif attr.type == framework_pb2.BOOLEANS:
new_attr.bools.extend(user_defined_attr)
elif attr.type == framework_pb2.INT_PAIRS:
for p in user_defined_attr:
pair = new_attr.int_pairs.add()
pair.first = p[0]
pair.second = p[1]
else:
raise NotImplementedError(
"A not supported attribute type: %s." % (
str(attr.type)))
return op_desc
@staticmethod
def any_is_true(generator):
"""
Reduce a boolean array to a single boolean parameter. If any element in
the array is True, this function will return True, otherwise False.
"""
for flag in generator:
if flag:
return True
return False
class OpInfo(object):
def __init__(self, name, method, inputs, outputs, attrs):
self.name = name
self.method = method
self.inputs = inputs
self.outputs = outputs
self.attrs = attrs
def create_op_creation_method(op_proto):
"""
Generate op creation method for an OpProto.
"""
method = OpDescCreationMethod(op_proto)
def __impl__(*args, **kwargs):
opdesc = method(*args, **kwargs)
return core.Operator.create(opdesc.SerializeToString())
return OpInfo(
method=__impl__,
name=op_proto.type,
inputs=[(var.name, var.duplicable) for var in op_proto.inputs],
outputs=[(var.name, var.duplicable) for var in op_proto.outputs],
attrs=[attr.name for attr in op_proto.attrs])
class OperatorFactory(object):
def __init__(self):
self.op_methods = dict()
for op_proto in get_all_op_protos():
method = create_op_creation_method(op_proto)
self.op_methods[method.name] = method
def __call__(self, *args, **kwargs):
if "type" in kwargs:
if len(args) != 0:
raise ValueError(
"Except the argument \"type\","
"all of the other arguments should be keyword arguments.")
t = kwargs.pop("type")
else:
if len(args) != 1:
raise ValueError(
"Except the argument \"type\","
"all of the other arguments should be keyword arguments.")
t = args[0]
return self.get_op_info(t).method(**kwargs)
def types(self):
return list(self.op_methods.keys())
def get_op_info(self, t):
if t not in self.op_methods:
raise ValueError("The operator: %s is not registered." % t)
return self.op_methods.get(t)
def get_op_input_names(self, type):
return [x[0] for x in self.get_op_info(type).inputs]
def get_op_inputs(self, type):
return self.get_op_info(type).inputs
def get_op_output_names(self, type):
return [x[0] for x in self.get_op_info(type).outputs]
def get_op_outputs(self, type):
return self.get_op_info(type).outputs
def get_op_attr_names(self, type):
return self.get_op_info(type).attrs
class __RecurrentOp__(object):
__proto__ = None
type = "recurrent"
def __init__(self):
# cache recurrent_op's proto
if self.__proto__ is None:
for op_proto in get_all_op_protos():
if op_proto.type == self.type:
self.__proto__ = op_proto
def __call__(self, *args, **kwargs):
if self.type not in args and "type" not in kwargs:
kwargs["type"] = self.type
# create proto
create_method = OpDescCreationMethod(self.__proto__)
proto = create_method(*args, **kwargs)
# create rnnop
return core.RecurrentOp.create(proto.SerializeToString())
class __DynamicRecurrentOp__(object):
__proto__ = None
type = "dynamic_recurrent"
def __init__(self):
# cache recurrent_op's proto
if self.__proto__ is None:
for op_proto in get_all_op_protos():
if op_proto.type == self.type:
self.__proto__ = op_proto
def __call__(self, *args, **kwargs):
if self.type not in args and "type" not in kwargs:
kwargs["type"] = self.type
# create proto
create_method = OpDescCreationMethod(self.__proto__)
proto = create_method(*args, **kwargs)
# create rnnop
return core.DynamicRecurrentOp.create(proto.SerializeToString())
class __CondOp__(object):
__proto__ = None
type = "cond"
def __init__(self):
# cache recurrent_op's proto
if self.__proto__ is None:
for op_proto in get_all_op_protos():
if op_proto.type == self.type:
self.__proto__ = op_proto
def __call__(self, *args, **kwargs):
if self.type not in args and "type" not in kwargs:
kwargs["type"] = self.type
# create proto
create_method = OpDescCreationMethod(self.__proto__)
proto = create_method(*args, **kwargs)
# create condop
return core.CondOp.create(proto.SerializeToString())
Operator = OperatorFactory() # The default global factory
RecurrentOp = __RecurrentOp__()
DynamicRecurrentOp = __DynamicRecurrentOp__()
CondOp = __CondOp__()
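# Illustrative usage sketch (not part of the original module): the factory can
# be introspected without knowing in advance which operators the C++ core
# registered (that set depends on the build):
#
#     for op_type in Operator.types()[:5]:
#         print(op_type,
#               Operator.get_op_input_names(op_type),
#               Operator.get_op_output_names(op_type))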
| QiJune/Paddle | python/paddle/fluid/op.py | Python | apache-2.0 | 10,014 | 0 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
__RCSID__ = "$Id$"
def VmB(vmKey):
__memScale = {"kB": 1024.0, "mB": 1024.0 * 1024.0, "KB": 1024.0, "MB": 1024.0 * 1024.0}
__vmKeys = [
"VmPeak:",
"VmSize:",
"VmLck:",
"VmHWM:",
"VmRSS:",
"VmData:",
"VmStk:",
"VmExe:",
"VmLib:",
"VmPTE:",
"VmPeak",
"VmSize",
"VmLck",
"VmHWM",
"VmRSS",
"VmData",
"VmStk",
"VmExe",
"VmLib",
"VmPTE",
]
if vmKey not in __vmKeys:
return 0
procFile = "/proc/%d/status" % os.getpid()
# get pseudo file /proc/<pid>/status
try:
with open(procFile) as myFile:
value = myFile.read()
except Exception:
return 0.0 # non-Linux?
# get vmKey line e.g. 'VmRSS: 9999 kB\n ...'
i = value.index(vmKey)
value = value[i:].split(None, 3) # whitespace
if len(value) < 3:
return 0.0 # invalid format?
# convert Vm value to bytes
return float(value[1]) * __memScale[value[2]]
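# Illustrative usage sketch (not part of the original module): on Linux this
# returns the current process' memory figures in bytes (0.0 where /proc is
# not available):
#
#     rss_bytes = VmB("VmRSS:")
#     peak_bytes = VmB("VmPeak:")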
| ic-hep/DIRAC | src/DIRAC/Core/Utilities/MemStat.py | Python | gpl-3.0 | 1,174 | 0.000852 |
#!/usr/bin/env python
from setuptools import setup
setup(
name='YourAppName',
version='1.0',
description='OpenShift App',
author='Your Name',
author_email='example@example.com',
url='http://www.python.org/sigs/distutils-sig/',
install_requires=['Django<=1.4'],
)
| laloxxx20/TiendaVirtual | setup.py | Python | apache-2.0 | 293 | 0 |
from django.db import models
from django.contrib.auth.models import User
from datetime import datetime
from django.utils.translation import ugettext_lazy as _
# relational databases are a terrible way to do
# multicast messages (just ask Twitter) but here you have it :-)
import re
reply_re = re.compile("^@(\w+)")
class Templar(models.Model):
user = models.ForeignKey(User)
public_key = models.CharField(max_length=250, unique=True)
get_absolute_url = models.CharField(max_length=250)
pw_encrypted = models.TextField()
avatar = models.TextField()
class Feed(models.Model):
title = models.CharField(max_length=250)
public_key = models.CharField(max_length=250)
created = models.DateTimeField(_('created'), default=datetime.now)
#relationships
owner = models.ForeignKey(Templar, related_name='feeds',null=True, blank=True)
followers = models.ManyToManyField(
'Templar',
through='FeedFollowers',
blank=True, null=True)
class Message(models.Model):
feed = models.ForeignKey(Feed,null=True, related_name='messages', blank=True)
templar = models.ForeignKey(Templar, related_name='messages',null=True, blank=True)
reply_to = models.ForeignKey('Message', related_name='replies', null=True, blank=True)
text = models.CharField(max_length=250)
sent = models.DateTimeField(_('sent'), default=datetime.now)
class FeedFollowers(models.Model):
feed = models.ForeignKey(Feed,null=True,blank=True)
follower = models.ForeignKey('Templar',null=True,blank=True)
def __unicode__(self):
        return self.feed.title + ' ' + self.follower.user.username
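# Illustrative use of the models above (a sketch, not part of the original module;
# assumes an existing django.contrib.auth User instance `user`):
#
#   templar = Templar.objects.create(user=user, public_key='pk1',
#                                    get_absolute_url='/templar/pk1/',
#                                    pw_encrypted='', avatar='')
#   feed = Feed.objects.create(title='My feed', public_key='pk1', owner=templar)
#   Message.objects.create(feed=feed, templar=templar, text='hello world')
#   FeedFollowers.objects.create(feed=feed, follower=templar)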
#class Message(models.Model):
# """
# a single message from a user
# """
#
# text = models.CharField(_('text'), max_length=140)
# sender_type = models.ForeignKey(ContentType)
# sender_id = models.PositiveIntegerField()
# sender = generic.GenericForeignKey('sender_type', 'sender_id')
# sent = models.DateTimeField(_('sent'), default=datetime.now)
#
# def __unicode__(self):
# return self.text
#
# def get_absolute_url(self):
# return ("single_message", [self.id])
# get_absolute_url = models.permalink(get_absolute_url)
#
# class Meta:
# ordering = ('-sent',)
#
#
#class MessageInstanceManager(models.Manager):
#
# def messages_for(self, recipient):
# recipient_type = ContentType.objects.get_for_model(recipient)
# return MessageInstance.objects.filter(recipient_type=recipient_type, recipient_id=recipient.id)
#
#
#class MessageInstance(models.Model):
# """
# the appearance of a message in a follower's timeline
#
# denormalized for better performance
# """
#
# text = models.CharField(_('text'), max_length=140)
# sender_type = models.ForeignKey(ContentType, related_name='message_instances')
# sender_id = models.PositiveIntegerField()
# sender = generic.GenericForeignKey('sender_type', 'sender_id')
# sent = models.DateTimeField(_('sent'))
#
# # to migrate to generic foreign key, find out the content_type id of User and do something like:
# # ALTER TABLE "microblogging_messageinstance"
# # ADD COLUMN "recipient_type_id" integer NOT NULL
# # REFERENCES "django_content_type" ("id")
# # DEFAULT <user content type id>;
# #
# # NOTE: you will also need to drop the foreign key constraint if it exists
#
# # recipient = models.ForeignKey(User, related_name="received_message_instances", verbose_name=_('recipient'))
#
# recipient_type = models.ForeignKey(ContentType)
# recipient_id = models.PositiveIntegerField()
# recipient = generic.GenericForeignKey('recipient_type', 'recipient_id')
#
# objects = MessageInstanceManager()
#
#
#def message(sender, instance, created, **kwargs):
# #if message is None:
# # message = Message.objects.create(text=text, sender=user)
# recipients = set() # keep track of who's received it
# user = instance.sender
#
# # add the sender's followers
# user_content_type = ContentType.objects.get_for_model(user)
# followings = Following.objects.filter(followed_content_type=user_content_type, followed_object_id=user.id)
# for follower in (following.follower_content_object for following in followings):
# recipients.add(follower)
#
# # add sender
# recipients.add(user)
#
# # if starts with @user send it to them too even if not following
# match = reply_re.match(instance.text)
# if match:
# try:
# reply_recipient = User.objects.get(username=match.group(1))
# recipients.add(reply_recipient)
# except User.DoesNotExist:
# pass # oh well
# else:
# if notification:
# notification.send([reply_recipient], "message_reply_received", {'message': instance,})
#
# # now send to all the recipients
# for recipient in recipients:
# message_instance = MessageInstance.objects.create(text=instance.text, sender=user, recipient=recipient, sent=instance.sent)
#
#
#class FollowingManager(models.Manager):
#
# def is_following(self, follower, followed):
# try:
# following = self.get(follower_object_id=follower.id, followed_object_id=followed.id)
# return True
# except Following.DoesNotExist:
# return False
#
# def follow(self, follower, followed):
# if follower != followed and not self.is_following(follower, followed):
# Following(follower_content_object=follower, followed_content_object=followed).save()
#
# def unfollow(self, follower, followed):
# try:
# following = self.get(follower_object_id=follower.id, followed_object_id=followed.id)
# following.delete()
# except Following.DoesNotExist:
# pass
#
#
#class Following(models.Model):
# follower_content_type = models.ForeignKey(ContentType, related_name="followed", verbose_name=_('follower'))
# follower_object_id = models.PositiveIntegerField()
# follower_content_object = generic.GenericForeignKey('follower_content_type', 'follower_object_id')
#
# followed_content_type = models.ForeignKey(ContentType, related_name="followers", verbose_name=_('followed'))
# followed_object_id = models.PositiveIntegerField()
# followed_content_object = generic.GenericForeignKey('followed_content_type', 'followed_object_id')
#
# objects = FollowingManager()
#
#post_save.connect(message, sender=Message)
| claytantor/grailo | feeds/models.py | Python | lgpl-3.0 | 6,456 | 0.006506 |
#!/usr/bin/python
from config import hostname, port, username, password
import carddav
import sogotests
import unittest
import webdavlib
import time
class JsonDavEventTests(unittest.TestCase):
def setUp(self):
self._connect_as_user()
def _connect_as_user(self, newuser=username, newpassword=password):
self.dv = carddav.Carddav(newuser, newpassword)
def _create_new_event(self, path):
gid = self.dv.newguid(path)
event = {'startDate': "2015-12-25",
'startTime': "10:00",
'endDate': "2015-12-25",
'endTime': "23:00",
'isTransparent': 0,
'sendAppointmentNotifications': 0,
'summary': "Big party",
'alarm': {'action': 'display',
'quantity': 10,
'unit': "MINUTES",
'reference': "BEFORE",
'relation': "START",
'email': "sogo1@example.com"},
'organizer': {'name': u"Balthazar C\xe9sar",
'email': "sogo2@example.com"},
'c_name': gid,
'c_folder': path
}
return (event, path, gid)
def _get_dav_data(self, filename, user=username, passwd=password):
w = webdavlib.WebDAVClient(hostname, port, user, passwd)
query = webdavlib.HTTPGET("http://localhost/SOGo/dav/%s/Calendar/personal/%s" % (username, filename))
w.execute(query)
self.assertEquals(query.response['status'], 200)
return query.response['body'].split("\r\n")
def _get_dav_field(self, davdata, fieldname):
try:
data = [a.split(':')[1] for a in davdata if fieldname in a][0]
except IndexError:
data = ''
return data
def test_create_new_event(self):
path = 'Calendar/personal'
(event, folder, gid) = self._create_new_event(path)
#print "Saving Event to:", folder, gid
self.dv.save_event(event, folder, gid)
#- Get the event back with JSON
self._connect_as_user()
self.dv.load_events()
elist = [e for e in self.dv.events if e['c_name'] == gid]
#- MUST have this event -- only once
self.assertEquals(len(elist), 1)
strdate = "%d-%.02d-%.02d" % time.gmtime(elist[0]['c_startdate'])[0:3]
self.assertEquals(strdate, event['startDate'])
#- Get the event back with DAV
dav = self._get_dav_data(gid, username, password)
self.assertEquals(self._get_dav_field(dav, 'SUMMARY:'), event['summary'])
class JsonDavPhoneTests(unittest.TestCase):
def setUp(self):
self._connect_as_user()
self.newphone = [{'type': 'home', 'value': '123.456.7890'}]
self.newphones_difftype = [{'type': 'home', 'value': '123.456.7890'},
{'type': 'work', 'value': '987.654.3210'},
{'type': 'fax', 'value': '555.666.7777'}]
self.newphones_sametype = [{'type': 'work', 'value': '123.456.7890'},
{'type': 'work', 'value': '987.654.3210'}]
# Easier to erase them all in tearDown
self.allphones = list(self.newphone)
self.allphones.extend(self.newphones_difftype)
self.allphones.extend(self.newphones_sametype)
#- In case there are no cards for this user
try:
self._get_card()
except IndexError:
path = 'Contacts/personal'
(card, path, gid) = self._create_new_card(path)
self._save_card(card)
def tearDown(self):
self._connect_as_user()
self._get_card()
#- Remove the phones we just added
for phone in self.allphones:
try:
self.card['phones'].pop(self.card['phones'].index(phone))
except ValueError:
#print "Can't find", phone
pass
self._save_card()
def _connect_as_user(self, newuser=username, newpassword=password):
self.dv = carddav.Carddav(newuser, newpassword)
def _create_new_card(self, path):
gid = self.dv.newguid(path)
card = {'c_categories': None,
'c_cn': 'John Doe',
'c_component': 'vcard',
'c_givenname': 'John Doe',
'c_mail': 'johndoe@nothere.com',
'c_name': gid,
'c_o': '',
'c_screenname': '',
'c_sn': '',
'c_telephonenumber': '123.456.7890',
'emails': [{'type': 'pref', 'value': 'johndoe@nothere.com'}],
'phones': [{'type': 'home', 'value': '111.222.3333'}],
'id': gid}
return (card, path, gid)
def _get_card(self, name="John Doe"):
tmp_card = self.dv.get_cards(name)[0]
self.card = self.dv.get_card(tmp_card['c_name'])
def _save_card(self, card=None):
if card:
self.dv.save_card(card)
else:
self.dv.save_card(self.card)
def _get_dav_data(self, filename, user=username, passwd=password):
w = webdavlib.WebDAVClient(hostname, port, user, passwd)
query = webdavlib.HTTPGET("http://localhost/SOGo/dav/%s/Contacts/personal/%s" % (username, filename))
w.execute(query)
self.assertEquals(query.response['status'], 200)
return query.response['body'].split("\r\n")
def _phone_to_dav_str(self, phonedict):
return "TEL;TYPE=%s:%s" % (phonedict['type'], phonedict['value'])
def _testMultiplePhones(self, phones):
""" Add Multiple Phones to Contact JSON and verify with DAV """
#- Use JSON to get CARD and add a phone and save it back
self._get_card()
oldphones = self.card['phones']
oldphones.extend(phones)
self._save_card()
#- Make sure that the phone is there when using JSON
self._connect_as_user()
self._get_card()
#print "C:::", self.card
testphones = self.card['phones']
#print "P1:", oldphones
#print "P2:", testphones
self.assertEquals(sorted(oldphones), sorted(testphones))
#- Verify that DAV has the same values
dav = self._get_dav_data(self.card['id'], username, password)
for phone in phones:
found = dav.index(self._phone_to_dav_str(phone))
self.assertTrue(found > 0)
def testSinglePhone(self):
self._testMultiplePhones(self.newphone)
def testMultipleDifferentPhones(self):
self._testMultiplePhones(self.newphones_difftype)
def testMultipleSameTypePhones(self):
self._testMultiplePhones(self.newphones_sametype)
if __name__ == "__main__":
sogotests.runTests()
| saydulk/sogo | Tests/Integration/test-carddav.py | Python | gpl-2.0 | 6,859 | 0.004228 |
#!/usr/bin/python
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Most of this code was copied from the FIFE file xmlmap.py
# It is part of the local code base now so we can customize what happens
# as we read map files
import fife
try:
import xml.etree.cElementTree as ET
except:
import xml.etree.ElementTree as ET
import loaders
from serializers import *
import time
FORMAT = '1.0'
class XMLMapLoader(fife.ResourceLoader):
def __init__(self, engine, data, callback):
""" The XMLMapLoader parses the xml map using several section.
Each section fires a callback (if given) which can e. g. be
used to show a progress bar.
The callback sends two values, a string and a float (which shows
the overall process): callback(string, float)
Inputs:
engine = FIFE engine
data = Engine object for PARPG data
callback = function callback
"""
fife.ResourceLoader.__init__(self)
self.thisown = 0
self.callback = callback
self.engine = engine
self.data = data
self.vfs = self.engine.getVFS()
self.model = self.engine.getModel()
self.pool = self.engine.getImagePool()
self.anim_pool = self.engine.getAnimationPool()
self.map = None
self.source = None
self.time_to_load = 0
        self.nspace = None
        # Fallback coordinates used when an <instance> element omits its x/y
        # attributes (otherwise the first such instance would read an unset attribute).
        self.x = 0
        self.y = 0
def _err(self, msg):
raise SyntaxError(''.join(['File: ', self.source, ' . ', msg]))
def loadResource(self, location):
start_time = time.time()
self.source = location.getFilename()
f = self.vfs.open(self.source)
f.thisown = 1
tree = ET.parse(f)
root = tree.getroot()
map = self.parseMap(root)
self.time_to_load = time.time() - start_time
return map
def parseMap(self, map_elt):
        if map_elt is None:
self._err('No <map> element found at top level of map file definition.')
id,format = map_elt.get('id'),map_elt.get('format')
if not format == FORMAT: self._err(''.join(['This file has format ', format, ' but this loader has format ', FORMAT]))
if not id: self._err('Map declared without an identifier.')
map = None
try:
self.map = self.model.createMap(str(id))
self.map.setResourceFile(self.source)
except fife.Exception, e: # NameClash appears as general fife.Exception; any ideas?
print e.getMessage()
print ''.join(['File: ', self.source, '. The map ', str(id), ' already exists! Ignoring map definition.'])
return None
# xml-specific directory imports. This is used by xml savers.
self.map.importDirs = []
if self.callback is not None:
self.callback('created map', float(0.25) )
self.parseImports(map_elt, self.map)
self.parseLayers(map_elt, self.map)
self.parseCameras(map_elt, self.map)
return self.map
def parseImports(self, map_elt, map):
parsedImports = {}
if self.callback:
tmplist = map_elt.findall('import')
i = float(0)
for item in map_elt.findall('import'):
file = item.get('file')
if file:
file = reverse_root_subfile(self.source, file)
dir = item.get('dir')
if dir:
dir = reverse_root_subfile(self.source, dir)
# Don't parse duplicate imports
if (dir,file) in parsedImports:
print "Duplicate import:" ,(dir,file)
continue
parsedImports[(dir,file)] = 1
if file and dir:
                loaders.loadImportFile('/'.join([dir, file]), self.engine)
elif file:
loaders.loadImportFile(file, self.engine)
elif dir:
loaders.loadImportDirRec(dir, self.engine)
map.importDirs.append(dir)
else:
print 'Empty import statement?'
if self.callback:
i += 1
self.callback('loaded imports', float( i / float(len(tmplist)) * 0.25 + 0.25 ) )
def parseLayers(self, map_elt, map):
if self.callback is not None:
tmplist = map_elt.findall('layer')
i = float(0)
for layer in map_elt.findall('layer'):
id = layer.get('id')
grid_type = layer.get('grid_type')
x_scale = layer.get('x_scale')
y_scale = layer.get('y_scale')
rotation = layer.get('rotation')
x_offset = layer.get('x_offset')
y_offset = layer.get('y_offset')
pathing = layer.get('pathing')
if not x_scale: x_scale = 1.0
if not y_scale: y_scale = 1.0
if not rotation: rotation = 0.0
if not x_offset: x_offset = 0.0
if not y_offset: y_offset = 0.0
if not pathing: pathing = "cell_edges_only"
if not id: self._err('<layer> declared with no id attribute.')
if not grid_type: self._err(''.join(['Layer ', str(id), ' has no grid_type attribute.']))
allow_diagonals = pathing == "cell_edges_and_diagonals"
cellgrid = self.model.getCellGrid(grid_type)
if not cellgrid: self._err('<layer> declared with invalid cellgrid type. (%s)' % grid_type)
cellgrid.setRotation(float(rotation))
cellgrid.setXScale(float(x_scale))
cellgrid.setYScale(float(y_scale))
cellgrid.setXShift(float(x_offset))
cellgrid.setYShift(float(y_offset))
layer_obj = None
try:
layer_obj = map.createLayer(str(id), cellgrid)
except fife.Exception, e:
print e.getMessage()
print 'The layer ' + str(id) + ' already exists! Ignoring this layer.'
continue
strgy = fife.CELL_EDGES_ONLY
if pathing == "cell_edges_and_diagonals":
strgy = fife.CELL_EDGES_AND_DIAGONALS
if pathing == "freeform":
strgy = fife.FREEFORM
layer_obj.setPathingStrategy(strgy)
self.parseInstances(layer, layer_obj)
if self.callback is not None:
i += 1
self.callback('loaded layer :' + str(id), float( i / float(len(tmplist)) * 0.25 + 0.5 ) )
# cleanup
if self.callback is not None:
del tmplist
del i
def parseInstances(self, layerelt, layer):
instelt = layerelt.find('instances')
instances = instelt.findall('i')
instances.extend(instelt.findall('inst'))
instances.extend(instelt.findall('instance'))
for instance in instances:
objectID = instance.get('object')
if not objectID:
objectID = instance.get('obj')
if not objectID:
objectID = instance.get('o')
if not objectID: self._err('<instance> does not specify an object attribute.')
nspace = instance.get('namespace')
if not nspace:
nspace = instance.get('ns')
if not nspace:
nspace = self.nspace
if not nspace: self._err('<instance> %s does not specify an object namespace, and no default is available.' % str(objectID))
self.nspace = nspace
object = self.model.getObject(str(objectID), str(nspace))
if not object:
print ''.join(['Object with id=', str(objectID), ' ns=', str(nspace), ' could not be found. Omitting...'])
continue
x = instance.get('x')
y = instance.get('y')
z = instance.get('z')
stackpos = instance.get('stackpos')
id = instance.get('id')
if x:
x = float(x)
self.x = x
else:
self.x = self.x + 1
x = self.x
if y:
y = float(y)
self.y = y
else:
y = self.y
if z:
z = float(z)
else:
z = 0.0
if not id:
id = ''
else:
id = str(id)
inst = layer.createInstance(object, fife.ExactModelCoordinate(x,y,z), str(id))
rotation = instance.get('r')
if not rotation:
rotation = instance.get('rotation')
if not rotation:
angles = object.get2dGfxVisual().getStaticImageAngles()
if angles:
rotation = angles[0]
else:
rotation = 0
else:
rotation = int(rotation)
inst.setRotation(rotation)
fife.InstanceVisual.create(inst)
if (stackpos):
inst.get2dGfxVisual().setStackPosition(int(stackpos))
if (object.getAction('default')):
target = fife.Location(layer)
inst.act('default', target, True)
#Check for PARPG specific object attributes
object_type = instance.get('object_type')
if ( object_type ):
inst_dict = {}
inst_dict["type"] = object_type
inst_dict["id"] = id
inst_dict["xpos"] = x
inst_dict["ypos"] = y
inst_dict["gfx"] = objectID
inst_dict["is_open"] = instance.get('is_open')
inst_dict["locked"] = instance.get('locked')
inst_dict["name"] = instance.get('name')
inst_dict["text"] = instance.get('text')
inst_dict["target_map_name"] = instance.get('target_map_name')
inst_dict["target_map"] = instance.get('target_map')
inst_dict["target_pos"] = (instance.get('target_x'), instance.get('target_y'))
self.data.createObject( layer, inst_dict, inst )
def parseCameras(self, map_elt, map):
if self.callback:
tmplist = map_elt.findall('camera')
i = float(0)
for camera in map_elt.findall('camera'):
id = camera.get('id')
zoom = camera.get('zoom')
tilt = camera.get('tilt')
rotation = camera.get('rotation')
ref_layer_id = camera.get('ref_layer_id')
ref_cell_width = camera.get('ref_cell_width')
ref_cell_height = camera.get('ref_cell_height')
viewport = camera.get('viewport')
if not zoom: zoom = 1
if not tilt: tilt = 0
if not rotation: rotation = 0
if not id: self._err('Camera declared without an id.')
if not ref_layer_id: self._err(''.join(['Camera ', str(id), ' declared with no reference layer.']))
if not (ref_cell_width and ref_cell_height): self._err(''.join(['Camera ', str(id), ' declared without reference cell dimensions.']))
try:
if viewport:
cam = self.engine.getView().addCamera(str(id), map.getLayer(str(ref_layer_id)),fife.Rect(*[int(c) for c in viewport.split(',')]),fife.ExactModelCoordinate(0,0,0))
else:
screen = self.engine.getRenderBackend()
cam = self.engine.getView().addCamera(str(id), map.getLayer(str(ref_layer_id)),fife.Rect(0,0,screen.getScreenWidth(),screen.getScreenHeight()),fife.ExactModelCoordinate(0,0,0))
cam.setCellImageDimensions(int(ref_cell_width), int(ref_cell_height))
cam.setRotation(float(rotation))
cam.setTilt(float(tilt))
cam.setZoom(float(zoom))
except fife.Exception, e:
print e.getMessage()
if self.callback:
i += 1
self.callback('loaded camera: ' + str(id), float( i / len(tmplist) * 0.25 + 0.75 ) )
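# Rough usage sketch (an assumption -- in PARPG this loader is normally invoked
# indirectly through the local `loaders` module rather than called directly):
#
#   loader = XMLMapLoader(engine, data, callback=None)
#   parpg_map = loader.loadResource(fife.ResourceLocation('maps/example.xml'))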
| orlandov/parpg-game | local_loaders/xmlmap.py | Python | gpl-3.0 | 13,207 | 0.007496 |
import os
import tempfile
import unittest
import logging
from pyidf import ValidationLevel
import pyidf
from pyidf.idf import IDF
from pyidf.thermal_zones_and_surfaces import GlazedDoorInterzone
log = logging.getLogger(__name__)
class TestGlazedDoorInterzone(unittest.TestCase):
def setUp(self):
self.fd, self.path = tempfile.mkstemp()
def tearDown(self):
os.remove(self.path)
def test_create_glazeddoorinterzone(self):
pyidf.validation_level = ValidationLevel.error
obj = GlazedDoorInterzone()
# alpha
var_name = "Name"
obj.name = var_name
# object-list
var_construction_name = "object-list|Construction Name"
obj.construction_name = var_construction_name
# object-list
var_building_surface_name = "object-list|Building Surface Name"
obj.building_surface_name = var_building_surface_name
# object-list
var_outside_boundary_condition_object = "object-list|Outside Boundary Condition Object"
obj.outside_boundary_condition_object = var_outside_boundary_condition_object
# real
var_multiplier = 1.0
obj.multiplier = var_multiplier
# real
var_starting_x_coordinate = 6.6
obj.starting_x_coordinate = var_starting_x_coordinate
# real
var_starting_z_coordinate = 7.7
obj.starting_z_coordinate = var_starting_z_coordinate
# real
var_length = 8.8
obj.length = var_length
# real
var_height = 9.9
obj.height = var_height
idf = IDF()
idf.add(obj)
idf.save(self.path, check=False)
with open(self.path, mode='r') as f:
for line in f:
log.debug(line.strip())
idf2 = IDF(self.path)
self.assertEqual(idf2.glazeddoorinterzones[0].name, var_name)
self.assertEqual(idf2.glazeddoorinterzones[0].construction_name, var_construction_name)
self.assertEqual(idf2.glazeddoorinterzones[0].building_surface_name, var_building_surface_name)
self.assertEqual(idf2.glazeddoorinterzones[0].outside_boundary_condition_object, var_outside_boundary_condition_object)
self.assertAlmostEqual(idf2.glazeddoorinterzones[0].multiplier, var_multiplier)
self.assertAlmostEqual(idf2.glazeddoorinterzones[0].starting_x_coordinate, var_starting_x_coordinate)
self.assertAlmostEqual(idf2.glazeddoorinterzones[0].starting_z_coordinate, var_starting_z_coordinate)
self.assertAlmostEqual(idf2.glazeddoorinterzones[0].length, var_length)
        self.assertAlmostEqual(idf2.glazeddoorinterzones[0].height, var_height)
| rbuffat/pyidf | tests/test_glazeddoorinterzone.py | Python | apache-2.0 | 2,670 | 0.003745 |
"""
Definition of the Prompt class, designed for editing Configuration
with a terminal prompt.
"""
from functools import partial
from prompt_toolkit import prompt
from prompt_toolkit.contrib.regular_languages.compiler import compile as pt_compile
from prompt_toolkit.contrib.completers import WordCompleter
from prompt_toolkit.contrib.regular_languages.completion import GrammarCompleter
import neural_world.commons as commons
import neural_world.actions as actions
LOGGER = commons.logger()
PROMPT_WELCOME = '?> '
COMMAND_NAMES = {
# command id: command aliases
'quit': ('quit', 'exit', ':q', 'q'),
'help': ('help', 'wtf', ':h', 'h'),
'conf': ('config', 'conf', ':c', 'c', ':p', 'p'),
'set' : ('set', ':s', 's'),
'get' : ('get', ':g', 'g'),
'apply': ('apply', ':a', 'a'),
}
def commands_grammar(config, commands=COMMAND_NAMES):
"""Return a grammar for given commands (dict command:aliases)
    that uses the given Configuration for field autocompletion.
"""
def aliases(cmd):
"""access the aliases of given (sub)command.
if not in commands dict, will use it as an iterable."""
try: return '|'.join(commands[cmd])
except KeyError: return '|'.join(cmd)
def cmd2reg(cmd, subcmd=None, args=None):
"""layout automatization"""
return (
'(\s* (?P<cmd>(' + aliases(cmd) + '))'
+ ('' if subcmd is None
else ('\s+ (?P<subcmd>('+ aliases(subcmd) + ')) \s* '))
+ ('' if args is None else ('\s+ (?P<args>(.*)) \s* '))
+ ') |\n'
)
# get grammar, log it and return it
grammar = (
cmd2reg('quit', None, None)
+ cmd2reg('help', None, None)
+ cmd2reg('conf', None, None)
+ cmd2reg('set', config.mutable_fields, True)
+ cmd2reg('get', config.all_fields, None)
+ cmd2reg('apply', None, None)
)
LOGGER.debug('PROMPT GRAMMAR:\n' + str(grammar))
return pt_compile(grammar)
class Prompt(actions.ActionEmitter):
def __init__(self, config, invoker):
super().__init__(invoker)
self.config = config
self.grammar = commands_grammar(config)
completer = GrammarCompleter(
self.grammar,
{'subcmd': WordCompleter(tuple(config.all_fields))}
)
self._get_input = partial(prompt, PROMPT_WELCOME, completer=completer)
def input(self):
"""Handle user input, until user want to apply the config"""
while not self._handle(self._get_input()): pass
def _handle(self, input_text):
"""Return True when the user asks for leave the prompt"""
        if len(input_text) == 0:
            return False
        match = self.grammar.match(input_text)
        if match is None:
            print('invalid command')
            return False # do not quit the prompt
else:
values = match.variables()
subcmd = values.get('subcmd')
args = values.get('args')
cmd = next( # get root name, not an alias
cmd_name
for cmd_name, aliases in COMMAND_NAMES.items()
if values.get('cmd') in aliases
)
# call function associated with the command
leave_prompt = bool(getattr(self, 'on_' + cmd)(subcmd, args))
return leave_prompt
return False
def on_quit(self, subcmd:None=None, args:None=None):
"""send a quit request to the simulation, and leave the prompt"""
self.invoker.add(actions.QuitAction())
return True # leave the prompt
def on_apply(self, subcmd:None=None, args:None=None):
"""Leave the prompt, then apply the configuration to the simulation"""
return True # leave the prompt
def on_conf(self, subcmd:None=None, args:None=None):
"""show the config"""
print(self.config)
def on_set(self, config_field, args):
"""set given value for given mutable config field
ex: set mutation_rate 0.2"""
setattr(self.config, config_field, args)
print(config_field, 'set to', getattr(self.config, config_field))
def on_get(self, config_field, args:None=None):
"""show value of given config field
ex: get space_height"""
print(config_field + ':', getattr(self.config, config_field))
def on_help(self, subcmd:None=None, args:None=None):
"""show this help"""
callbacks = tuple(sorted(
attrname[3:] for attrname in self.__dir__()
if attrname.startswith('on_')
))
maxlen = len(max(callbacks, key=len))
# printings !
for callback in callbacks:
print(callback.rjust(maxlen) + ':',
getattr(self, 'on_' + callback).__doc__)
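# Minimal usage sketch (assumes `config` is a neural_world Configuration and
# `invoker` is the action invoker used elsewhere in the code base):
#
#   prompt = Prompt(config, invoker)
#   prompt.input()   # loops until the user enters an "apply" or "quit" command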
| Aluriak/neural_world | neural_world/prompt.py | Python | gpl-2.0 | 4,845 | 0.009701 |
#!/usr/bin/env python
# coding: utf-8
from __future__ import unicode_literals
__license__ = 'Public Domain'
import codecs
import io
import os
import random
import sys
from .options import (
parseOpts,
)
from .compat import (
compat_expanduser,
compat_getpass,
compat_shlex_split,
workaround_optparse_bug9161,
)
from .utils import (
DateRange,
decodeOption,
DEFAULT_OUTTMPL,
DownloadError,
match_filter_func,
MaxDownloadsReached,
preferredencoding,
read_batch_urls,
SameFileError,
setproctitle,
std_headers,
write_string,
render_table,
)
from .update import update_self
from .downloader import (
FileDownloader,
)
from .extractor import gen_extractors, list_extractors
from .extractor.adobepass import MSO_INFO
from .YoutubeDL import YoutubeDL
def _real_main(argv=None):
# Compatibility fixes for Windows
if sys.platform == 'win32':
# https://github.com/rg3/youtube-dl/issues/820
codecs.register(lambda name: codecs.lookup('utf-8') if name == 'cp65001' else None)
workaround_optparse_bug9161()
setproctitle('youtube-dl')
parser, opts, args = parseOpts(argv)
# Set user agent
if opts.user_agent is not None:
std_headers['User-Agent'] = opts.user_agent
# Set referer
if opts.referer is not None:
std_headers['Referer'] = opts.referer
# Custom HTTP headers
if opts.headers is not None:
for h in opts.headers:
if ':' not in h:
parser.error('wrong header formatting, it should be key:value, not "%s"' % h)
key, value = h.split(':', 1)
if opts.verbose:
write_string('[debug] Adding header from command line option %s:%s\n' % (key, value))
std_headers[key] = value
# Dump user agent
if opts.dump_user_agent:
write_string(std_headers['User-Agent'] + '\n', out=sys.stdout)
sys.exit(0)
# Batch file verification
batch_urls = []
if opts.batchfile is not None:
try:
if opts.batchfile == '-':
batchfd = sys.stdin
else:
batchfd = io.open(
compat_expanduser(opts.batchfile),
'r', encoding='utf-8', errors='ignore')
batch_urls = read_batch_urls(batchfd)
if opts.verbose:
write_string('[debug] Batch file urls: ' + repr(batch_urls) + '\n')
except IOError:
sys.exit('ERROR: batch file could not be read')
all_urls = batch_urls + args
all_urls = [url.strip() for url in all_urls]
_enc = preferredencoding()
all_urls = [url.decode(_enc, 'ignore') if isinstance(url, bytes) else url for url in all_urls]
if opts.list_extractors:
for ie in list_extractors(opts.age_limit):
write_string(ie.IE_NAME + (' (CURRENTLY BROKEN)' if not ie._WORKING else '') + '\n', out=sys.stdout)
matchedUrls = [url for url in all_urls if ie.suitable(url)]
for mu in matchedUrls:
write_string(' ' + mu + '\n', out=sys.stdout)
sys.exit(0)
if opts.list_extractor_descriptions:
for ie in list_extractors(opts.age_limit):
if not ie._WORKING:
continue
desc = getattr(ie, 'IE_DESC', ie.IE_NAME)
if desc is False:
continue
if hasattr(ie, 'SEARCH_KEY'):
_SEARCHES = ('cute kittens', 'slithering pythons', 'falling cat', 'angry poodle', 'purple fish', 'running tortoise', 'sleeping bunny', 'burping cow')
_COUNTS = ('', '5', '10', 'all')
desc += ' (Example: "%s%s:%s" )' % (ie.SEARCH_KEY, random.choice(_COUNTS), random.choice(_SEARCHES))
write_string(desc + '\n', out=sys.stdout)
sys.exit(0)
if opts.ap_list_mso:
table = [[mso_id, mso_info['name']] for mso_id, mso_info in MSO_INFO.items()]
write_string('Supported TV Providers:\n' + render_table(['mso', 'mso name'], table) + '\n', out=sys.stdout)
sys.exit(0)
# Conflicting, missing and erroneous options
if opts.usenetrc and (opts.username is not None or opts.password is not None):
parser.error('using .netrc conflicts with giving username/password')
if opts.password is not None and opts.username is None:
parser.error('account username missing\n')
if opts.ap_password is not None and opts.ap_username is None:
parser.error('TV Provider account username missing\n')
if opts.outtmpl is not None and (opts.usetitle or opts.autonumber or opts.useid):
parser.error('using output template conflicts with using title, video ID or auto number')
if opts.usetitle and opts.useid:
parser.error('using title conflicts with using video ID')
if opts.username is not None and opts.password is None:
opts.password = compat_getpass('Type account password and press [Return]: ')
if opts.ap_username is not None and opts.ap_password is None:
opts.ap_password = compat_getpass('Type TV provider account password and press [Return]: ')
if opts.ratelimit is not None:
numeric_limit = FileDownloader.parse_bytes(opts.ratelimit)
if numeric_limit is None:
parser.error('invalid rate limit specified')
opts.ratelimit = numeric_limit
if opts.min_filesize is not None:
numeric_limit = FileDownloader.parse_bytes(opts.min_filesize)
if numeric_limit is None:
parser.error('invalid min_filesize specified')
opts.min_filesize = numeric_limit
if opts.max_filesize is not None:
numeric_limit = FileDownloader.parse_bytes(opts.max_filesize)
if numeric_limit is None:
parser.error('invalid max_filesize specified')
opts.max_filesize = numeric_limit
if opts.sleep_interval is not None:
if opts.sleep_interval < 0:
parser.error('sleep interval must be positive or 0')
if opts.max_sleep_interval is not None:
if opts.max_sleep_interval < 0:
parser.error('max sleep interval must be positive or 0')
if opts.max_sleep_interval < opts.sleep_interval:
parser.error('max sleep interval must be greater than or equal to min sleep interval')
else:
opts.max_sleep_interval = opts.sleep_interval
if opts.ap_mso and opts.ap_mso not in MSO_INFO:
parser.error('Unsupported TV Provider, use --ap-list-mso to get a list of supported TV Providers')
def parse_retries(retries):
if retries in ('inf', 'infinite'):
parsed_retries = float('inf')
else:
try:
parsed_retries = int(retries)
except (TypeError, ValueError):
parser.error('invalid retry count specified')
return parsed_retries
if opts.retries is not None:
opts.retries = parse_retries(opts.retries)
if opts.fragment_retries is not None:
opts.fragment_retries = parse_retries(opts.fragment_retries)
if opts.buffersize is not None:
numeric_buffersize = FileDownloader.parse_bytes(opts.buffersize)
if numeric_buffersize is None:
parser.error('invalid buffer size specified')
opts.buffersize = numeric_buffersize
if opts.playliststart <= 0:
raise ValueError('Playlist start must be positive')
if opts.playlistend not in (-1, None) and opts.playlistend < opts.playliststart:
raise ValueError('Playlist end must be greater than playlist start')
if opts.extractaudio:
if opts.audioformat not in ['best', 'aac', 'mp3', 'm4a', 'opus', 'vorbis', 'wav']:
parser.error('invalid audio format specified')
if opts.audioquality:
opts.audioquality = opts.audioquality.strip('k').strip('K')
if not opts.audioquality.isdigit():
parser.error('invalid audio quality specified')
if opts.recodevideo is not None:
if opts.recodevideo not in ['mp4', 'flv', 'webm', 'ogg', 'mkv', 'avi']:
parser.error('invalid video recode format specified')
if opts.convertsubtitles is not None:
if opts.convertsubtitles not in ['srt', 'vtt', 'ass']:
parser.error('invalid subtitle format specified')
if opts.date is not None:
date = DateRange.day(opts.date)
else:
date = DateRange(opts.dateafter, opts.datebefore)
# Do not download videos when there are audio-only formats
if opts.extractaudio and not opts.keepvideo and opts.format is None:
opts.format = 'bestaudio/best'
# --all-sub automatically sets --write-sub if --write-auto-sub is not given
# this was the old behaviour if only --all-sub was given.
if opts.allsubtitles and not opts.writeautomaticsub:
opts.writesubtitles = True
outtmpl = ((opts.outtmpl is not None and opts.outtmpl) or
(opts.format == '-1' and opts.usetitle and '%(title)s-%(id)s-%(format)s.%(ext)s') or
(opts.format == '-1' and '%(id)s-%(format)s.%(ext)s') or
(opts.usetitle and opts.autonumber and '%(autonumber)s-%(title)s-%(id)s.%(ext)s') or
(opts.usetitle and '%(title)s-%(id)s.%(ext)s') or
(opts.useid and '%(id)s.%(ext)s') or
(opts.autonumber and '%(autonumber)s-%(id)s.%(ext)s') or
DEFAULT_OUTTMPL)
if not os.path.splitext(outtmpl)[1] and opts.extractaudio:
parser.error('Cannot download a video and extract audio into the same'
' file! Use "{0}.%(ext)s" instead of "{0}" as the output'
' template'.format(outtmpl))
any_getting = opts.geturl or opts.gettitle or opts.getid or opts.getthumbnail or opts.getdescription or opts.getfilename or opts.getformat or opts.getduration or opts.dumpjson or opts.dump_single_json
any_printing = opts.print_json
download_archive_fn = compat_expanduser(opts.download_archive) if opts.download_archive is not None else opts.download_archive
# PostProcessors
postprocessors = []
# Add the metadata pp first, the other pps will copy it
if opts.metafromtitle:
postprocessors.append({
'key': 'MetadataFromTitle',
'titleformat': opts.metafromtitle
})
if opts.addmetadata:
postprocessors.append({'key': 'FFmpegMetadata'})
if opts.extractaudio:
postprocessors.append({
'key': 'FFmpegExtractAudio',
'preferredcodec': opts.audioformat,
'preferredquality': opts.audioquality,
'nopostoverwrites': opts.nopostoverwrites,
})
if opts.recodevideo:
postprocessors.append({
'key': 'FFmpegVideoConvertor',
'preferedformat': opts.recodevideo,
})
if opts.convertsubtitles:
postprocessors.append({
'key': 'FFmpegSubtitlesConvertor',
'format': opts.convertsubtitles,
})
if opts.embedsubtitles:
postprocessors.append({
'key': 'FFmpegEmbedSubtitle',
})
if opts.embedthumbnail:
already_have_thumbnail = opts.writethumbnail or opts.write_all_thumbnails
postprocessors.append({
'key': 'EmbedThumbnail',
'already_have_thumbnail': already_have_thumbnail
})
if not already_have_thumbnail:
opts.writethumbnail = True
# XAttrMetadataPP should be run after post-processors that may change file
# contents
if opts.xattrs:
postprocessors.append({'key': 'XAttrMetadata'})
# Please keep ExecAfterDownload towards the bottom as it allows the user to modify the final file in any way.
# So if the user is able to remove the file before your postprocessor runs it might cause a few problems.
if opts.exec_cmd:
postprocessors.append({
'key': 'ExecAfterDownload',
'exec_cmd': opts.exec_cmd,
})
external_downloader_args = None
if opts.external_downloader_args:
external_downloader_args = compat_shlex_split(opts.external_downloader_args)
postprocessor_args = None
if opts.postprocessor_args:
postprocessor_args = compat_shlex_split(opts.postprocessor_args)
match_filter = (
None if opts.match_filter is None
else match_filter_func(opts.match_filter))
ydl_opts = {
'usenetrc': opts.usenetrc,
'username': opts.username,
'password': opts.password,
'twofactor': opts.twofactor,
'videopassword': opts.videopassword,
'ap_mso': opts.ap_mso,
'ap_username': opts.ap_username,
'ap_password': opts.ap_password,
'quiet': (opts.quiet or any_getting or any_printing),
'no_warnings': opts.no_warnings,
'forceurl': opts.geturl,
'forcetitle': opts.gettitle,
'forceid': opts.getid,
'forcethumbnail': opts.getthumbnail,
'forcedescription': opts.getdescription,
'forceduration': opts.getduration,
'forcefilename': opts.getfilename,
'forceformat': opts.getformat,
'forcejson': opts.dumpjson or opts.print_json,
'dump_single_json': opts.dump_single_json,
'simulate': opts.simulate or any_getting,
'skip_download': opts.skip_download,
'format': opts.format,
'listformats': opts.listformats,
'outtmpl': outtmpl,
'autonumber_size': opts.autonumber_size,
'restrictfilenames': opts.restrictfilenames,
'ignoreerrors': opts.ignoreerrors,
'force_generic_extractor': opts.force_generic_extractor,
'ratelimit': opts.ratelimit,
'nooverwrites': opts.nooverwrites,
'retries': opts.retries,
'fragment_retries': opts.fragment_retries,
'skip_unavailable_fragments': opts.skip_unavailable_fragments,
'buffersize': opts.buffersize,
'noresizebuffer': opts.noresizebuffer,
'continuedl': opts.continue_dl,
'noprogress': opts.noprogress,
'progress_with_newline': opts.progress_with_newline,
'playliststart': opts.playliststart,
'playlistend': opts.playlistend,
'playlistreverse': opts.playlist_reverse,
'noplaylist': opts.noplaylist,
'logtostderr': opts.outtmpl == '-',
'consoletitle': opts.consoletitle,
'nopart': opts.nopart,
'updatetime': opts.updatetime,
'writedescription': opts.writedescription,
'writeannotations': opts.writeannotations,
'writeinfojson': opts.writeinfojson,
'writethumbnail': opts.writethumbnail,
'write_all_thumbnails': opts.write_all_thumbnails,
'writesubtitles': opts.writesubtitles,
'writeautomaticsub': opts.writeautomaticsub,
'allsubtitles': opts.allsubtitles,
'listsubtitles': opts.listsubtitles,
'subtitlesformat': opts.subtitlesformat,
'subtitleslangs': opts.subtitleslangs,
'matchtitle': decodeOption(opts.matchtitle),
'rejecttitle': decodeOption(opts.rejecttitle),
'max_downloads': opts.max_downloads,
'prefer_free_formats': opts.prefer_free_formats,
'verbose': opts.verbose,
'dump_intermediate_pages': opts.dump_intermediate_pages,
'write_pages': opts.write_pages,
'test': opts.test,
'keepvideo': opts.keepvideo,
'min_filesize': opts.min_filesize,
'max_filesize': opts.max_filesize,
'min_views': opts.min_views,
'max_views': opts.max_views,
'daterange': date,
'cachedir': opts.cachedir,
'youtube_print_sig_code': opts.youtube_print_sig_code,
'age_limit': opts.age_limit,
'download_archive': download_archive_fn,
'cookiefile': opts.cookiefile,
'nocheckcertificate': opts.no_check_certificate,
'prefer_insecure': opts.prefer_insecure,
'proxy': opts.proxy,
'socket_timeout': opts.socket_timeout,
'bidi_workaround': opts.bidi_workaround,
'debug_printtraffic': opts.debug_printtraffic,
'prefer_ffmpeg': opts.prefer_ffmpeg,
'include_ads': opts.include_ads,
'default_search': opts.default_search,
'youtube_include_dash_manifest': opts.youtube_include_dash_manifest,
'encoding': opts.encoding,
'extract_flat': opts.extract_flat,
'mark_watched': opts.mark_watched,
'merge_output_format': opts.merge_output_format,
'postprocessors': postprocessors,
'fixup': opts.fixup,
'source_address': opts.source_address,
'call_home': opts.call_home,
'sleep_interval': opts.sleep_interval,
'max_sleep_interval': opts.max_sleep_interval,
'external_downloader': opts.external_downloader,
'list_thumbnails': opts.list_thumbnails,
'playlist_items': opts.playlist_items,
'xattr_set_filesize': opts.xattr_set_filesize,
'match_filter': match_filter,
'no_color': opts.no_color,
'ffmpeg_location': opts.ffmpeg_location,
'hls_prefer_native': opts.hls_prefer_native,
'hls_use_mpegts': opts.hls_use_mpegts,
'external_downloader_args': external_downloader_args,
'postprocessor_args': postprocessor_args,
'cn_verification_proxy': opts.cn_verification_proxy,
'geo_verification_proxy': opts.geo_verification_proxy,
}
with YoutubeDL(ydl_opts) as ydl:
# Update version
if opts.update_self:
update_self(ydl.to_screen, opts.verbose, ydl._opener)
# Remove cache dir
if opts.rm_cachedir:
ydl.cache.remove()
# Maybe do nothing
if (len(all_urls) < 1) and (opts.load_info_filename is None):
if opts.update_self or opts.rm_cachedir:
sys.exit()
ydl.warn_if_short_id(sys.argv[1:] if argv is None else argv)
parser.error(
'You must provide at least one URL.\n'
'Type youtube-dl --help to see a list of all options.')
try:
if opts.load_info_filename is not None:
retcode = ydl.download_with_info_file(compat_expanduser(opts.load_info_filename))
else:
retcode = ydl.download(all_urls)
except MaxDownloadsReached:
ydl.to_screen('--max-download limit reached, aborting.')
retcode = 101
sys.exit(retcode)
def main(argv=None):
try:
_real_main(argv)
except DownloadError:
sys.exit(1)
except SameFileError:
sys.exit('ERROR: fixed output name but more than one file to download')
except KeyboardInterrupt:
sys.exit('\nERROR: Interrupted by user')
__all__ = ['main', 'YoutubeDL', 'gen_extractors', 'list_extractors']
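# Programmatic use normally goes through the YoutubeDL class rather than main();
# a minimal sketch (the options and URL below are illustrative):
#
#   with YoutubeDL({'format': 'bestaudio/best'}) as ydl:
#       ydl.download(['https://example.com/watch?v=xxxx'])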
| jbuchbinder/youtube-dl | youtube_dl/__init__.py | Python | unlicense | 18,757 | 0.001546 |
#!/usr/bin/env python
#-*- coding: utf-8 -
import keystoneclient.v2_0.client as keystone
from keystoneauth1.identity import v2
from keystoneauth1 import session
import novaclient.client as nova
import cinderclient.client as cinder
from glanceclient.v1 import client as glance
import neutronclient.v2_0.client as neutron
import heatclient.client as heat
import time, paramiko,os,re,errno
from socket import error as socket_error
from os import environ as env
class OpenStackUtils():
def __init__(self):
auth = v2.Password(auth_url=env['OS_AUTH_URL'],
username=env['OS_USERNAME'],
password=env['OS_PASSWORD'],
tenant_id=env['OS_TENANT_ID'])
sess = session.Session(auth=auth)
self.keystone_client = keystone.Client(username=env['OS_USERNAME'],
password=env['OS_PASSWORD'],
tenant_id=env['OS_TENANT_ID'],
auth_url=env['OS_AUTH_URL'],
region_name=env['OS_REGION_NAME'])
heat_url = self.keystone_client \
.service_catalog.url_for(service_type='orchestration',
endpoint_type='publicURL')
self.nova_client = nova.Client('2.1', region_name=env['OS_REGION_NAME'], session=sess)
self.cinder_client = cinder.Client('2', region_name=env['OS_REGION_NAME'], session=sess)
self.glance_client = glance.Client('2', region_name=env['OS_REGION_NAME'], session=sess)
self.neutron_client = neutron.Client(region_name=env['OS_REGION_NAME'], session=sess)
self.heat_client = heat.Client('1', region_name=env['OS_REGION_NAME'], endpoint=heat_url, session=sess)
def boot_vm_with_userdata_and_port(self,userdata_path,keypair,port):
#nics = [{'port-id': env['NOSE_PORT_ID']}]
nics = [{'port-id': port['port']['id'] }]
server = self.nova_client.servers.create(name="test-server-" + self.current_time_ms(), image=env['NOSE_IMAGE_ID'],
flavor=env['NOSE_FLAVOR'],userdata=file(userdata_path),key_name=keypair.name, nics=nics)
print 'Building, please wait...'
# wait for server create to be complete
self.wait_server_is_up(server)
self.wait_for_cloud_init(server)
return server
def boot_vm(self,image_id=env['NOSE_IMAGE_ID'],flavor=env['NOSE_FLAVOR'],keypair='default'):
nics = [{'net-id': env['NOSE_NET_ID']}]
server = self.nova_client.servers.create(name="test-server-" + self.current_time_ms(), image=image_id,security_groups=[env['NOSE_SG_ID']],
flavor=flavor, key_name=keypair.name, nics=nics)
print 'Building, please wait...'
self.wait_server_is_up(server)
self.wait_for_cloud_init(server)
return server
def get_server(self,server_id):
return self.nova_client.servers.get(server_id)
def destroy_server(self,server):
self.nova_client.servers.delete(server)
time.sleep(30)
def current_time_ms(self):
return str(int(round(time.time() * 1000)))
def get_console_log(self,server):
return self.nova_client.servers.get(server.id).get_console_output(length=600)
def get_spice_console(self,server):
return self.nova_client.servers.get(server.id).get_spice_console('spice-html5')
def create_server_snapshot(self,server):
return self.nova_client.servers.create_image(server,server.name+self.current_time_ms())
def get_image(self,image_id):
return self.glance_client.images.get(image_id)
def destroy_image(self,image_id):
self.glance_client.images.delete(image_id)
def initiate_ssh(self,floating_ip,private_key_filename):
ssh_connection = paramiko.SSHClient()
ssh_connection.set_missing_host_key_policy(paramiko.AutoAddPolicy())
retries_left = 5
while True:
try:
ssh_connection.connect(floating_ip.ip,username='cloud',key_filename=private_key_filename,timeout=180)
break
except socket_error as e:
if e.errno != errno.ECONNREFUSED or retries_left <= 1:
raise e
time.sleep(10) # wait 10 seconds and retry
retries_left -= 1
return ssh_connection
def create_floating_ip(self):
return self.nova_client.floating_ips.create('public')
#def associate_floating_ip_to_port(self,floating_ip):
# self.neutron_client.update_floatingip(floating_ip.id,{'floatingip': {'port_id': env['NOSE_PORT_ID'] }})
def associate_floating_ip_to_server(self,floating_ip, server):
self.nova_client.servers.get(server.id).add_floating_ip(floating_ip.ip)
time.sleep(10)
def delete_floating_ip(self,floating_ip):
self.nova_client.floating_ips.delete(floating_ip.id)
def rescue(self,server):
self.wait_server_available(server)
return self.nova_client.servers.get(server.id).rescue()
def unrescue(self,server):
self.wait_server_available(server)
return self.nova_client.servers.get(server.id).unrescue()
def attach_volume_to_server(self,server,volume):
#self.nova_client.volumes.create_server_volume(server_id=server.id,volume_id=env['NOSE_VOLUME_ID'])
self.nova_client.volumes.create_server_volume(server_id=server.id,volume_id=volume.id)
status =volume.status
while status != 'in-use':
status = self.cinder_client.volumes.get(volume.id).status
print status
print "volume is in use Now : "+ status
def detach_volume_from_server(self,server,volume):
#self.nova_client.volumes.delete_server_volume(server.id,env['NOSE_VOLUME_ID'])
self.nova_client.volumes.delete_server_volume(server.id,volume.id)
def get_flavor_disk_size(self,flavor_id):
return self.nova_client.flavors.get(flavor_id).disk
def server_reboot(self,server,type):
serv=self.get_server(server.id)
serv.reboot(reboot_type=type)
def wait_server_is_up(self,server):
status = server.status
while status != 'ACTIVE':
status = self.get_server(server.id).status
print "server is up"
def wait_for_cloud_init(self,server):
while True:
console_log = self.get_console_log(server)
if re.search('^.*Cloud-init .* finished.*$', console_log, flags=re.MULTILINE):
print("Cloudinit finished")
break
else:
time.sleep(10)
def wait_server_available(self,server):
task_state = getattr(server,'OS-EXT-STS:task_state')
while task_state is not None:
task_state = getattr(self.get_server(server.id),'OS-EXT-STS:task_state')
print "the server is available"
def create_keypair(self):
suffix =self.current_time_ms()
keypair= self.nova_client.keypairs.create(name="nose_keypair"+suffix)
private_key_filename = env['HOME']+'/key-'+suffix+'.pem'
fp = os.open(private_key_filename, os.O_WRONLY | os.O_CREAT, 0o600)
with os.fdopen(fp, 'w') as f:
f.write(keypair.private_key)
return keypair , private_key_filename
def delete_keypair(self,keypair,private_key_filename):
self.nova_client.keypairs.delete(keypair.id)
os.remove(private_key_filename)
def create_port_with_sg(self):
body_value = {'port': {
'admin_state_up': True,
'security_groups': [env['NOSE_SG_ID']],
'name': 'port-test'+self.current_time_ms(),
'network_id': env['NOSE_NET_ID'],
}}
port=self.neutron_client.create_port(body=body_value)
time.sleep(20)
return port
def delete_port(self,port):
self.neutron_client.delete_port(port['port']['id'])
def create_volume(self):
volume=self.cinder_client.volumes.create(5, name="test-volume"+self.current_time_ms())
print "the status of volume is:"+ volume.status
status = volume.status
while status != 'available':
status = self.cinder_client.volumes.get(volume.id).status
print "volume is created : "+ status
return volume
def delete_volume(self,volume):
self.cinder_client.volumes.delete(volume.id)
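# Rough usage sketch (assumes the OS_* and NOSE_* environment variables used above
# are set; these calls create and destroy real resources, so they stay commented out):
#
#   utils = OpenStackUtils()
#   keypair, key_file = utils.create_keypair()
#   server = utils.boot_vm(keypair=keypair)
#   utils.destroy_server(server)
#   utils.delete_keypair(keypair, key_file)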
| juliend88/os_image_factory | test-tools/pytesting_os/openstackutils.py | Python | gpl-3.0 | 8,608 | 0.015335 |
import os, glob
__all__ = [os.path.basename(f)[:-3] for f in glob.glob(os.path.dirname(__file__) + "/*.py")] | dana-i2cat/felix | msjp/module/common/__init__.py | Python | apache-2.0 | 108 | 0.027778 |
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..compat import compat_urllib_parse_unquote
from ..utils import (
clean_html,
ExtractorError,
determine_ext,
)
class XVideosIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?xvideos\.com/video(?P<id>[0-9]+)(?:.*)'
_TEST = {
'url': 'http://www.xvideos.com/video4588838/biker_takes_his_girl',
'md5': '4b46ae6ea5e6e9086e714d883313c0c9',
'info_dict': {
'id': '4588838',
'ext': 'flv',
'title': 'Biker Takes his Girl',
'age_limit': 18,
}
}
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
mobj = re.search(r'<h1 class="inlineError">(.+?)</h1>', webpage)
if mobj:
raise ExtractorError('%s said: %s' % (self.IE_NAME, clean_html(mobj.group(1))), expected=True)
video_title = self._html_search_regex(
r'<title>(.*?)\s+-\s+XVID', webpage, 'title')
video_thumbnail = self._search_regex(
r'url_bigthumb=(.+?)&', webpage, 'thumbnail', fatal=False)
formats = []
video_url = compat_urllib_parse_unquote(self._search_regex(
r'flv_url=(.+?)&', webpage, 'video URL', default=''))
if video_url:
formats.append({'url': video_url})
player_args = self._search_regex(
r'(?s)new\s+HTML5Player\((.+?)\)', webpage, ' html5 player', default=None)
if player_args:
for arg in player_args.split(','):
format_url = self._search_regex(
r'(["\'])(?P<url>https?://.+?)\1', arg, 'url',
default=None, group='url')
if not format_url:
continue
ext = determine_ext(format_url)
if ext == 'mp4':
formats.append({'url': format_url})
elif ext == 'm3u8':
formats.extend(self._extract_m3u8_formats(
format_url, video_id, 'mp4',
entry_protocol='m3u8_native', m3u8_id='hls', fatal=False))
self._sort_formats(formats)
return {
'id': video_id,
'formats': formats,
'title': video_title,
'thumbnail': video_thumbnail,
'age_limit': 18,
}
| daineseh/kodi-plugin.video.ted-talks-chinese | youtube_dl/extractor/xvideos.py | Python | gpl-2.0 | 2,444 | 0.001227 |
# ===========================================================================
# eXe
# Copyright 2004-2005, University of Auckland
# Copyright 2004-2008 eXe Project, http://eXeLearning.org/
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# ===========================================================================
"""
Export Pages functions
"""
import logging
from urllib import quote
from exe.webui import common
log = logging.getLogger(__name__)
# ===========================================================================
class Page(object):
"""
This is an abstraction for a page containing a node
e.g. in a SCORM package or Website
"""
def __init__(self, name, depth, node):
"""
Initialize
"""
self.name = name
self.depth = depth
self.node = node
def renderLicense(self):
"""
Returns an XHTML string rendering the license.
"""
licenses = {"license GFDL": "http://www.gnu.org/copyleft/fdl.html",
"creative commons: attribution 2.5": "http://creativecommons.org/licenses/by/2.5/",
"creative commons: attribution - share alike 2.5": "http://creativecommons.org/licenses/by-sa/2.5/",
"creative commons: attribution - non derived work 2.5": "http://creativecommons.org/licenses/by-nd/2.5/",
"creative commons: attribution - non commercial 2.5": "http://creativecommons.org/licenses/by-nc/2.5/",
"creative commons: attribution - non commercial - share alike 2.5": "http://creativecommons.org/licenses/by-nc-sa/2.5/",
"creative commons: attribution - non derived work - non commercial 2.5": "http://creativecommons.org/licenses/by-nc-nd/2.5/",
"creative commons: attribution 3.0": "http://creativecommons.org/licenses/by/3.0/",
"creative commons: attribution - share alike 3.0": "http://creativecommons.org/licenses/by-sa/3.0/",
"creative commons: attribution - non derived work 3.0": "http://creativecommons.org/licenses/by-nd/3.0/",
"creative commons: attribution - non commercial 3.0": "http://creativecommons.org/licenses/by-nc/3.0/",
"creative commons: attribution - non commercial - share alike 3.0": "http://creativecommons.org/licenses/by-nc-sa/3.0/",
"creative commons: attribution - non derived work - non commercial 3.0": "http://creativecommons.org/licenses/by-nc-nd/3.0/",
"creative commons: attribution 4.0": "http://creativecommons.org/licenses/by/4.0/",
"creative commons: attribution - share alike 4.0": "http://creativecommons.org/licenses/by-sa/4.0/",
"creative commons: attribution - non derived work 4.0": "http://creativecommons.org/licenses/by-nd/4.0/",
"creative commons: attribution - non commercial 4.0": "http://creativecommons.org/licenses/by-nc/4.0/",
"creative commons: attribution - non commercial - share alike 4.0": "http://creativecommons.org/licenses/by-nc-sa/4.0/",
"creative commons: attribution - non derived work - non commercial 4.0": "http://creativecommons.org/licenses/by-nc-nd/4.0/",
"free software license GPL": "http://www.gnu.org/copyleft/gpl.html"
}
licenses_names = {"license GFDL": c_("GNU Free Documentation License"),
"creative commons: attribution 2.5": c_("Creative Commons Attribution License 2.5"),
"creative commons: attribution - share alike 2.5": c_("Creative Commons Attribution Share Alike License 2.5"),
"creative commons: attribution - non derived work 2.5": c_("Creative Commons Attribution No Derivatives License 2.5"),
"creative commons: attribution - non commercial 2.5": c_("Creative Commons Attribution Non-commercial License 2.5"),
"creative commons: attribution - non commercial - share alike 2.5": c_("Creative Commons Attribution Non-commercial Share Alike License 2.5"),
"creative commons: attribution - non derived work - non commercial 2.5": c_("Creative Commons Attribution Non-commercial No Derivatives License 2.5"),
"creative commons: attribution 3.0": c_("Creative Commons Attribution License 3.0"),
"creative commons: attribution - share alike 3.0": c_("Creative Commons Attribution Share Alike License 3.0"),
"creative commons: attribution - non derived work 3.0": c_("Creative Commons Attribution No Derivatives License 3.0"),
"creative commons: attribution - non commercial 3.0": c_("Creative Commons Attribution Non-commercial License 3.0"),
"creative commons: attribution - non commercial - share alike 3.0": c_("Creative Commons Attribution Non-commercial Share Alike License 3.0"),
"creative commons: attribution - non derived work - non commercial 3.0": c_("Creative Commons Attribution Non-commercial No Derivatives License 3.0"),
"creative commons: attribution 4.0": c_("Creative Commons Attribution License 4.0"),
"creative commons: attribution - share alike 4.0": c_("Creative Commons Attribution Share Alike License 4.0"),
"creative commons: attribution - non derived work 4.0": c_("Creative Commons Attribution No Derivatives License 4.0"),
"creative commons: attribution - non commercial 4.0": c_("Creative Commons Attribution Non-commercial License 4.0"),
"creative commons: attribution - non commercial - share alike 4.0": c_("Creative Commons Attribution Non-commercial Share Alike License 4.0"),
"creative commons: attribution - non derived work - non commercial 4.0": c_("Creative Commons Attribution Non-commercial No Derivatives License 4.0"),
"free software license GPL": c_("GNU General Public License")
}
html = ""
plicense = self.node.package.license
if plicense in licenses:
html += '<p align="center">'
html += c_("Licensed under the")
html += ' <a rel="license" href="%s">%s</a>' % (licenses[plicense], licenses_names[plicense])
if plicense == 'license GFDL':
html += ' <a href="fdl.html">(%s)</a>' % c_('Local Version')
html += '</p>'
return html
def renderFooter(self):
"""
Returns an XHTML string rendering the footer.
"""
dT = common.getExportDocType()
footerTag = "div"
if dT == "HTML5":
footerTag = "footer"
html = ""
if self.node.package.footer != "":
html += '<' + footerTag + ' id="siteFooter">'
html += self.node.package.footer + "</" + footerTag + ">"
return html
# ===========================================================================
def uniquifyNames(pages):
"""
Make sure all the page names are unique
"""
pageNames = {}
# First identify the duplicate names
for page in pages:
if page.name in pageNames:
pageNames[page.name] = 1
else:
pageNames[page.name] = 0
# Then uniquify them
for page in pages:
uniquifier = pageNames[page.name]
if uniquifier:
pageNames[page.name] = uniquifier + 1
page.name += unicode(uniquifier)
# for export, temporarily set this unique name on the node itself,
# such that any links to it can use the proper target; also
# including the quote() & ".html", as per WebsitePage's:
page.node.tmp_export_filename = quote(page.name) + ".html"
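# Illustration only (added for clarity, not part of the original module): a
# minimal sketch of the two-pass uniquification idea used by uniquifyNames()
# above. _DemoPage is a hypothetical stand-in; real exe pages also carry a
# node and receive a tmp_export_filename.
class _DemoPage(object):
    def __init__(self, name):
        self.name = name
def _demoUniquifyNames(pages):
    seen = {}
    for page in pages:
        # First pass: flag any name that occurs more than once.
        seen[page.name] = 1 if page.name in seen else 0
    for page in pages:
        # Second pass: append an increasing counter to each duplicate.
        suffix = seen[page.name]
        if suffix:
            seen[page.name] = suffix + 1
            page.name += str(suffix)
    return [page.name for page in pages]
# _demoUniquifyNames([_DemoPage('index'), _DemoPage('index'), _DemoPage('about')])
# returns ['index1', 'index2', 'about'].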
| kohnle-lernmodule/exe201based | exe/export/pages.py | Python | gpl-2.0 | 8,659 | 0.004735 |
# Copyright 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from essential.config import cfg
CONF = cfg.CONF
opt = cfg.StrOpt('foo')
CONF.register_opt(opt, group='fbar')
| gaolichuang/py-essential | tests/testmods/fbar_foo_opt.py | Python | apache-2.0 | 719 | 0 |
"""
Created on 4/18/17
@author: Numan Laanait -- nlaanait@gmail.com
"""
#MIT License
#Copyright (c) 2017 Numan Laanait
#Permission is hereby granted, free of charge, to any person obtaining a copy
#of this software and associated documentation files (the "Software"), to deal
#in the Software without restriction, including without limitation the rights
#to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#copies of the Software, and to permit persons to whom the Software is
#furnished to do so, subject to the following conditions:
#The above copyright notice and this permission notice shall be included in all
#copies or substantial portions of the Software.
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
#SOFTWARE.
import pyxrim as px
import os
# define data paths
datadir = os.path.join(os.getcwd(),'data')
specfile = os.path.join(datadir,'BFO_STO_1_1.spec')
imagedir = os.path.join(datadir,'images')
# load ioHDF5
io = px.ioHDF5('test.h5')
io.scans_export(specfile,imagedir)
io.close()
| nlaanait/pyxrim | examples/hdf5_export.py | Python | mit | 1,439 | 0.014593 |
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for CreateWorkload
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-assured-workloads
# [START assuredworkloads_v1_generated_AssuredWorkloadsService_CreateWorkload_async]
from google.cloud import assuredworkloads_v1
async def sample_create_workload():
# Create a client
client = assuredworkloads_v1.AssuredWorkloadsServiceAsyncClient()
# Initialize request argument(s)
workload = assuredworkloads_v1.Workload()
workload.display_name = "display_name_value"
workload.compliance_regime = "CA_REGIONS_AND_SUPPORT"
workload.billing_account = "billing_account_value"
request = assuredworkloads_v1.CreateWorkloadRequest(
parent="parent_value",
workload=workload,
)
# Make the request
operation = client.create_workload(request=request)
print("Waiting for operation to complete...")
response = await operation.result()
# Handle the response
print(response)
# [END assuredworkloads_v1_generated_AssuredWorkloadsService_CreateWorkload_async]
| googleapis/python-assured-workloads | samples/generated_samples/assuredworkloads_v1_generated_assured_workloads_service_create_workload_async.py | Python | apache-2.0 | 1,877 | 0.001598 |
import numpy as np
from skimage import io
def read_image(fn, normalize=True):
"""Read a CCD/CMOS image in .da format (Redshirt). [1_]
Parameters
----------
fn : string
The input filename.
Returns
-------
images : array, shape (nrow, ncol, nframes)
The images (normalized by the dark frame if desired).
frame_interval : float
The time elapsed between frames, in milliseconds.
    bnc : array, shape (8, nframes * acquisition_ratio)
        The BNC data (8 channels).
dark_frame : array, shape (nrow, ncol)
The dark frame by which the image data should be normalized.
Notes
-----
Interlaced images, as produced by the option "write directly to disk",
are not currently supported.
References
----------
.. [1] http://www.redshirtimaging.com/support/dfo.html
"""
data = np.fromfile(fn, dtype=np.int16)
header_size = 2560
header = data[:header_size]
ncols, nrows = map(int, header[384:386]) # prevent int16 overflow
nframes = int(header[4])
frame_interval = header[388] / 1000
acquisition_ratio = header[391]
if frame_interval >= 10:
frame_interval *= header[390] # dividing factor
image_size = nrows * ncols * nframes
bnc_start = header_size + image_size
images = np.reshape(np.array(data[header_size:bnc_start]),
(nrows, ncols, nframes))
bnc_end = bnc_start + 8 * acquisition_ratio * nframes
bnc = np.reshape(np.array(data[bnc_start:bnc_end]), (8, nframes * acquisition_ratio))
dark_frame = np.reshape(np.array(data[bnc_end:-8]), (nrows, ncols))
if normalize:
images -= dark_frame[..., np.newaxis]
return images, frame_interval, bnc, dark_frame
def convert_images(fns, normalize=True):
for fn in fns:
image, frame_interval, bnc, dark_frame = read_image(fn, normalize)
out_fn = fn[:-3] + '.tif'
out_fn_dark = fn[:-3] + '.dark_frame.tif'
io.imsave(out_fn, np.transpose(image, (2, 0, 1)),
plugin='tifffile', compress=1)
io.imsave(out_fn_dark, dark_frame, plugin='tifffile', compress=1)
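# Illustration only (not part of the original module): a minimal usage sketch.
# 'recording.da' is a hypothetical filename; a real Redshirt .da file must
# exist for this to run.
if __name__ == '__main__':
    images, frame_interval_ms, bnc, dark_frame = read_image('recording.da')
    print(images.shape)        # (nrow, ncol, nframes)
    print(frame_interval_ms)   # time between frames, in milliseconds
    # Writes recording.tif and recording.dark_frame.tif next to the input.
    convert_images(['recording.da'], normalize=True)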
| jni/python-redshirt | redshirt/read.py | Python | mit | 2,123 | 0.000471 |
# -*- coding: utf-8 -*-
"""
Version code adopted from Django development version.
https://github.com/django/django
"""
VERSION = (0, 7, 2, 'final', 0)
def get_version(version=None):
"""
Returns a PEP 386-compliant version number from VERSION.
"""
if version is None:
from modeltranslation import VERSION as version
else:
assert len(version) == 5
assert version[3] in ('alpha', 'beta', 'rc', 'final')
# Now build the two parts of the version number:
# main = X.Y[.Z]
# sub = .devN - for pre-alpha releases
# | {a|b|c}N - for alpha, beta and rc releases
parts = 2 if version[2] == 0 else 3
main = '.'.join(str(x) for x in version[:parts])
sub = ''
if version[3] == 'alpha' and version[4] == 0:
git_changeset = get_git_changeset()
if git_changeset:
sub = '.dev%s' % git_changeset
elif version[3] != 'final':
mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'c'}
sub = mapping[version[3]] + str(version[4])
return str(main + sub)
def get_git_changeset():
"""
Returns a numeric identifier of the latest git changeset.
The result is the UTC timestamp of the changeset in YYYYMMDDHHMMSS format.
This value isn't guaranteed to be unique, but collisions are very unlikely,
so it's sufficient for generating the development version numbers.
TODO: Check if we can rely on services like read-the-docs to pick this up.
"""
import datetime
import os
import subprocess
repo_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
git_log = subprocess.Popen(
'git log --pretty=format:%ct --quiet -1 HEAD', stdout=subprocess.PIPE,
stderr=subprocess.PIPE, shell=True, cwd=repo_dir,
universal_newlines=True)
timestamp = git_log.communicate()[0]
try:
timestamp = datetime.datetime.utcfromtimestamp(int(timestamp))
except ValueError:
return None
return timestamp.strftime('%Y%m%d%H%M%S')
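# Illustration only (not part of the original module): formatting examples for
# get_version(). The 0.8 beta tuple is hypothetical, not an actual release.
if __name__ == '__main__':
    assert get_version((0, 7, 2, 'final', 0)) == '0.7.2'   # X.Y.Z for finals
    assert get_version((0, 8, 0, 'beta', 1)) == '0.8b1'    # a/b/c for pre-releases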
| yaroslavprogrammer/django-modeltranslation | modeltranslation/__init__.py | Python | bsd-3-clause | 2,020 | 0 |
# ******************************************************************************
# Copyright 2014-2018 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ******************************************************************************
from neon.initializers import Constant, Gaussian
from neon.layers import Conv, Dropout, Pooling, Affine, GeneralizedCost
from neon.models import Model
from neon.transforms import Rectlin, Softmax, CrossEntropyMulti
def create_network():
# weight initialization
g1 = Gaussian(scale=0.01)
g5 = Gaussian(scale=0.005)
c0 = Constant(0)
c1 = Constant(1)
# model initialization
padding = {'pad_d': 1, 'pad_h': 1, 'pad_w': 1}
strides = {'str_d': 2, 'str_h': 2, 'str_w': 2}
layers = [
Conv((3, 3, 3, 64), padding=padding, init=g1, bias=c0, activation=Rectlin()),
Pooling((1, 2, 2), strides={'str_d': 1, 'str_h': 2, 'str_w': 2}),
Conv((3, 3, 3, 128), padding=padding, init=g1, bias=c1, activation=Rectlin()),
Pooling((2, 2, 2), strides=strides),
Conv((3, 3, 3, 256), padding=padding, init=g1, bias=c1, activation=Rectlin()),
Pooling((2, 2, 2), strides=strides),
Conv((3, 3, 3, 256), padding=padding, init=g1, bias=c1, activation=Rectlin()),
Pooling((2, 2, 2), strides=strides),
Conv((3, 3, 3, 256), padding=padding, init=g1, bias=c1, activation=Rectlin()),
Pooling((2, 2, 2), strides=strides),
Affine(nout=2048, init=g5, bias=c1, activation=Rectlin()),
Dropout(keep=0.5),
Affine(nout=2048, init=g5, bias=c1, activation=Rectlin()),
Dropout(keep=0.5),
Affine(nout=101, init=g1, bias=c0, activation=Softmax())
]
return Model(layers=layers), GeneralizedCost(costfunc=CrossEntropyMulti())
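# Illustration only (not part of the original example): build the model and
# cost defined above. Assumes neon is installed; the CPU backend and batch
# size below are arbitrary choices, not values taken from the example.
if __name__ == '__main__':
    from neon.backends import gen_backend
    be = gen_backend(backend='cpu', batch_size=32)  # layers need a live backend
    model, cost = create_network()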
| NervanaSystems/neon | examples/video-c3d/network.py | Python | apache-2.0 | 2,289 | 0.002184 |
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import copy
import ntpath
import os
import posixpath
import re
import subprocess
import sys
import gyp.common
import gyp.easy_xml as easy_xml
import gyp.generator.ninja as ninja_generator
import gyp.MSVSNew as MSVSNew
import gyp.MSVSProject as MSVSProject
import gyp.MSVSSettings as MSVSSettings
import gyp.MSVSToolFile as MSVSToolFile
import gyp.MSVSUserFile as MSVSUserFile
import gyp.MSVSUtil as MSVSUtil
import gyp.MSVSVersion as MSVSVersion
from gyp.common import GypError
from gyp.common import OrderedSet
# TODO: Remove once bots are on 2.7, http://crbug.com/241769
def _import_OrderedDict():
import collections
try:
return collections.OrderedDict
except AttributeError:
import gyp.ordered_dict
return gyp.ordered_dict.OrderedDict
OrderedDict = _import_OrderedDict()
# Regular expression for validating Visual Studio GUIDs. If the GUID
# contains lowercase hex letters, MSVS will be fine. However,
# IncrediBuild BuildConsole will parse the solution file, but then
# silently skip building the target causing hard to track down errors.
# Note that this only happens with the BuildConsole, and does not occur
# if IncrediBuild is executed from inside Visual Studio. This regex
# validates that the string looks like a GUID with all uppercase hex
# letters.
VALID_MSVS_GUID_CHARS = re.compile(r'^[A-F0-9\-]+$')
generator_default_variables = {
'DRIVER_PREFIX': '',
'DRIVER_SUFFIX': '.sys',
'EXECUTABLE_PREFIX': '',
'EXECUTABLE_SUFFIX': '.exe',
'STATIC_LIB_PREFIX': '',
'SHARED_LIB_PREFIX': '',
'STATIC_LIB_SUFFIX': '.lib',
'SHARED_LIB_SUFFIX': '.dll',
'INTERMEDIATE_DIR': '$(IntDir)',
'SHARED_INTERMEDIATE_DIR': '$(OutDir)obj/global_intermediate',
'OS': 'win',
'PRODUCT_DIR': '$(OutDir)',
'LIB_DIR': '$(OutDir)lib',
'RULE_INPUT_ROOT': '$(InputName)',
'RULE_INPUT_DIRNAME': '$(InputDir)',
'RULE_INPUT_EXT': '$(InputExt)',
'RULE_INPUT_NAME': '$(InputFileName)',
'RULE_INPUT_PATH': '$(InputPath)',
'CONFIGURATION_NAME': '$(ConfigurationName)',
}
# The msvs specific sections that hold paths
generator_additional_path_sections = [
'msvs_cygwin_dirs',
'msvs_props',
]
generator_additional_non_configuration_keys = [
'msvs_cygwin_dirs',
'msvs_cygwin_shell',
'msvs_large_pdb',
'msvs_shard',
'msvs_external_builder',
'msvs_external_builder_out_dir',
'msvs_external_builder_build_cmd',
'msvs_external_builder_clean_cmd',
'msvs_external_builder_clcompile_cmd',
'msvs_enable_winrt',
'msvs_requires_importlibrary',
'msvs_enable_winphone',
'msvs_application_type_revision',
'msvs_target_platform_version',
'msvs_target_platform_minversion',
]
generator_filelist_paths = None
# List of precompiled header related keys.
precomp_keys = [
'msvs_precompiled_header',
'msvs_precompiled_source',
]
cached_username = None
cached_domain = None
# TODO(gspencer): Switch the os.environ calls to be
# win32api.GetDomainName() and win32api.GetUserName() once the
# python version in depot_tools has been updated to work on Vista
# 64-bit.
def _GetDomainAndUserName():
if sys.platform not in ('win32', 'cygwin'):
return ('DOMAIN', 'USERNAME')
global cached_username
global cached_domain
if not cached_domain or not cached_username:
domain = os.environ.get('USERDOMAIN')
username = os.environ.get('USERNAME')
if not domain or not username:
call = subprocess.Popen(['net', 'config', 'Workstation'],
stdout=subprocess.PIPE)
config = call.communicate()[0]
username_re = re.compile(r'^User name\s+(\S+)', re.MULTILINE)
username_match = username_re.search(config)
if username_match:
username = username_match.group(1)
domain_re = re.compile(r'^Logon domain\s+(\S+)', re.MULTILINE)
domain_match = domain_re.search(config)
if domain_match:
domain = domain_match.group(1)
cached_domain = domain
cached_username = username
return (cached_domain, cached_username)
fixpath_prefix = None
def _NormalizedSource(source):
"""Normalize the path.
But not if that gets rid of a variable, as this may expand to something
larger than one directory.
Arguments:
    source: The path to be normalized.
Returns:
The normalized path.
"""
normalized = os.path.normpath(source)
if source.count('$') == normalized.count('$'):
source = normalized
return source
def _FixPath(path):
"""Convert paths to a form that will make sense in a vcproj file.
Arguments:
path: The path to convert, may contain / etc.
Returns:
The path with all slashes made into backslashes.
"""
if fixpath_prefix and path and not os.path.isabs(path) and not path[0] == '$':
path = os.path.join(fixpath_prefix, path)
path = path.replace('/', '\\')
path = _NormalizedSource(path)
if path and path[-1] == '\\':
path = path[:-1]
return path
def _FixPaths(paths):
"""Fix each of the paths of the list."""
return [_FixPath(i) for i in paths]
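# Illustration only (not part of upstream gyp): with no fixpath_prefix set,
# _FixPath normalizes separators to backslashes and drops a trailing slash.
# This hypothetical helper is never called by the generator.
def _ExampleFixPath():
  assert _FixPath('a/b/c/') == 'a\\b\\c'
  assert _FixPaths(['x/y', '$(OutDir)/z']) == ['x\\y', '$(OutDir)\\z']
  return _FixPath('a/b/c/')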
def _ConvertSourcesToFilterHierarchy(sources, prefix=None, excluded=None,
list_excluded=True, msvs_version=None):
"""Converts a list split source file paths into a vcproj folder hierarchy.
Arguments:
sources: A list of source file paths split.
prefix: A list of source file path layers meant to apply to each of sources.
excluded: A set of excluded files.
msvs_version: A MSVSVersion object.
Returns:
A hierarchy of filenames and MSVSProject.Filter objects that matches the
layout of the source tree.
For example:
_ConvertSourcesToFilterHierarchy([['a', 'bob1.c'], ['b', 'bob2.c']],
prefix=['joe'])
-->
[MSVSProject.Filter('a', contents=['joe\\a\\bob1.c']),
MSVSProject.Filter('b', contents=['joe\\b\\bob2.c'])]
"""
if not prefix: prefix = []
result = []
excluded_result = []
folders = OrderedDict()
# Gather files into the final result, excluded, or folders.
for s in sources:
if len(s) == 1:
filename = _NormalizedSource('\\'.join(prefix + s))
if filename in excluded:
excluded_result.append(filename)
else:
result.append(filename)
elif msvs_version and not msvs_version.UsesVcxproj():
# For MSVS 2008 and earlier, we need to process all files before walking
# the sub folders.
if not folders.get(s[0]):
folders[s[0]] = []
folders[s[0]].append(s[1:])
else:
contents = _ConvertSourcesToFilterHierarchy([s[1:]], prefix + [s[0]],
excluded=excluded,
list_excluded=list_excluded,
msvs_version=msvs_version)
contents = MSVSProject.Filter(s[0], contents=contents)
result.append(contents)
# Add a folder for excluded files.
if excluded_result and list_excluded:
excluded_folder = MSVSProject.Filter('_excluded_files',
contents=excluded_result)
result.append(excluded_folder)
if msvs_version and msvs_version.UsesVcxproj():
return result
# Populate all the folders.
for f in folders:
contents = _ConvertSourcesToFilterHierarchy(folders[f], prefix=prefix + [f],
excluded=excluded,
list_excluded=list_excluded,
msvs_version=msvs_version)
contents = MSVSProject.Filter(f, contents=contents)
result.append(contents)
return result
def _ToolAppend(tools, tool_name, setting, value, only_if_unset=False):
if not value: return
_ToolSetOrAppend(tools, tool_name, setting, value, only_if_unset)
def _ToolSetOrAppend(tools, tool_name, setting, value, only_if_unset=False):
# TODO(bradnelson): ugly hack, fix this more generally!!!
if 'Directories' in setting or 'Dependencies' in setting:
if type(value) == str:
value = value.replace('/', '\\')
else:
value = [i.replace('/', '\\') for i in value]
if not tools.get(tool_name):
tools[tool_name] = dict()
tool = tools[tool_name]
if 'CompileAsWinRT' == setting:
return
if tool.get(setting):
if only_if_unset: return
if type(tool[setting]) == list and type(value) == list:
tool[setting] += value
else:
raise TypeError(
'Appending "%s" to a non-list setting "%s" for tool "%s" is '
'not allowed, previous value: %s' % (
value, setting, tool_name, str(tool[setting])))
else:
tool[setting] = value
def _ConfigTargetVersion(config_data):
return config_data.get('msvs_target_version', 'Windows7')
def _ConfigPlatform(config_data):
return config_data.get('msvs_configuration_platform', 'Win32')
def _ConfigBaseName(config_name, platform_name):
if config_name.endswith('_' + platform_name):
return config_name[0:-len(platform_name) - 1]
else:
return config_name
def _ConfigFullName(config_name, config_data):
platform_name = _ConfigPlatform(config_data)
return '%s|%s' % (_ConfigBaseName(config_name, platform_name), platform_name)
def _ConfigWindowsTargetPlatformVersion(config_data):
ver = config_data.get('msvs_windows_sdk_version')
for key in [r'HKLM\Software\Microsoft\Microsoft SDKs\Windows\%s',
r'HKLM\Software\Wow6432Node\Microsoft\Microsoft SDKs\Windows\%s']:
sdk_dir = MSVSVersion._RegistryGetValue(key % ver, 'InstallationFolder')
if not sdk_dir:
continue
version = MSVSVersion._RegistryGetValue(key % ver, 'ProductVersion') or ''
# Find a matching entry in sdk_dir\include.
names = sorted([x for x in os.listdir(r'%s\include' % sdk_dir)
if x.startswith(version)], reverse=True)
return names[0]
def _BuildCommandLineForRuleRaw(spec, cmd, cygwin_shell, has_input_path,
                                quote_cmd, do_setup_env, attached_to=''):
if [x for x in cmd if '$(InputDir)' in x]:
input_dir_preamble = (
'set INPUTDIR=$(InputDir)\n'
'if NOT DEFINED INPUTDIR set INPUTDIR=.\\\n'
'set INPUTDIR=%INPUTDIR:~0,-1%\n'
)
else:
input_dir_preamble = ''
if cygwin_shell:
# Find path to cygwin.
cygwin_dir = _FixPath(spec.get('msvs_cygwin_dirs', ['.'])[0])
# Prepare command.
direct_cmd = cmd
direct_cmd = [i.replace('$(IntDir)',
'`cygpath -m "${INTDIR}"`') for i in direct_cmd]
direct_cmd = [i.replace('$(OutDir)',
'`cygpath -m "${OUTDIR}"`') for i in direct_cmd]
direct_cmd = [i.replace('$(InputDir)',
'`cygpath -m "${INPUTDIR}"`') for i in direct_cmd]
if has_input_path:
direct_cmd = [i.replace('$(InputPath)',
'`cygpath -m "${INPUTPATH}"`')
for i in direct_cmd]
direct_cmd = ['\\"%s\\"' % i.replace('"', '\\\\\\"') for i in direct_cmd]
# direct_cmd = gyp.common.EncodePOSIXShellList(direct_cmd)
direct_cmd = ' '.join(direct_cmd)
# TODO(quote): regularize quoting path names throughout the module
cmd = ''
if do_setup_env:
cmd += 'call "$(ProjectDir)%(cygwin_dir)s\\setup_env.bat" && '
cmd += 'set CYGWIN=nontsec&& '
if direct_cmd.find('NUMBER_OF_PROCESSORS') >= 0:
cmd += 'set /a NUMBER_OF_PROCESSORS_PLUS_1=%%NUMBER_OF_PROCESSORS%%+1&& '
if direct_cmd.find('INTDIR') >= 0:
cmd += 'set INTDIR=$(IntDir)&& '
if direct_cmd.find('OUTDIR') >= 0:
cmd += 'set OUTDIR=$(OutDir)&& '
if has_input_path and direct_cmd.find('INPUTPATH') >= 0:
cmd += 'set INPUTPATH=$(InputPath) && '
cmd += 'bash -c "%(cmd)s"'
cmd = cmd % {'cygwin_dir': cygwin_dir,
'cmd': direct_cmd}
return input_dir_preamble + cmd
else:
# Convert cat --> type to mimic unix.
if cmd[0] == 'cat':
command = ['type']
else:
command = [cmd[0].replace('/', '\\')]
# Add call before command to ensure that commands can be tied together one
# after the other without aborting in Incredibuild, since IB makes a bat
# file out of the raw command string, and some commands (like python) are
# actually batch files themselves.
command.insert(0, 'call')
# Fix the paths
# TODO(quote): This is a really ugly heuristic, and will miss path fixing
# for arguments like "--arg=path" or "/opt:path".
# If the argument starts with a slash or dash, it's probably a command line
# switch
arguments=[]
CWD = _NormalizedSource(".")
ccwd = _FixPath(".")
for i in cmd[1:]:
guessfn = os.path.join(CWD, os.path.dirname(attached_to), i)
if (i[:1] == "-" or ( "/" not in i and not os.path.exists(guessfn))):
arguments.append(i)
else:
arguments.append(_FixPath(i))
arguments = [i.replace('$(InputDir)', '%INPUTDIR%') for i in arguments]
arguments = [MSVSSettings.FixVCMacroSlashes(i) for i in arguments]
if quote_cmd:
# Support a mode for using cmd directly.
# Convert any paths to native form (first element is used directly).
# TODO(quote): regularize quoting path names throughout the module
arguments = ['"%s"' % i for i in arguments]
# Collapse into a single command.
return input_dir_preamble + ' '.join(command + arguments)
def _BuildCommandLineForRule(spec, rule, has_input_path, do_setup_env,
                             attached_to=''):
# Currently this weird argument munging is used to duplicate the way a
# python script would need to be run as part of the chrome tree.
# Eventually we should add some sort of rule_default option to set this
# per project. For now the behavior chrome needs is the default.
mcs = rule.get('msvs_cygwin_shell')
if mcs is None:
mcs = int(spec.get('msvs_cygwin_shell', 1))
elif isinstance(mcs, str):
mcs = int(mcs)
quote_cmd = int(rule.get('msvs_quote_cmd', 1))
  mcs = 0  # Force the non-cygwin path; the cygwin wrapper is much slower.
return _BuildCommandLineForRuleRaw(spec, rule['action'], mcs, has_input_path,
quote_cmd, do_setup_env=do_setup_env,
attached_to=attached_to)
def _AddActionStep(actions_dict, inputs, outputs, description, command):
"""Merge action into an existing list of actions.
Care must be taken so that actions which have overlapping inputs either don't
get assigned to the same input, or get collapsed into one.
Arguments:
actions_dict: dictionary keyed on input name, which maps to a list of
dicts describing the actions attached to that input file.
inputs: list of inputs
outputs: list of outputs
description: description of the action
command: command line to execute
"""
# Require there to be at least one input (call sites will ensure this).
assert inputs
action = {
'inputs': inputs,
'outputs': outputs,
'description': description,
'command': command,
}
# Pick where to stick this action.
# While less than optimal in terms of build time, attach them to the first
# input for now.
chosen_input = inputs[0]
# Add it there.
if chosen_input not in actions_dict:
actions_dict[chosen_input] = []
actions_dict[chosen_input].append(action)
def _AddCustomBuildToolForMSVS(p, spec, primary_input,
inputs, outputs, description, cmd):
"""Add a custom build tool to execute something.
Arguments:
p: the target project
spec: the target project dict
primary_input: input file to attach the build tool to
inputs: list of inputs
outputs: list of outputs
description: description of the action
cmd: command line to execute
"""
inputs = _FixPaths(inputs)
outputs = _FixPaths(outputs)
tool = MSVSProject.Tool(
'VCCustomBuildTool',
{'Description': description,
'AdditionalDependencies': ';'.join(inputs),
'Outputs': ';'.join(outputs),
'CommandLine': cmd,
})
# Add to the properties of primary input for each config.
for config_name, c_data in spec['configurations'].iteritems():
p.AddFileConfig(_FixPath(primary_input),
_ConfigFullName(config_name, c_data), tools=[tool])
def _AddAccumulatedActionsToMSVS(p, spec, actions_dict):
"""Add actions accumulated into an actions_dict, merging as needed.
Arguments:
p: the target project
spec: the target project dict
actions_dict: dictionary keyed on input name, which maps to a list of
dicts describing the actions attached to that input file.
"""
for primary_input in actions_dict:
inputs = OrderedSet()
outputs = OrderedSet()
descriptions = []
commands = []
for action in actions_dict[primary_input]:
inputs.update(OrderedSet(action['inputs']))
outputs.update(OrderedSet(action['outputs']))
descriptions.append(action['description'])
commands.append(action['command'])
# Add the custom build step for one input file.
description = ', and also '.join(descriptions)
command = '\r\n'.join(commands)
_AddCustomBuildToolForMSVS(p, spec,
primary_input=primary_input,
inputs=inputs,
outputs=outputs,
description=description,
cmd=command)
def _RuleExpandPath(path, input_file):
"""Given the input file to which a rule applied, string substitute a path.
Arguments:
path: a path to string expand
input_file: the file to which the rule applied.
Returns:
The string substituted path.
"""
path = path.replace('$(InputName)',
os.path.splitext(os.path.split(input_file)[1])[0])
path = path.replace('$(InputDir)', os.path.dirname(input_file))
path = path.replace('$(InputExt)',
os.path.splitext(os.path.split(input_file)[1])[1])
path = path.replace('$(InputFileName)', os.path.split(input_file)[1])
path = path.replace('$(InputPath)', input_file)
return path
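# Illustration only (not part of upstream gyp): how the $(Input*) macros are
# substituted by _RuleExpandPath. This hypothetical helper is never called by
# the generator.
def _ExampleRuleExpansion():
  assert _RuleExpandPath('$(InputName)_generated.cc', 'chrome/foo.idl') == \
      'foo_generated.cc'
  assert _RuleExpandPath('$(InputDir)/$(InputFileName)', 'chrome/foo.idl') == \
      'chrome/foo.idl'
  return _RuleExpandPath('$(InputPath)', 'chrome/foo.idl')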
def _FindRuleTriggerFiles(rule, sources):
"""Find the list of files which a particular rule applies to.
Arguments:
rule: the rule in question
sources: the set of all known source files for this project
Returns:
The list of sources that trigger a particular rule.
"""
return rule.get('rule_sources', [])
def _RuleInputsAndOutputs(rule, trigger_file):
"""Find the inputs and outputs generated by a rule.
Arguments:
rule: the rule in question.
trigger_file: the main trigger for this rule.
Returns:
The pair of (inputs, outputs) involved in this rule.
"""
raw_inputs = _FixPaths(rule.get('inputs', []))
raw_outputs = _FixPaths(rule.get('outputs', []))
inputs = OrderedSet()
outputs = OrderedSet()
inputs.add(trigger_file)
for i in raw_inputs:
inputs.add(_RuleExpandPath(i, trigger_file))
for o in raw_outputs:
outputs.add(_RuleExpandPath(o, trigger_file))
return (inputs, outputs)
def _GenerateNativeRulesForMSVS(p, rules, output_dir, spec, options):
"""Generate a native rules file.
Arguments:
p: the target project
rules: the set of rules to include
output_dir: the directory in which the project/gyp resides
spec: the project dict
options: global generator options
"""
rules_filename = '%s%s.rules' % (spec['target_name'],
options.suffix)
rules_file = MSVSToolFile.Writer(os.path.join(output_dir, rules_filename),
spec['target_name'])
# Add each rule.
for r in rules:
rule_name = r['rule_name']
rule_ext = r['extension']
inputs = _FixPaths(r.get('inputs', []))
outputs = _FixPaths(r.get('outputs', []))
# Skip a rule with no action and no inputs.
if 'action' not in r and not r.get('rule_sources', []):
continue
cmd = _BuildCommandLineForRule(spec, r, has_input_path=True,
do_setup_env=True)
rules_file.AddCustomBuildRule(name=rule_name,
description=r.get('message', rule_name),
extensions=[rule_ext],
additional_dependencies=inputs,
outputs=outputs,
cmd=cmd)
# Write out rules file.
rules_file.WriteIfChanged()
# Add rules file to project.
p.AddToolFile(rules_filename)
def _Cygwinify(path):
path = path.replace('$(OutDir)', '$(OutDirCygwin)')
path = path.replace('$(IntDir)', '$(IntDirCygwin)')
return path
def _GenerateExternalRules(rules, output_dir, spec,
sources, options, actions_to_add):
"""Generate an external makefile to do a set of rules.
Arguments:
rules: the list of rules to include
output_dir: path containing project and gyp files
spec: project specification data
sources: set of sources known
options: global generator options
actions_to_add: The list of actions we will add to.
"""
filename = '%s_rules%s.mk' % (spec['target_name'], options.suffix)
mk_file = gyp.common.WriteOnDiff(os.path.join(output_dir, filename))
# Find cygwin style versions of some paths.
mk_file.write('OutDirCygwin:=$(shell cygpath -u "$(OutDir)")\n')
mk_file.write('IntDirCygwin:=$(shell cygpath -u "$(IntDir)")\n')
# Gather stuff needed to emit all: target.
all_inputs = OrderedSet()
all_outputs = OrderedSet()
all_output_dirs = OrderedSet()
first_outputs = []
for rule in rules:
trigger_files = _FindRuleTriggerFiles(rule, sources)
for tf in trigger_files:
inputs, outputs = _RuleInputsAndOutputs(rule, tf)
all_inputs.update(OrderedSet(inputs))
all_outputs.update(OrderedSet(outputs))
# Only use one target from each rule as the dependency for
# 'all' so we don't try to build each rule multiple times.
first_outputs.append(list(outputs)[0])
# Get the unique output directories for this rule.
output_dirs = [os.path.split(i)[0] for i in outputs]
for od in output_dirs:
all_output_dirs.add(od)
first_outputs_cyg = [_Cygwinify(i) for i in first_outputs]
# Write out all: target, including mkdir for each output directory.
mk_file.write('all: %s\n' % ' '.join(first_outputs_cyg))
for od in all_output_dirs:
if od:
mk_file.write('\tmkdir -p `cygpath -u "%s"`\n' % od)
mk_file.write('\n')
# Define how each output is generated.
for rule in rules:
trigger_files = _FindRuleTriggerFiles(rule, sources)
for tf in trigger_files:
# Get all the inputs and outputs for this rule for this trigger file.
inputs, outputs = _RuleInputsAndOutputs(rule, tf)
inputs = [_Cygwinify(i) for i in inputs]
outputs = [_Cygwinify(i) for i in outputs]
# Prepare the command line for this rule.
cmd = [_RuleExpandPath(c, tf) for c in rule['action']]
cmd = ['"%s"' % i for i in cmd]
cmd = ' '.join(cmd)
# Add it to the makefile.
mk_file.write('%s: %s\n' % (' '.join(outputs), ' '.join(inputs)))
mk_file.write('\t%s\n\n' % cmd)
# Close up the file.
mk_file.close()
# Add makefile to list of sources.
sources.add(filename)
# Add a build action to call makefile.
cmd = ['make',
'OutDir=$(OutDir)',
'IntDir=$(IntDir)',
'-j', '${NUMBER_OF_PROCESSORS_PLUS_1}',
'-f', filename]
cmd = _BuildCommandLineForRuleRaw(spec, cmd, True, False, True, True)
# Insert makefile as 0'th input, so it gets the action attached there,
# as this is easier to understand from in the IDE.
all_inputs = list(all_inputs)
all_inputs.insert(0, filename)
_AddActionStep(actions_to_add,
inputs=_FixPaths(all_inputs),
outputs=_FixPaths(all_outputs),
description='Running external rules for %s' %
spec['target_name'],
command=cmd)
def _EscapeEnvironmentVariableExpansion(s):
"""Escapes % characters.
Escapes any % characters so that Windows-style environment variable
expansions will leave them alone.
See http://connect.microsoft.com/VisualStudio/feedback/details/106127/cl-d-name-text-containing-percentage-characters-doesnt-compile
to understand why we have to do this.
Args:
s: The string to be escaped.
Returns:
The escaped string.
"""
s = s.replace('%', '%%')
return s
quote_replacer_regex = re.compile(r'(\\*)"')
def _EscapeCommandLineArgumentForMSVS(s):
"""Escapes a Windows command-line argument.
So that the Win32 CommandLineToArgv function will turn the escaped result back
into the original string.
See http://msdn.microsoft.com/en-us/library/17w5ykft.aspx
("Parsing C++ Command-Line Arguments") to understand why we have to do
this.
Args:
s: the string to be escaped.
Returns:
the escaped string.
"""
def _Replace(match):
# For a literal quote, CommandLineToArgv requires an odd number of
# backslashes preceding it, and it produces half as many literal backslashes
# (rounded down). So we need to produce 2n+1 backslashes.
return 2 * match.group(1) + '\\"'
# Escape all quotes so that they are interpreted literally.
s = quote_replacer_regex.sub(_Replace, s)
# Now add unescaped quotes so that any whitespace is interpreted literally.
s = '"' + s + '"'
return s
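# Illustration only (not part of upstream gyp): embedded quotes get a
# preceding backslash and the whole argument is wrapped in quotes, so
# CommandLineToArgv recovers the original string. Hypothetical helper only.
def _ExampleEscapeForMSVS():
  assert _EscapeCommandLineArgumentForMSVS('say "hello"') == '"say \\"hello\\""'
  return _EscapeCommandLineArgumentForMSVS('say "hello"')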
delimiters_replacer_regex = re.compile(r'(\\*)([,;]+)')
def _EscapeVCProjCommandLineArgListItem(s):
"""Escapes command line arguments for MSVS.
The VCProj format stores string lists in a single string using commas and
semi-colons as separators, which must be quoted if they are to be
interpreted literally. However, command-line arguments may already have
quotes, and the VCProj parser is ignorant of the backslash escaping
convention used by CommandLineToArgv, so the command-line quotes and the
VCProj quotes may not be the same quotes. So to store a general
command-line argument in a VCProj list, we need to parse the existing
quoting according to VCProj's convention and quote any delimiters that are
not already quoted by that convention. The quotes that we add will also be
seen by CommandLineToArgv, so if backslashes precede them then we also have
to escape those backslashes according to the CommandLineToArgv
convention.
Args:
s: the string to be escaped.
Returns:
the escaped string.
"""
def _Replace(match):
# For a non-literal quote, CommandLineToArgv requires an even number of
# backslashes preceding it, and it produces half as many literal
# backslashes. So we need to produce 2n backslashes.
return 2 * match.group(1) + '"' + match.group(2) + '"'
segments = s.split('"')
# The unquoted segments are at the even-numbered indices.
for i in range(0, len(segments), 2):
segments[i] = delimiters_replacer_regex.sub(_Replace, segments[i])
# Concatenate back into a single string
s = '"'.join(segments)
if len(segments) % 2 == 0:
# String ends while still quoted according to VCProj's convention. This
# means the delimiter and the next list item that follow this one in the
# .vcproj file will be misinterpreted as part of this item. There is nothing
# we can do about this. Adding an extra quote would correct the problem in
# the VCProj but cause the same problem on the final command-line. Moving
    # the item to the end of the list does work, but that's only possible if
# there's only one such item. Let's just warn the user.
print >> sys.stderr, ('Warning: MSVS may misinterpret the odd number of ' +
'quotes in ' + s)
return s
def _EscapeCppDefineForMSVS(s):
"""Escapes a CPP define so that it will reach the compiler unaltered."""
s = _EscapeEnvironmentVariableExpansion(s)
s = _EscapeCommandLineArgumentForMSVS(s)
s = _EscapeVCProjCommandLineArgListItem(s)
  # cl.exe replaces literal # characters with = in preprocessor definitions for
# some reason. Octal-encode to work around that.
s = s.replace('#', '\\%03o' % ord('#'))
return s
quote_replacer_regex2 = re.compile(r'(\\+)"')
def _EscapeCommandLineArgumentForMSBuild(s):
"""Escapes a Windows command-line argument for use by MSBuild."""
def _Replace(match):
return (len(match.group(1)) / 2 * 4) * '\\' + '\\"'
# Escape all quotes so that they are interpreted literally.
s = quote_replacer_regex2.sub(_Replace, s)
return s
def _EscapeMSBuildSpecialCharacters(s):
escape_dictionary = {
'%': '%25',
'$': '%24',
'@': '%40',
"'": '%27',
';': '%3B',
'?': '%3F',
'*': '%2A'
}
result = ''.join([escape_dictionary.get(c, c) for c in s])
return result
def _EscapeCppDefineForMSBuild(s):
"""Escapes a CPP define so that it will reach the compiler unaltered."""
s = _EscapeEnvironmentVariableExpansion(s)
s = _EscapeCommandLineArgumentForMSBuild(s)
s = _EscapeMSBuildSpecialCharacters(s)
  # cl.exe replaces literal # characters with = in preprocessor definitions for
# some reason. Octal-encode to work around that.
s = s.replace('#', '\\%03o' % ord('#'))
return s
def _GenerateRulesForMSVS(p, output_dir, options, spec,
sources, excluded_sources,
actions_to_add):
"""Generate all the rules for a particular project.
Arguments:
p: the project
output_dir: directory to emit rules to
options: global options passed to the generator
spec: the specification for this project
sources: the set of all known source files in this project
excluded_sources: the set of sources excluded from normal processing
actions_to_add: deferred list of actions to add in
"""
rules = spec.get('rules', [])
rules_native = [r for r in rules if not int(r.get('msvs_external_rule', 0))]
rules_external = [r for r in rules if int(r.get('msvs_external_rule', 0))]
# Handle rules that use a native rules file.
if rules_native:
_GenerateNativeRulesForMSVS(p, rules_native, output_dir, spec, options)
# Handle external rules (non-native rules).
if rules_external:
_GenerateExternalRules(rules_external, output_dir, spec,
sources, options, actions_to_add)
_AdjustSourcesForRules(rules, sources, excluded_sources, False)
def _AdjustSourcesForRules(rules, sources, excluded_sources, is_msbuild):
# Add outputs generated by each rule (if applicable).
for rule in rules:
# Add in the outputs from this rule.
trigger_files = _FindRuleTriggerFiles(rule, sources)
for trigger_file in trigger_files:
# Remove trigger_file from excluded_sources to let the rule be triggered
# (e.g. rule trigger ax_enums.idl is added to excluded_sources
# because it's also in an action's inputs in the same project)
excluded_sources.discard(_FixPath(trigger_file))
# Done if not processing outputs as sources.
if int(rule.get('process_outputs_as_sources', False)):
inputs, outputs = _RuleInputsAndOutputs(rule, trigger_file)
inputs = OrderedSet(_FixPaths(inputs))
outputs = OrderedSet(_FixPaths(outputs))
inputs.remove(_FixPath(trigger_file))
sources.update(inputs)
if not is_msbuild:
excluded_sources.update(inputs)
sources.update(outputs)
def _FilterActionsFromExcluded(excluded_sources, actions_to_add):
"""Take inputs with actions attached out of the list of exclusions.
Arguments:
excluded_sources: list of source files not to be built.
actions_to_add: dict of actions keyed on source file they're attached to.
Returns:
excluded_sources with files that have actions attached removed.
"""
must_keep = OrderedSet(_FixPaths(actions_to_add.keys()))
return [s for s in excluded_sources if s not in must_keep]
def _GetDefaultConfiguration(spec):
return spec['configurations'][spec['default_configuration']]
def _GetGuidOfProject(proj_path, spec):
"""Get the guid for the project.
Arguments:
proj_path: Path of the vcproj or vcxproj file to generate.
spec: The target dictionary containing the properties of the target.
Returns:
the guid.
Raises:
ValueError: if the specified GUID is invalid.
"""
# Pluck out the default configuration.
default_config = _GetDefaultConfiguration(spec)
# Decide the guid of the project.
guid = default_config.get('msvs_guid')
if guid:
if VALID_MSVS_GUID_CHARS.match(guid) is None:
raise ValueError('Invalid MSVS guid: "%s". Must match regex: "%s".' %
(guid, VALID_MSVS_GUID_CHARS.pattern))
guid = '{%s}' % guid
guid = guid or MSVSNew.MakeGuid(proj_path)
return guid
def _GetMsbuildToolsetOfProject(proj_path, spec, version):
"""Get the platform toolset for the project.
Arguments:
proj_path: Path of the vcproj or vcxproj file to generate.
spec: The target dictionary containing the properties of the target.
version: The MSVSVersion object.
Returns:
the platform toolset string or None.
"""
# Pluck out the default configuration.
default_config = _GetDefaultConfiguration(spec)
toolset = default_config.get('msbuild_toolset')
if not toolset and version.DefaultToolset():
toolset = version.DefaultToolset()
if spec['type'] == 'windows_driver':
toolset = 'WindowsKernelModeDriver10.0'
return toolset
def _GenerateProject(project, options, version, generator_flags):
"""Generates a vcproj file.
Arguments:
project: the MSVSProject object.
options: global generator options.
version: the MSVSVersion object.
generator_flags: dict of generator-specific flags.
Returns:
A list of source files that cannot be found on disk.
"""
default_config = _GetDefaultConfiguration(project.spec)
# Skip emitting anything if told to with msvs_existing_vcproj option.
if default_config.get('msvs_existing_vcproj'):
return []
if version.UsesVcxproj():
return _GenerateMSBuildProject(project, options, version, generator_flags)
else:
return _GenerateMSVSProject(project, options, version, generator_flags)
# TODO: Avoid code duplication with _ValidateSourcesForOSX in make.py.
def _ValidateSourcesForMSVSProject(spec, version):
"""Makes sure if duplicate basenames are not specified in the source list.
Arguments:
spec: The target dictionary containing the properties of the target.
version: The VisualStudioVersion object.
"""
# This validation should not be applied to MSVC2010 and later.
assert not version.UsesVcxproj()
# TODO: Check if MSVC allows this for loadable_module targets.
if spec.get('type', None) not in ('static_library', 'shared_library'):
return
sources = spec.get('sources', [])
basenames = {}
for source in sources:
name, ext = os.path.splitext(source)
is_compiled_file = ext in [
'.c', '.cc', '.cpp', '.cxx', '.m', '.mm', '.s', '.S']
if not is_compiled_file:
continue
basename = os.path.basename(name) # Don't include extension.
basenames.setdefault(basename, []).append(source)
error = ''
for basename, files in basenames.iteritems():
if len(files) > 1:
error += ' %s: %s\n' % (basename, ' '.join(files))
if error:
print('static library %s has several files with the same basename:\n' %
spec['target_name'] + error + 'MSVC08 cannot handle that.')
raise GypError('Duplicate basenames in sources section, see list above')
def _GenerateMSVSProject(project, options, version, generator_flags):
"""Generates a .vcproj file. It may create .rules and .user files too.
Arguments:
project: The project object we will generate the file for.
options: Global options passed to the generator.
version: The VisualStudioVersion object.
generator_flags: dict of generator-specific flags.
"""
spec = project.spec
gyp.common.EnsureDirExists(project.path)
platforms = _GetUniquePlatforms(spec)
p = MSVSProject.Writer(project.path, version, spec['target_name'],
project.guid, platforms)
# Get directory project file is in.
project_dir = os.path.split(project.path)[0]
gyp_path = _NormalizedSource(project.build_file)
relative_path_of_gyp_file = gyp.common.RelativePath(gyp_path, project_dir)
config_type = _GetMSVSConfigurationType(spec, project.build_file)
for config_name, config in spec['configurations'].iteritems():
_AddConfigurationToMSVSProject(p, spec, config_type, config_name, config)
  # MSVC08 and prior versions cannot handle duplicate basenames in the same
  # target.
# TODO: Take excluded sources into consideration if possible.
_ValidateSourcesForMSVSProject(spec, version)
# Prepare list of sources and excluded sources.
gyp_file = os.path.split(project.build_file)[1]
sources, excluded_sources = _PrepareListOfSources(spec, generator_flags,
gyp_file)
# Add rules.
actions_to_add = {}
_GenerateRulesForMSVS(p, project_dir, options, spec,
sources, excluded_sources,
actions_to_add)
list_excluded = generator_flags.get('msvs_list_excluded_files', True)
sources, excluded_sources, excluded_idl = (
_AdjustSourcesAndConvertToFilterHierarchy(spec, options, project_dir,
sources, excluded_sources,
list_excluded, version))
# Add in files.
missing_sources = _VerifySourcesExist(sources, project_dir)
p.AddFiles(sources)
_AddToolFilesToMSVS(p, spec)
_HandlePreCompiledHeaders(p, sources, spec)
_AddActions(actions_to_add, spec, relative_path_of_gyp_file)
_AddCopies(actions_to_add, spec)
_WriteMSVSUserFile(project.path, version, spec)
# NOTE: this stanza must appear after all actions have been decided.
# Don't excluded sources with actions attached, or they won't run.
excluded_sources = _FilterActionsFromExcluded(
excluded_sources, actions_to_add)
_ExcludeFilesFromBeingBuilt(p, spec, excluded_sources, excluded_idl,
list_excluded)
_AddAccumulatedActionsToMSVS(p, spec, actions_to_add)
# Write it out.
p.WriteIfChanged()
return missing_sources
def _GetUniquePlatforms(spec):
"""Returns the list of unique platforms for this spec, e.g ['win32', ...].
Arguments:
spec: The target dictionary containing the properties of the target.
Returns:
    The list of unique platform names.
"""
# Gather list of unique platforms.
platforms = OrderedSet()
for configuration in spec['configurations']:
platforms.add(_ConfigPlatform(spec['configurations'][configuration]))
platforms = list(platforms)
return platforms
def _CreateMSVSUserFile(proj_path, version, spec):
"""Generates a .user file for the user running this Gyp program.
Arguments:
proj_path: The path of the project file being created. The .user file
shares the same path (with an appropriate suffix).
version: The VisualStudioVersion object.
spec: The target dictionary containing the properties of the target.
Returns:
The MSVSUserFile object created.
"""
(domain, username) = _GetDomainAndUserName()
vcuser_filename = '.'.join([proj_path, domain, username, 'user'])
user_file = MSVSUserFile.Writer(vcuser_filename, version,
spec['target_name'])
return user_file
def _GetMSVSConfigurationType(spec, build_file):
"""Returns the configuration type for this project.
It's a number defined by Microsoft. May raise an exception.
Args:
spec: The target dictionary containing the properties of the target.
build_file: The path of the gyp file.
Returns:
An integer, the configuration type.
"""
try:
config_type = {
'executable': '1', # .exe
'shared_library': '2', # .dll
'loadable_module': '2', # .dll
'static_library': '4', # .lib
'windows_driver': '5', # .sys
'none': '10', # Utility type
}[spec['type']]
except KeyError:
if spec.get('type'):
raise GypError('Target type %s is not a valid target type for '
'target %s in %s.' %
(spec['type'], spec['target_name'], build_file))
else:
raise GypError('Missing type field for target %s in %s.' %
(spec['target_name'], build_file))
return config_type
def _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config):
"""Adds a configuration to the MSVS project.
Many settings in a vcproj file are specific to a configuration. This
  function generates the main part of the vcproj file that is configuration
  specific.
Arguments:
p: The target project being generated.
spec: The target dictionary containing the properties of the target.
config_type: The configuration type, a number as defined by Microsoft.
config_name: The name of the configuration.
config: The dictionary that defines the special processing to be done
for this configuration.
"""
# Get the information for this configuration
include_dirs, midl_include_dirs, resource_include_dirs = \
_GetIncludeDirs(config)
libraries = _GetLibraries(spec)
library_dirs = _GetLibraryDirs(config)
out_file, vc_tool, _ = _GetOutputFilePathAndTool(spec, msbuild=False)
defines = _GetDefines(config)
defines = [_EscapeCppDefineForMSVS(d) for d in defines]
disabled_warnings = _GetDisabledWarnings(config)
prebuild = config.get('msvs_prebuild')
postbuild = config.get('msvs_postbuild')
def_file = _GetModuleDefinition(spec)
precompiled_header = config.get('msvs_precompiled_header')
# Prepare the list of tools as a dictionary.
tools = dict()
# Add in user specified msvs_settings.
msvs_settings = config.get('msvs_settings', {})
MSVSSettings.ValidateMSVSSettings(msvs_settings)
# Prevent default library inheritance from the environment.
_ToolAppend(tools, 'VCLinkerTool', 'AdditionalDependencies', ['$(NOINHERIT)'])
for tool in msvs_settings:
settings = config['msvs_settings'][tool]
for setting in settings:
_ToolAppend(tools, tool, setting, settings[setting])
# Add the information to the appropriate tool
_ToolAppend(tools, 'VCCLCompilerTool',
'AdditionalIncludeDirectories', include_dirs)
_ToolAppend(tools, 'VCMIDLTool',
'AdditionalIncludeDirectories', midl_include_dirs)
_ToolAppend(tools, 'VCResourceCompilerTool',
'AdditionalIncludeDirectories', resource_include_dirs)
# Add in libraries.
_ToolAppend(tools, 'VCLinkerTool', 'AdditionalDependencies', libraries)
_ToolAppend(tools, 'VCLinkerTool', 'AdditionalLibraryDirectories',
library_dirs)
if out_file:
_ToolAppend(tools, vc_tool, 'OutputFile', out_file, only_if_unset=True)
# Add defines.
_ToolAppend(tools, 'VCCLCompilerTool', 'PreprocessorDefinitions', defines)
_ToolAppend(tools, 'VCResourceCompilerTool', 'PreprocessorDefinitions',
defines)
# Change program database directory to prevent collisions.
_ToolAppend(tools, 'VCCLCompilerTool', 'ProgramDataBaseFileName',
'$(IntDir)$(ProjectName)\\vc80.pdb', only_if_unset=True)
# Add disabled warnings.
_ToolAppend(tools, 'VCCLCompilerTool',
'DisableSpecificWarnings', disabled_warnings)
# Add Pre-build.
_ToolAppend(tools, 'VCPreBuildEventTool', 'CommandLine', prebuild)
# Add Post-build.
_ToolAppend(tools, 'VCPostBuildEventTool', 'CommandLine', postbuild)
# Turn on precompiled headers if appropriate.
if precompiled_header:
precompiled_header = os.path.split(precompiled_header)[1]
_ToolAppend(tools, 'VCCLCompilerTool', 'UsePrecompiledHeader', '2')
_ToolAppend(tools, 'VCCLCompilerTool',
'PrecompiledHeaderThrough', precompiled_header)
_ToolAppend(tools, 'VCCLCompilerTool',
'ForcedIncludeFiles', precompiled_header)
# Loadable modules don't generate import libraries;
# tell dependent projects to not expect one.
if spec['type'] == 'loadable_module':
_ToolAppend(tools, 'VCLinkerTool', 'IgnoreImportLibrary', 'true')
# Set the module definition file if any.
if def_file:
_ToolAppend(tools, 'VCLinkerTool', 'ModuleDefinitionFile', def_file)
_AddConfigurationToMSVS(p, spec, tools, config, config_type, config_name)
def _GetIncludeDirs(config):
"""Returns the list of directories to be used for #include directives.
Arguments:
config: The dictionary that defines the special processing to be done
for this configuration.
Returns:
The list of directory paths.
"""
# TODO(bradnelson): include_dirs should really be flexible enough not to
# require this sort of thing.
include_dirs = (
config.get('include_dirs', []) +
config.get('msvs_system_include_dirs', []))
midl_include_dirs = (
config.get('midl_include_dirs', []) +
config.get('msvs_system_include_dirs', []))
resource_include_dirs = config.get('resource_include_dirs', include_dirs)
include_dirs = _FixPaths(include_dirs)
midl_include_dirs = _FixPaths(midl_include_dirs)
resource_include_dirs = _FixPaths(resource_include_dirs)
return include_dirs, midl_include_dirs, resource_include_dirs
def _GetLibraryDirs(config):
"""Returns the list of directories to be used for library search paths.
Arguments:
config: The dictionary that defines the special processing to be done
for this configuration.
Returns:
The list of directory paths.
"""
library_dirs = config.get('library_dirs', [])
library_dirs = _FixPaths(library_dirs)
return library_dirs
def _GetLibraries(spec):
"""Returns the list of libraries for this configuration.
Arguments:
spec: The target dictionary containing the properties of the target.
Returns:
    The list of libraries to be linked.
"""
libraries = spec.get('libraries', [])
# Strip out -l, as it is not used on windows (but is needed so we can pass
# in libraries that are assumed to be in the default library path).
# Also remove duplicate entries, leaving only the last duplicate, while
# preserving order.
found = OrderedSet()
unique_libraries_list = []
for entry in reversed(libraries):
library = re.sub(r'^\-l', '', entry)
if not os.path.splitext(library)[1]:
library += '.lib'
if library not in found:
found.add(library)
unique_libraries_list.append(library)
unique_libraries_list.reverse()
return unique_libraries_list
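# Illustration only (not part of upstream gyp): '-l' prefixes are stripped,
# '.lib' is appended when no extension is given, and only the last occurrence
# of a duplicate is kept. Hypothetical helper, never called by the generator.
def _ExampleGetLibraries():
  spec = {'libraries': ['-lkernel32', 'user32.lib', 'kernel32.lib']}
  assert _GetLibraries(spec) == ['user32.lib', 'kernel32.lib']
  return _GetLibraries(spec)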
def _GetOutputFilePathAndTool(spec, msbuild):
"""Returns the path and tool to use for this target.
Figures out the path of the file this spec will create and the name of
the VC tool that will create it.
Arguments:
    spec: The target dictionary containing the properties of the target.
    msbuild: True when generating for MSBuild (.vcxproj) rather than .vcproj.
Returns:
A triple of (file path, name of the vc tool, name of the msbuild tool)
"""
# Select a name for the output file.
out_file = ''
vc_tool = ''
msbuild_tool = ''
output_file_map = {
'executable': ('VCLinkerTool', 'Link', '$(OutDir)', '.exe'),
'shared_library': ('VCLinkerTool', 'Link', '$(OutDir)', '.dll'),
'loadable_module': ('VCLinkerTool', 'Link', '$(OutDir)', '.dll'),
'windows_driver': ('VCLinkerTool', 'Link', '$(OutDir)', '.sys'),
'static_library': ('VCLibrarianTool', 'Lib', '$(OutDir)lib\\', '.lib'),
}
output_file_props = output_file_map.get(spec['type'])
if output_file_props and int(spec.get('msvs_auto_output_file', 1)):
vc_tool, msbuild_tool, out_dir, suffix = output_file_props
if spec.get('standalone_static_library', 0):
out_dir = '$(OutDir)'
out_dir = spec.get('product_dir', out_dir)
product_extension = spec.get('product_extension')
if product_extension:
suffix = '.' + product_extension
elif msbuild:
suffix = '$(TargetExt)'
prefix = spec.get('product_prefix', '')
product_name = spec.get('product_name', '$(ProjectName)')
out_file = ntpath.join(out_dir, prefix + product_name + suffix)
return out_file, vc_tool, msbuild_tool
def _GetOutputTargetExt(spec):
"""Returns the extension for this target, including the dot
If product_extension is specified, set target_extension to this to avoid
MSB8012, returns None otherwise. Ignores any target_extension settings in
the input files.
Arguments:
spec: The target dictionary containing the properties of the target.
Returns:
A string with the extension, or None
"""
target_extension = spec.get('product_extension')
if target_extension:
return '.' + target_extension
return None
def _GetDefines(config):
"""Returns the list of preprocessor definitions for this configuation.
Arguments:
config: The dictionary that defines the special processing to be done
for this configuration.
Returns:
The list of preprocessor definitions.
"""
defines = []
for d in config.get('defines', []):
if type(d) == list:
fd = '='.join([str(dpart) for dpart in d])
else:
fd = str(d)
defines.append(fd)
return defines
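# Illustration only (not part of upstream gyp): defines given as [name, value]
# pairs are joined with '=', plain strings pass through unchanged.
# Hypothetical helper, never called by the generator.
def _ExampleGetDefines():
  assert _GetDefines({'defines': ['NDEBUG', ['VERSION', 2]]}) == \
      ['NDEBUG', 'VERSION=2']
  return _GetDefines({'defines': ['NDEBUG', ['VERSION', 2]]})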
def _GetDisabledWarnings(config):
return [str(i) for i in config.get('msvs_disabled_warnings', [])]
def _GetModuleDefinition(spec):
def_file = ''
if spec['type'] in ['shared_library', 'loadable_module', 'executable',
'windows_driver']:
def_files = [s for s in spec.get('sources', []) if s.endswith('.def')]
if len(def_files) == 1:
def_file = _FixPath(def_files[0])
elif def_files:
raise ValueError(
'Multiple module definition files in one target, target %s lists '
'multiple .def files: %s' % (
spec['target_name'], ' '.join(def_files)))
return def_file
def _ConvertToolsToExpectedForm(tools):
"""Convert tools to a form expected by Visual Studio.
Arguments:
tools: A dictionary of settings; the tool name is the key.
Returns:
A list of Tool objects.
"""
tool_list = []
for tool, settings in tools.iteritems():
# Collapse settings with lists.
settings_fixed = {}
for setting, value in settings.iteritems():
if type(value) == list:
if ((tool == 'VCLinkerTool' and
setting == 'AdditionalDependencies') or
setting == 'AdditionalOptions'):
settings_fixed[setting] = ' '.join(value)
else:
settings_fixed[setting] = ';'.join(value)
else:
settings_fixed[setting] = value
# Add in this tool.
tool_list.append(MSVSProject.Tool(tool, settings_fixed))
return tool_list
def _AddConfigurationToMSVS(p, spec, tools, config, config_type, config_name):
"""Add to the project file the configuration specified by config.
Arguments:
p: The target project being generated.
spec: the target project dict.
tools: A dictionary of settings; the tool name is the key.
config: The dictionary that defines the special processing to be done
for this configuration.
config_type: The configuration type, a number as defined by Microsoft.
config_name: The name of the configuration.
"""
attributes = _GetMSVSAttributes(spec, config, config_type)
# Add in this configuration.
tool_list = _ConvertToolsToExpectedForm(tools)
p.AddConfig(_ConfigFullName(config_name, config),
attrs=attributes, tools=tool_list)
def _GetMSVSAttributes(spec, config, config_type):
# Prepare configuration attributes.
prepared_attrs = {}
source_attrs = config.get('msvs_configuration_attributes', {})
for a in source_attrs:
prepared_attrs[a] = source_attrs[a]
# Add props files.
vsprops_dirs = config.get('msvs_props', [])
vsprops_dirs = _FixPaths(vsprops_dirs)
if vsprops_dirs:
prepared_attrs['InheritedPropertySheets'] = ';'.join(vsprops_dirs)
# Set configuration type.
prepared_attrs['ConfigurationType'] = config_type
output_dir = prepared_attrs.get('OutputDirectory',
'$(SolutionDir)$(ConfigurationName)')
prepared_attrs['OutputDirectory'] = _FixPath(output_dir) + '\\'
if 'IntermediateDirectory' not in prepared_attrs:
intermediate = '$(ConfigurationName)\\obj\\$(ProjectName)'
prepared_attrs['IntermediateDirectory'] = _FixPath(intermediate) + '\\'
else:
intermediate = _FixPath(prepared_attrs['IntermediateDirectory']) + '\\'
intermediate = MSVSSettings.FixVCMacroSlashes(intermediate)
prepared_attrs['IntermediateDirectory'] = intermediate
return prepared_attrs
def _AddNormalizedSources(sources_set, sources_array):
sources_set.update(_NormalizedSource(s) for s in sources_array)
def _PrepareListOfSources(spec, generator_flags, gyp_file):
"""Prepare list of sources and excluded sources.
Besides the sources specified directly in the spec, adds the gyp file so
that a change to it will cause a re-compile. Also adds appropriate sources
for actions and copies. Assumes later stage will un-exclude files which
have custom build steps attached.
Arguments:
spec: The target dictionary containing the properties of the target.
gyp_file: The name of the gyp file.
Returns:
A pair of (list of sources, list of excluded sources).
The sources will be relative to the gyp file.
"""
sources = OrderedSet()
_AddNormalizedSources(sources, spec.get('sources', []))
excluded_sources = OrderedSet()
# Add in the gyp file.
if not generator_flags.get('standalone'):
sources.add(gyp_file)
# Add in 'action' inputs and outputs.
for a in spec.get('actions', []):
inputs = a['inputs']
inputs = [_NormalizedSource(i) for i in inputs]
# Add all inputs to sources and excluded sources.
inputs = OrderedSet(inputs)
sources.update(inputs)
if not spec.get('msvs_external_builder'):
excluded_sources.update(inputs)
if int(a.get('process_outputs_as_sources', False)):
_AddNormalizedSources(sources, a.get('outputs', []))
# Add in 'copies' inputs and outputs.
for cpy in spec.get('copies', []):
_AddNormalizedSources(sources, cpy.get('files', []))
return (sources, excluded_sources)
def _AdjustSourcesAndConvertToFilterHierarchy(
spec, options, gyp_dir, sources, excluded_sources, list_excluded, version):
"""Adjusts the list of sources and excluded sources.
Also converts the sets to lists.
Arguments:
spec: The target dictionary containing the properties of the target.
options: Global generator options.
gyp_dir: The path to the gyp file being processed.
sources: A set of sources to be included for this project.
excluded_sources: A set of sources to be excluded for this project.
version: A MSVSVersion object.
Returns:
A trio of (list of sources, list of excluded sources,
path of excluded IDL file)
"""
# Exclude excluded sources coming into the generator.
excluded_sources.update(OrderedSet(spec.get('sources_excluded', [])))
# Add excluded sources into sources for good measure.
sources.update(excluded_sources)
# Convert to proper windows form.
# NOTE: sources goes from being a set to a list here.
# NOTE: excluded_sources goes from being a set to a list here.
sources = _FixPaths(sources)
# Convert to proper windows form.
excluded_sources = _FixPaths(excluded_sources)
excluded_idl = _IdlFilesHandledNonNatively(spec, sources)
precompiled_related = _GetPrecompileRelatedFiles(spec)
# Find the excluded ones, minus the precompiled header related ones.
fully_excluded = [i for i in excluded_sources if i not in precompiled_related]
# Convert to folders and the right slashes.
sources = [i.split('\\') for i in sources]
sources = _ConvertSourcesToFilterHierarchy(sources, excluded=fully_excluded,
list_excluded=list_excluded,
msvs_version=version)
# Prune filters with a single child to flatten ugly directory structures
# such as ../../src/modules/module1 etc.
if version.UsesVcxproj():
while all([isinstance(s, MSVSProject.Filter) for s in sources]) \
and len(set([s.name for s in sources])) == 1:
assert all([len(s.contents) == 1 for s in sources])
sources = [s.contents[0] for s in sources]
else:
while len(sources) == 1 and isinstance(sources[0], MSVSProject.Filter):
sources = sources[0].contents
return sources, excluded_sources, excluded_idl
def _IdlFilesHandledNonNatively(spec, sources):
# If any non-native rules use 'idl' as an extension, exclude idl files.
# Gather a list here to use later.
using_idl = False
for rule in spec.get('rules', []):
if rule['extension'] == 'idl' and int(rule.get('msvs_external_rule', 0)):
using_idl = True
break
if using_idl:
excluded_idl = [i for i in sources if i.endswith('.idl')]
else:
excluded_idl = []
return excluded_idl
def _GetPrecompileRelatedFiles(spec):
# Gather a list of precompiled header related sources.
precompiled_related = []
for _, config in spec['configurations'].iteritems():
for k in precomp_keys:
f = config.get(k)
if f:
precompiled_related.append(_FixPath(f))
return precompiled_related
def _ExcludeFilesFromBeingBuilt(p, spec, excluded_sources, excluded_idl,
list_excluded):
exclusions = _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl)
for file_name, excluded_configs in exclusions.iteritems():
if (not list_excluded and
len(excluded_configs) == len(spec['configurations'])):
# If we're not listing excluded files, then they won't appear in the
# project, so don't try to configure them to be excluded.
pass
else:
for config_name, config in excluded_configs:
p.AddFileConfig(file_name, _ConfigFullName(config_name, config),
{'ExcludedFromBuild': 'true'})
def _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl):
exclusions = {}
# Exclude excluded sources from being built.
for f in excluded_sources:
excluded_configs = []
for config_name, config in spec['configurations'].iteritems():
precomped = [_FixPath(config.get(i, '')) for i in precomp_keys]
# Don't do this for ones that are precompiled header related.
if f not in precomped:
excluded_configs.append((config_name, config))
exclusions[f] = excluded_configs
# If any non-native rules use 'idl' as an extension, exclude idl files.
# Exclude them now.
for f in excluded_idl:
excluded_configs = []
for config_name, config in spec['configurations'].iteritems():
excluded_configs.append((config_name, config))
exclusions[f] = excluded_configs
return exclusions
def _AddToolFilesToMSVS(p, spec):
# Add in tool files (rules).
tool_files = OrderedSet()
for _, config in spec['configurations'].iteritems():
for f in config.get('msvs_tool_files', []):
tool_files.add(f)
for f in tool_files:
p.AddToolFile(f)
def _HandlePreCompiledHeaders(p, sources, spec):
# Pre-compiled header source stubs need a different compiler flag
# (generate precompiled header) and any source file not of the same
# kind (i.e. C vs. C++) as the precompiled header source stub needs
# to have use of precompiled headers disabled.
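# Illustrative example (not part of the original comments): if the
# precompiled source stub is a .cc file, every plain .c source in the target
# gets UsePrecompiledHeader=0 below, and vice versa.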
extensions_excluded_from_precompile = []
for config_name, config in spec['configurations'].iteritems():
source = config.get('msvs_precompiled_source')
if source:
source = _FixPath(source)
# UsePrecompiledHeader=1 for if using precompiled headers.
tool = MSVSProject.Tool('VCCLCompilerTool',
{'UsePrecompiledHeader': '1'})
p.AddFileConfig(source, _ConfigFullName(config_name, config),
{}, tools=[tool])
basename, extension = os.path.splitext(source)
if extension == '.c':
extensions_excluded_from_precompile = ['.cc', '.cpp', '.cxx']
else:
extensions_excluded_from_precompile = ['.c']
def DisableForSourceTree(source_tree):
for source in source_tree:
if isinstance(source, MSVSProject.Filter):
DisableForSourceTree(source.contents)
else:
basename, extension = os.path.splitext(source)
if extension in extensions_excluded_from_precompile:
for config_name, config in spec['configurations'].iteritems():
tool = MSVSProject.Tool('VCCLCompilerTool',
{'UsePrecompiledHeader': '0',
'ForcedIncludeFiles': '$(NOINHERIT)'})
p.AddFileConfig(_FixPath(source),
_ConfigFullName(config_name, config),
{}, tools=[tool])
# Do nothing if there was no precompiled source.
if extensions_excluded_from_precompile:
DisableForSourceTree(sources)
def _AddActions(actions_to_add, spec, relative_path_of_gyp_file):
# Add actions.
actions = spec.get('actions', [])
# Don't setup_env every time. When all the actions are run together in one
# batch file in VS, the PATH will grow too long.
# Membership in this set means that the cygwin environment has been set up,
# and does not need to be set up again.
have_setup_env = set()
for a in actions:
# Attach actions to the gyp file if nothing else is there.
inputs = a.get('inputs') or [relative_path_of_gyp_file]
attached_to = inputs[0]
need_setup_env = attached_to not in have_setup_env
cmd = _BuildCommandLineForRule(spec, a, has_input_path=False,
do_setup_env=need_setup_env,
attached_to=relative_path_of_gyp_file)
have_setup_env.add(attached_to)
# Add the action.
_AddActionStep(actions_to_add,
inputs=inputs,
outputs=a.get('outputs', []),
description=a.get('message', a['action_name']),
command=cmd)
def _WriteMSVSUserFile(project_path, version, spec):
# Add run_as and test targets.
if 'run_as' in spec:
run_as = spec['run_as']
action = run_as.get('action', [])
environment = run_as.get('environment', [])
working_directory = run_as.get('working_directory', '.')
elif int(spec.get('test', 0)):
action = ['$(TargetPath)', '--gtest_print_time']
environment = []
working_directory = '.'
else:
return # Nothing to add
# Write out the user file.
user_file = _CreateMSVSUserFile(project_path, version, spec)
for config_name, c_data in spec['configurations'].iteritems():
user_file.AddDebugSettings(_ConfigFullName(config_name, c_data),
action, environment, working_directory)
user_file.WriteIfChanged()
def _AddCopies(actions_to_add, spec):
copies = _GetCopies(spec)
for inputs, outputs, cmd, description in copies:
_AddActionStep(actions_to_add, inputs=inputs, outputs=outputs,
description=description, command=cmd)
def _GetCopies(spec):
copies = []
# Add copies.
for cpy in spec.get('copies', []):
for src in cpy.get('files', []):
dst = os.path.join(cpy['destination'], os.path.basename(src))
# _AddCustomBuildToolForMSVS() will call _FixPath() on the inputs and
# outputs, so do the same for our generated command line.
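# Directory sources (trailing '/') are copied recursively with xcopy;
# single files fall through to the mkdir+copy branch below.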
if src.endswith('/'):
src_bare = src[:-1]
base_dir = posixpath.split(src_bare)[0]
outer_dir = posixpath.split(src_bare)[1]
cmd = 'cd "%s" && xcopy /e /f /y "%s" "%s\\%s\\"' % (
_FixPath(base_dir), outer_dir, _FixPath(dst), outer_dir)
copies.append(([src], ['dummy_copies', dst], cmd,
'Copying %s to %s' % (src, dst)))
else:
cmd = 'mkdir "%s" 2>nul & set ERRORLEVEL=0 & copy /Y "%s" "%s"' % (
_FixPath(cpy['destination']), _FixPath(src), _FixPath(dst))
copies.append(([src], [dst], cmd, 'Copying %s to %s' % (src, dst)))
return copies
def _GetPathDict(root, path):
# |path| will eventually be empty (in the recursive calls) if it was initially
# relative; otherwise it will eventually end up as '\', 'D:\', etc.
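# Illustrative example (assuming POSIX path separators): _GetPathDict({}, 'a/b')
# grows the root to {'a': {'b': {}}} and returns the innermost dict.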
if not path or path.endswith(os.sep):
return root
parent, folder = os.path.split(path)
parent_dict = _GetPathDict(root, parent)
if folder not in parent_dict:
parent_dict[folder] = dict()
return parent_dict[folder]
def _DictsToFolders(base_path, bucket, flat):
# Convert to folders recursively.
children = []
for folder, contents in bucket.iteritems():
if type(contents) == dict:
folder_children = _DictsToFolders(os.path.join(base_path, folder),
contents, flat)
if flat:
children += folder_children
else:
folder_children = MSVSNew.MSVSFolder(os.path.join(base_path, folder),
# parentheses not supported by msbuild target parameter /t
# name='(' + folder + ')',
name=folder,
entries=folder_children)
children.append(folder_children)
else:
children.append(contents)
return children
def _CollapseSingles(parent, node):
# Recursively explore the tree of dicts looking for projects which are
# the sole item in a folder which has the same name as the project. Bring
# such projects up one level.
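# Illustrative example (not part of the original comments):
# {'base': {'base.vcproj': project}} collapses to {'base': project}.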
if (type(node) == dict and
len(node) == 1 and
node.keys()[0] == parent + '.vcproj'):
return node[node.keys()[0]]
if type(node) != dict:
return node
for child in node:
node[child] = _CollapseSingles(child, node[child])
return node
def _GatherSolutionFolders(sln_projects, project_objects, flat):
root = {}
# Convert into a tree of dicts on path.
for p in sln_projects:
gyp_file, target = gyp.common.ParseQualifiedTarget(p)[0:2]
gyp_dir = os.path.dirname(gyp_file)
path_dict = _GetPathDict(root, gyp_dir)
path_dict[target + '.vcproj'] = project_objects[p]
# Walk down from the top until we hit a folder that has more than one entry.
# In practice, this strips the top-level "src/" dir from the hierarchy in
# the solution.
while len(root) == 1 and type(root[root.keys()[0]]) == dict:
root = root[root.keys()[0]]
# Collapse singles.
root = _CollapseSingles('', root)
# Merge buckets until everything is a root entry.
return _DictsToFolders('', root, flat)
def _GetPathOfProject(qualified_target, spec, options, msvs_version):
default_config = _GetDefaultConfiguration(spec)
proj_filename = default_config.get('msvs_existing_vcproj')
if not proj_filename:
proj_filename = (spec['target_name'] + options.suffix +
msvs_version.ProjectExtension())
build_file = gyp.common.BuildFile(qualified_target)
proj_path = os.path.join(os.path.dirname(build_file), proj_filename)
fix_prefix = None
if options.generator_output:
project_dir_path = os.path.dirname(os.path.abspath(proj_path))
proj_path = os.path.join(options.generator_output, proj_path)
fix_prefix = gyp.common.RelativePath(project_dir_path,
os.path.dirname(proj_path))
return proj_path, fix_prefix
def _GetPlatformOverridesOfProject(spec):
# Prepare a dict indicating which project configurations are used for which
# solution configurations for this target.
config_platform_overrides = {}
for config_name, c in spec['configurations'].iteritems():
config_fullname = _ConfigFullName(config_name, c)
platform = c.get('msvs_target_platform', _ConfigPlatform(c))
fixed_config_fullname = '%s|%s' % (
_ConfigBaseName(config_name, _ConfigPlatform(c)), platform)
config_platform_overrides[config_fullname] = fixed_config_fullname
return config_platform_overrides
def _CreateProjectObjects(target_list, target_dicts, options, msvs_version):
"""Create a MSVSProject object for the targets found in target list.
Arguments:
target_list: the list of targets to generate project objects for.
target_dicts: the dictionary of specifications.
options: global generator options.
msvs_version: the MSVSVersion object.
Returns:
A set of created projects, keyed by target.
"""
global fixpath_prefix
# Generate each project.
projects = {}
for qualified_target in target_list:
spec = target_dicts[qualified_target]
if spec['toolset'] != 'target':
raise GypError(
'Multiple toolsets not supported in msvs build (target %s)' %
qualified_target)
proj_path, fixpath_prefix = _GetPathOfProject(qualified_target, spec,
options, msvs_version)
guid = _GetGuidOfProject(proj_path, spec)
overrides = _GetPlatformOverridesOfProject(spec)
build_file = gyp.common.BuildFile(qualified_target)
# Create object for this project.
obj = MSVSNew.MSVSProject(
proj_path,
name=spec['target_name'],
guid=guid,
spec=spec,
build_file=build_file,
config_platform_overrides=overrides,
fixpath_prefix=fixpath_prefix)
# Set project toolset if any (MS build only)
if msvs_version.UsesVcxproj():
obj.set_msbuild_toolset(
_GetMsbuildToolsetOfProject(proj_path, spec, msvs_version))
projects[qualified_target] = obj
# Set all the dependencies, but not if we are using an external builder like
# ninja
for project in projects.values():
if not project.spec.get('msvs_external_builder'):
deps = project.spec.get('dependencies', [])
deps = [projects[d] for d in deps]
project.set_dependencies(deps)
return projects
def _InitNinjaFlavor(params, target_list, target_dicts):
"""Initialize targets for the ninja flavor.
This sets up the necessary variables in the targets to generate msvs projects
that use ninja as an external builder. The variables in the spec are only set
if they have not been set. This allows individual specs to override the
default values initialized here.
Arguments:
params: Params provided to the generator.
target_list: List of target pairs: 'base/base.gyp:base'.
target_dicts: Dict of target properties keyed on target pair.
"""
for qualified_target in target_list:
spec = target_dicts[qualified_target]
if spec.get('msvs_external_builder'):
# The spec explicitly defined an external builder, so don't change it.
continue
path_to_ninja = spec.get('msvs_path_to_ninja', 'ninja.exe')
spec['msvs_external_builder'] = 'ninja'
if not spec.get('msvs_external_builder_out_dir'):
gyp_file, _, _ = gyp.common.ParseQualifiedTarget(qualified_target)
gyp_dir = os.path.dirname(gyp_file)
configuration = '$(Configuration)'
if params.get('target_arch') == 'x64':
configuration += '_x64'
spec['msvs_external_builder_out_dir'] = os.path.join(
gyp.common.RelativePath(params['options'].toplevel_dir, gyp_dir),
ninja_generator.ComputeOutputDir(params),
configuration)
if not spec.get('msvs_external_builder_build_cmd'):
spec['msvs_external_builder_build_cmd'] = [
path_to_ninja,
'-C',
'$(OutDir)',
'$(ProjectName)',
]
if not spec.get('msvs_external_builder_clean_cmd'):
spec['msvs_external_builder_clean_cmd'] = [
path_to_ninja,
'-C',
'$(OutDir)',
'-tclean',
'$(ProjectName)',
]
def CalculateVariables(default_variables, params):
"""Generated variables that require params to be known."""
generator_flags = params.get('generator_flags', {})
# Select project file format version (if unset, default to auto detecting).
msvs_version = MSVSVersion.SelectVisualStudioVersion(
generator_flags.get('msvs_version', 'auto'))
# Stash msvs_version for later (so we don't have to probe the system twice).
params['msvs_version'] = msvs_version
# Set a variable so conditions can be based on msvs_version.
default_variables['MSVS_VERSION'] = msvs_version.ShortName()
# To determine processor word size on Windows, in addition to checking
# PROCESSOR_ARCHITECTURE (which reflects the word size of the current
# process), it is also necessary to check PROCESSOR_ARCHITEW6432 (which
# contains the actual word size of the system when running through WOW64).
if (os.environ.get('PROCESSOR_ARCHITECTURE', '').find('64') >= 0 or
os.environ.get('PROCESSOR_ARCHITEW6432', '').find('64') >= 0):
default_variables['MSVS_OS_BITS'] = 64
else:
default_variables['MSVS_OS_BITS'] = 32
if gyp.common.GetFlavor(params) == 'ninja':
default_variables['SHARED_INTERMEDIATE_DIR'] = '$(OutDir)gen'
def PerformBuild(data, configurations, params):
options = params['options']
msvs_version = params['msvs_version']
devenv = os.path.join(msvs_version.path, 'Common7', 'IDE', 'devenv.com')
for build_file, build_file_dict in data.iteritems():
(build_file_root, build_file_ext) = os.path.splitext(build_file)
if build_file_ext != '.gyp':
continue
sln_path = build_file_root + options.suffix + '.sln'
if options.generator_output:
sln_path = os.path.join(options.generator_output, sln_path)
for config in configurations:
arguments = [devenv, sln_path, '/Build', config]
print 'Building [%s]: %s' % (config, arguments)
rtn = subprocess.check_call(arguments)
def CalculateGeneratorInputInfo(params):
if params.get('flavor') == 'ninja':
toplevel = params['options'].toplevel_dir
qualified_out_dir = os.path.normpath(os.path.join(
toplevel, ninja_generator.ComputeOutputDir(params),
'gypfiles-msvs-ninja'))
global generator_filelist_paths
generator_filelist_paths = {
'toplevel': toplevel,
'qualified_out_dir': qualified_out_dir,
}
def GenerateOutput(target_list, target_dicts, data, params):
"""Generate .sln and .vcproj files.
This is the entry point for this generator.
Arguments:
target_list: List of target pairs: 'base/base.gyp:base'.
target_dicts: Dict of target properties keyed on target pair.
data: Dictionary containing per .gyp data.
"""
global fixpath_prefix
options = params['options']
# Get the project file format version back out of where we stashed it in
# GeneratorCalculatedVariables.
msvs_version = params['msvs_version']
generator_flags = params.get('generator_flags', {})
# Optionally shard targets marked with 'msvs_shard': SHARD_COUNT.
(target_list, target_dicts) = MSVSUtil.ShardTargets(target_list, target_dicts)
# Optionally use the large PDB workaround for targets marked with
# 'msvs_large_pdb': 1.
(target_list, target_dicts) = MSVSUtil.InsertLargePdbShims(
target_list, target_dicts, generator_default_variables)
# Optionally configure each spec to use ninja as the external builder.
if params.get('flavor') == 'ninja':
_InitNinjaFlavor(params, target_list, target_dicts)
# Prepare the set of configurations.
configs = set()
for qualified_target in target_list:
spec = target_dicts[qualified_target]
for config_name, config in spec['configurations'].iteritems():
configs.add(_ConfigFullName(config_name, config))
configs = list(configs)
# Figure out all the projects that will be generated and their guids
project_objects = _CreateProjectObjects(target_list, target_dicts, options,
msvs_version)
# Generate each project.
missing_sources = []
for project in project_objects.values():
fixpath_prefix = project.fixpath_prefix
missing_sources.extend(_GenerateProject(project, options, msvs_version,
generator_flags))
fixpath_prefix = None
for build_file in data:
# Validate build_file extension
if not build_file.endswith('.gyp'):
continue
sln_path = os.path.splitext(build_file)[0] + options.suffix + '.sln'
if options.generator_output:
sln_path = os.path.join(options.generator_output, sln_path)
# Get projects in the solution, and their dependents.
sln_projects = gyp.common.BuildFileTargets(target_list, build_file)
sln_projects += gyp.common.DeepDependencyTargets(target_dicts, sln_projects)
# Create folder hierarchy.
root_entries = _GatherSolutionFolders(
# ArangoDB V8 build script expects flat solution
# sln_projects, project_objects, flat=msvs_version.FlatSolution())
sln_projects, project_objects, flat=True)
# Create solution.
sln = MSVSNew.MSVSSolution(sln_path,
entries=root_entries,
variants=configs,
websiteProperties=False,
version=msvs_version)
sln.Write()
if missing_sources:
error_message = "Missing input files:\n" + \
'\n'.join(set(missing_sources))
if generator_flags.get('msvs_error_on_missing_sources', False):
raise GypError(error_message)
else:
print >> sys.stdout, "Warning: " + error_message
def _GenerateMSBuildFiltersFile(filters_path, source_files,
rule_dependencies, extension_to_rule_name):
"""Generate the filters file.
This file is used by Visual Studio to organize the presentation of source
files into folders.
Arguments:
filters_path: The path of the file to be created.
source_files: The hierarchical structure of all the sources.
rule_dependencies: Additional input files declared by MSBuild rules.
extension_to_rule_name: A dictionary mapping file extensions to rules.
"""
filter_group = []
source_group = []
_AppendFiltersForMSBuild('', source_files, rule_dependencies,
extension_to_rule_name, filter_group, source_group)
if filter_group:
content = ['Project',
{'ToolsVersion': '4.0',
'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'
},
['ItemGroup'] + filter_group,
['ItemGroup'] + source_group
]
easy_xml.WriteXmlIfChanged(content, filters_path, pretty=True, win32=True)
elif os.path.exists(filters_path):
# We don't need this filter anymore. Delete the old filter file.
os.unlink(filters_path)
def _AppendFiltersForMSBuild(parent_filter_name, sources, rule_dependencies,
extension_to_rule_name,
filter_group, source_group):
"""Creates the list of filters and sources to be added in the filter file.
Args:
parent_filter_name: The name of the filter under which the sources are
found.
sources: The hierarchy of filters and sources to process.
rule_dependencies: Additional input files declared by MSBuild rules.
extension_to_rule_name: A dictionary mapping file extensions to rules.
filter_group: The list to which filter entries will be appended.
source_group: The list to which source entries will be appended.
"""
for source in sources:
if isinstance(source, MSVSProject.Filter):
# We have a sub-filter. Create the name of that sub-filter.
if not parent_filter_name:
filter_name = source.name
else:
filter_name = '%s\\%s' % (parent_filter_name, source.name)
# Add the filter to the group.
filter_group.append(
['Filter', {'Include': filter_name},
['UniqueIdentifier', MSVSNew.MakeGuid(source.name)]])
# Recurse and add its dependents.
_AppendFiltersForMSBuild(filter_name, source.contents,
rule_dependencies, extension_to_rule_name,
filter_group, source_group)
else:
# It's a source. Create a source entry.
_, element = _MapFileToMsBuildSourceType(source, rule_dependencies,
extension_to_rule_name)
source_entry = [element, {'Include': source}]
# Specify the filter it is part of, if any.
if parent_filter_name:
source_entry.append(['Filter', parent_filter_name])
source_group.append(source_entry)
def _MapFileToMsBuildSourceType(source, rule_dependencies,
extension_to_rule_name):
"""Returns the group and element type of the source file.
Arguments:
source: The source file name.
rule_dependencies: Additional input files declared by MSBuild rules; sources
found here map to the CustomBuild element.
extension_to_rule_name: A dictionary mapping file extensions to rules.
Returns:
A pair of (group this file should be part of, the label of element)
"""
_, ext = os.path.splitext(source)
if ext in extension_to_rule_name:
group = 'rule'
element = extension_to_rule_name[ext]
elif ext in ['.cc', '.cpp', '.c', '.cxx']:
group = 'compile'
element = 'ClCompile'
elif ext in ['.h', '.hxx']:
group = 'include'
element = 'ClInclude'
elif ext == '.rc':
group = 'resource'
element = 'ResourceCompile'
elif ext == '.asm':
group = 'masm'
element = 'MASM'
elif ext == '.idl':
group = 'midl'
element = 'Midl'
elif source in rule_dependencies:
group = 'rule_dependency'
element = 'CustomBuild'
else:
group = 'none'
element = 'None'
return (group, element)
def _GenerateRulesForMSBuild(output_dir, options, spec,
sources, excluded_sources,
props_files_of_rules, targets_files_of_rules,
actions_to_add, rule_dependencies,
extension_to_rule_name):
# MSBuild rules are implemented using three files: an XML file, a .targets
# file and a .props file.
# See http://blogs.msdn.com/b/vcblog/archive/2010/04/21/quick-help-on-vs2010-custom-build-rule.aspx
# for more details.
rules = spec.get('rules', [])
rules_native = [r for r in rules if not int(r.get('msvs_external_rule', 0))]
rules_external = [r for r in rules if int(r.get('msvs_external_rule', 0))]
msbuild_rules = []
for rule in rules_native:
# Skip a rule with no action and no inputs.
if 'action' not in rule and not rule.get('rule_sources', []):
continue
msbuild_rule = MSBuildRule(rule, spec)
msbuild_rules.append(msbuild_rule)
rule_dependencies.update(msbuild_rule.additional_dependencies.split(';'))
extension_to_rule_name[msbuild_rule.extension] = msbuild_rule.rule_name
if msbuild_rules:
base = spec['target_name'] + options.suffix
props_name = base + '.props'
targets_name = base + '.targets'
xml_name = base + '.xml'
props_files_of_rules.add(props_name)
targets_files_of_rules.add(targets_name)
props_path = os.path.join(output_dir, props_name)
targets_path = os.path.join(output_dir, targets_name)
xml_path = os.path.join(output_dir, xml_name)
_GenerateMSBuildRulePropsFile(props_path, msbuild_rules)
_GenerateMSBuildRuleTargetsFile(targets_path, msbuild_rules)
_GenerateMSBuildRuleXmlFile(xml_path, msbuild_rules)
if rules_external:
_GenerateExternalRules(rules_external, output_dir, spec,
sources, options, actions_to_add)
_AdjustSourcesForRules(rules, sources, excluded_sources, True)
class MSBuildRule(object):
"""Used to store information used to generate an MSBuild rule.
Attributes:
rule_name: The rule name, sanitized to use in XML.
target_name: The name of the target.
after_targets: The name of the AfterTargets element.
before_targets: The name of the BeforeTargets element.
depends_on: The name of the DependsOn element.
compute_output: The name of the ComputeOutput element.
dirs_to_make: The name of the DirsToMake element.
inputs: The name of the _inputs element.
tlog: The name of the _tlog element.
extension: The extension this rule applies to.
description: The message displayed when this rule is invoked.
additional_dependencies: A string listing additional dependencies.
outputs: The outputs of this rule.
command: The command used to run the rule.
"""
def __init__(self, rule, spec):
self.display_name = rule['rule_name']
# Ensure the rule name contains only word characters (letters, digits and '_')
self.rule_name = re.sub(r'\W', '_', self.display_name)
# Create the various element names, following the example set by the
# Visual Studio 2008 to 2010 conversion. I don't know if VS2010
# is sensitive to the exact names.
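# Illustrative example (not part of the original comments): a rule named
# 'protoc' produces '_protoc', 'protocBeforeTargets', 'protocAfterTargets',
# 'protocDependsOn' and 'ComputeprotocOutput'.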
self.target_name = '_' + self.rule_name
self.after_targets = self.rule_name + 'AfterTargets'
self.before_targets = self.rule_name + 'BeforeTargets'
self.depends_on = self.rule_name + 'DependsOn'
self.compute_output = 'Compute%sOutput' % self.rule_name
self.dirs_to_make = self.rule_name + 'DirsToMake'
self.inputs = self.rule_name + '_inputs'
self.tlog = self.rule_name + '_tlog'
self.extension = rule['extension']
if not self.extension.startswith('.'):
self.extension = '.' + self.extension
self.description = MSVSSettings.ConvertVCMacrosToMSBuild(
rule.get('message', self.rule_name))
old_additional_dependencies = _FixPaths(rule.get('inputs', []))
self.additional_dependencies = (
';'.join([MSVSSettings.ConvertVCMacrosToMSBuild(i)
for i in old_additional_dependencies]))
old_outputs = _FixPaths(rule.get('outputs', []))
self.outputs = ';'.join([MSVSSettings.ConvertVCMacrosToMSBuild(i)
for i in old_outputs])
old_command = _BuildCommandLineForRule(spec, rule, has_input_path=True,
do_setup_env=True, attached_to="")
self.command = MSVSSettings.ConvertVCMacrosToMSBuild(old_command)
def _GenerateMSBuildRulePropsFile(props_path, msbuild_rules):
"""Generate the .props file."""
content = ['Project',
{'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'}]
for rule in msbuild_rules:
content.extend([
['PropertyGroup',
{'Condition': "'$(%s)' == '' and '$(%s)' == '' and "
"'$(ConfigurationType)' != 'Makefile'" % (rule.before_targets,
rule.after_targets)
},
[rule.before_targets, 'Midl'],
[rule.after_targets, 'CustomBuild'],
],
['PropertyGroup',
[rule.depends_on,
{'Condition': "'$(ConfigurationType)' != 'Makefile'"},
'_SelectedFiles;$(%s)' % rule.depends_on
],
],
['ItemDefinitionGroup',
[rule.rule_name,
['CommandLineTemplate', rule.command],
['Outputs', rule.outputs],
['ExecutionDescription', rule.description],
['AdditionalDependencies', rule.additional_dependencies],
],
]
])
easy_xml.WriteXmlIfChanged(content, props_path, pretty=True, win32=True)
def _GenerateMSBuildRuleTargetsFile(targets_path, msbuild_rules):
"""Generate the .targets file."""
content = ['Project',
{'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'
}
]
item_group = [
'ItemGroup',
['PropertyPageSchema',
{'Include': '$(MSBuildThisFileDirectory)$(MSBuildThisFileName).xml'}
]
]
for rule in msbuild_rules:
item_group.append(
['AvailableItemName',
{'Include': rule.rule_name},
['Targets', rule.target_name],
])
content.append(item_group)
for rule in msbuild_rules:
content.append(
['UsingTask',
{'TaskName': rule.rule_name,
'TaskFactory': 'XamlTaskFactory',
'AssemblyName': 'Microsoft.Build.Tasks.v4.0'
},
['Task', '$(MSBuildThisFileDirectory)$(MSBuildThisFileName).xml'],
])
for rule in msbuild_rules:
rule_name = rule.rule_name
target_outputs = '%%(%s.Outputs)' % rule_name
target_inputs = ('%%(%s.Identity);%%(%s.AdditionalDependencies);'
'$(MSBuildProjectFile)') % (rule_name, rule_name)
rule_inputs = '%%(%s.Identity)' % rule_name
extension_condition = ("'%(Extension)'=='.obj' or "
"'%(Extension)'=='.res' or "
"'%(Extension)'=='.rsc' or "
"'%(Extension)'=='.lib'")
remove_section = [
'ItemGroup',
{'Condition': "'@(SelectedFiles)' != ''"},
[rule_name,
{'Remove': '@(%s)' % rule_name,
'Condition': "'%(Identity)' != '@(SelectedFiles)'"
}
]
]
inputs_section = [
'ItemGroup',
[rule.inputs, {'Include': '%%(%s.AdditionalDependencies)' % rule_name}]
]
logging_section = [
'ItemGroup',
[rule.tlog,
{'Include': '%%(%s.Outputs)' % rule_name,
'Condition': ("'%%(%s.Outputs)' != '' and "
"'%%(%s.ExcludedFromBuild)' != 'true'" %
(rule_name, rule_name))
},
['Source', "@(%s, '|')" % rule_name],
['Inputs', "@(%s -> '%%(Fullpath)', ';')" % rule.inputs],
],
]
message_section = [
'Message',
{'Importance': 'High',
'Text': '%%(%s.ExecutionDescription)' % rule_name
}
]
write_tlog_section = [
'WriteLinesToFile',
{'Condition': "'@(%s)' != '' and '%%(%s.ExcludedFromBuild)' != "
"'true'" % (rule.tlog, rule.tlog),
'File': '$(IntDir)$(ProjectName).write.1.tlog',
'Lines': "^%%(%s.Source);@(%s->'%%(Fullpath)')" % (rule.tlog,
rule.tlog)
}
]
read_tlog_section = [
'WriteLinesToFile',
{'Condition': "'@(%s)' != '' and '%%(%s.ExcludedFromBuild)' != "
"'true'" % (rule.tlog, rule.tlog),
'File': '$(IntDir)$(ProjectName).read.1.tlog',
'Lines': "^%%(%s.Source);%%(%s.Inputs)" % (rule.tlog, rule.tlog)
}
]
command_and_input_section = [
rule_name,
{'Condition': "'@(%s)' != '' and '%%(%s.ExcludedFromBuild)' != "
"'true'" % (rule_name, rule_name),
'EchoOff': 'true',
'StandardOutputImportance': 'High',
'StandardErrorImportance': 'High',
'CommandLineTemplate': '%%(%s.CommandLineTemplate)' % rule_name,
'AdditionalOptions': '%%(%s.AdditionalOptions)' % rule_name,
'Inputs': rule_inputs
}
]
content.extend([
['Target',
{'Name': rule.target_name,
'BeforeTargets': '$(%s)' % rule.before_targets,
'AfterTargets': '$(%s)' % rule.after_targets,
'Condition': "'@(%s)' != ''" % rule_name,
'DependsOnTargets': '$(%s);%s' % (rule.depends_on,
rule.compute_output),
'Outputs': target_outputs,
'Inputs': target_inputs
},
remove_section,
inputs_section,
logging_section,
message_section,
write_tlog_section,
read_tlog_section,
command_and_input_section,
],
['PropertyGroup',
['ComputeLinkInputsTargets',
'$(ComputeLinkInputsTargets);',
'%s;' % rule.compute_output
],
['ComputeLibInputsTargets',
'$(ComputeLibInputsTargets);',
'%s;' % rule.compute_output
],
],
['Target',
{'Name': rule.compute_output,
'Condition': "'@(%s)' != ''" % rule_name
},
['ItemGroup',
[rule.dirs_to_make,
{'Condition': "'@(%s)' != '' and "
"'%%(%s.ExcludedFromBuild)' != 'true'" % (rule_name, rule_name),
'Include': '%%(%s.Outputs)' % rule_name
}
],
['Link',
{'Include': '%%(%s.Identity)' % rule.dirs_to_make,
'Condition': extension_condition
}
],
['Lib',
{'Include': '%%(%s.Identity)' % rule.dirs_to_make,
'Condition': extension_condition
}
],
['ImpLib',
{'Include': '%%(%s.Identity)' % rule.dirs_to_make,
'Condition': extension_condition
}
],
],
['MakeDir',
{'Directories': ("@(%s->'%%(RootDir)%%(Directory)')" %
rule.dirs_to_make)
}
]
],
])
easy_xml.WriteXmlIfChanged(content, targets_path, pretty=True, win32=True)
def _GenerateMSBuildRuleXmlFile(xml_path, msbuild_rules):
# Generate the .xml file
content = [
'ProjectSchemaDefinitions',
{'xmlns': ('clr-namespace:Microsoft.Build.Framework.XamlTypes;'
'assembly=Microsoft.Build.Framework'),
'xmlns:x': 'http://schemas.microsoft.com/winfx/2006/xaml',
'xmlns:sys': 'clr-namespace:System;assembly=mscorlib',
'xmlns:transformCallback':
'Microsoft.Cpp.Dev10.ConvertPropertyCallback'
}
]
for rule in msbuild_rules:
content.extend([
['Rule',
{'Name': rule.rule_name,
'PageTemplate': 'tool',
'DisplayName': rule.display_name,
'Order': '200'
},
['Rule.DataSource',
['DataSource',
{'Persistence': 'ProjectFile',
'ItemType': rule.rule_name
}
]
],
['Rule.Categories',
['Category',
{'Name': 'General'},
['Category.DisplayName',
['sys:String', 'General'],
],
],
['Category',
{'Name': 'Command Line',
'Subtype': 'CommandLine'
},
['Category.DisplayName',
['sys:String', 'Command Line'],
],
],
],
['StringListProperty',
{'Name': 'Inputs',
'Category': 'Command Line',
'IsRequired': 'true',
'Switch': ' '
},
['StringListProperty.DataSource',
['DataSource',
{'Persistence': 'ProjectFile',
'ItemType': rule.rule_name,
'SourceType': 'Item'
}
]
],
],
['StringProperty',
{'Name': 'CommandLineTemplate',
'DisplayName': 'Command Line',
'Visible': 'False',
'IncludeInCommandLine': 'False'
}
],
['DynamicEnumProperty',
{'Name': rule.before_targets,
'Category': 'General',
'EnumProvider': 'Targets',
'IncludeInCommandLine': 'False'
},
['DynamicEnumProperty.DisplayName',
['sys:String', 'Execute Before'],
],
['DynamicEnumProperty.Description',
['sys:String', 'Specifies the targets for the build customization'
' to run before.'
],
],
['DynamicEnumProperty.ProviderSettings',
['NameValuePair',
{'Name': 'Exclude',
'Value': '^%s|^Compute' % rule.before_targets
}
]
],
['DynamicEnumProperty.DataSource',
['DataSource',
{'Persistence': 'ProjectFile',
'HasConfigurationCondition': 'true'
}
]
],
],
['DynamicEnumProperty',
{'Name': rule.after_targets,
'Category': 'General',
'EnumProvider': 'Targets',
'IncludeInCommandLine': 'False'
},
['DynamicEnumProperty.DisplayName',
['sys:String', 'Execute After'],
],
['DynamicEnumProperty.Description',
['sys:String', ('Specifies the targets for the build customization'
' to run after.')
],
],
['DynamicEnumProperty.ProviderSettings',
['NameValuePair',
{'Name': 'Exclude',
'Value': '^%s|^Compute' % rule.after_targets
}
]
],
['DynamicEnumProperty.DataSource',
['DataSource',
{'Persistence': 'ProjectFile',
'ItemType': '',
'HasConfigurationCondition': 'true'
}
]
],
],
['StringListProperty',
{'Name': 'Outputs',
'DisplayName': 'Outputs',
'Visible': 'False',
'IncludeInCommandLine': 'False'
}
],
['StringProperty',
{'Name': 'ExecutionDescription',
'DisplayName': 'Execution Description',
'Visible': 'False',
'IncludeInCommandLine': 'False'
}
],
['StringListProperty',
{'Name': 'AdditionalDependencies',
'DisplayName': 'Additional Dependencies',
'IncludeInCommandLine': 'False',
'Visible': 'false'
}
],
['StringProperty',
{'Subtype': 'AdditionalOptions',
'Name': 'AdditionalOptions',
'Category': 'Command Line'
},
['StringProperty.DisplayName',
['sys:String', 'Additional Options'],
],
['StringProperty.Description',
['sys:String', 'Additional Options'],
],
],
],
['ItemType',
{'Name': rule.rule_name,
'DisplayName': rule.display_name
}
],
['FileExtension',
{'Name': '*' + rule.extension,
'ContentType': rule.rule_name
}
],
['ContentType',
{'Name': rule.rule_name,
'DisplayName': '',
'ItemType': rule.rule_name
}
]
])
easy_xml.WriteXmlIfChanged(content, xml_path, pretty=True, win32=True)
def _GetConfigurationAndPlatform(name, settings):
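# Strip any trailing '_<suffix>' from the configuration name (gyp configs are
# commonly named e.g. 'Debug_x64') and read the platform from the msvs
# settings rather than from the name.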
configuration = name.rsplit('_', 1)[0]
platform = settings.get('msvs_configuration_platform', 'Win32')
return (configuration, platform)
def _GetConfigurationCondition(name, settings):
return (r"'$(Configuration)|$(Platform)'=='%s|%s'" %
_GetConfigurationAndPlatform(name, settings))
def _GetMSBuildProjectConfigurations(configurations):
group = ['ItemGroup', {'Label': 'ProjectConfigurations'}]
for (name, settings) in sorted(configurations.iteritems()):
configuration, platform = _GetConfigurationAndPlatform(name, settings)
designation = '%s|%s' % (configuration, platform)
group.append(
['ProjectConfiguration', {'Include': designation},
['Configuration', configuration],
['Platform', platform]])
return [group]
def _GetMSBuildGlobalProperties(spec, guid, gyp_file_name):
namespace = os.path.splitext(gyp_file_name)[0]
properties = [
['PropertyGroup', {'Label': 'Globals'},
['ProjectGuid', guid],
['Keyword', 'Win32Proj'],
['RootNamespace', namespace],
['IgnoreWarnCompileDuplicatedFilename', 'true'],
]
]
if os.environ.get('PROCESSOR_ARCHITECTURE') == 'AMD64' or \
os.environ.get('PROCESSOR_ARCHITEW6432') == 'AMD64':
properties[0].append(['PreferredToolArchitecture', 'x64'])
if spec.get('msvs_target_platform_version'):
target_platform_version = spec.get('msvs_target_platform_version')
properties[0].append(['WindowsTargetPlatformVersion',
target_platform_version])
if spec.get('msvs_target_platform_minversion'):
target_platform_minversion = spec.get('msvs_target_platform_minversion')
properties[0].append(['WindowsTargetPlatformMinVersion',
target_platform_minversion])
else:
properties[0].append(['WindowsTargetPlatformMinVersion',
target_platform_version])
if spec.get('msvs_enable_winrt'):
properties[0].append(['DefaultLanguage', 'en-US'])
properties[0].append(['AppContainerApplication', 'true'])
if spec.get('msvs_application_type_revision'):
app_type_revision = spec.get('msvs_application_type_revision')
properties[0].append(['ApplicationTypeRevision', app_type_revision])
else:
properties[0].append(['ApplicationTypeRevision', '8.1'])
if spec.get('msvs_enable_winphone'):
properties[0].append(['ApplicationType', 'Windows Phone'])
else:
properties[0].append(['ApplicationType', 'Windows Store'])
platform_name = None
msvs_windows_sdk_version = None
for configuration in spec['configurations'].itervalues():
platform_name = platform_name or _ConfigPlatform(configuration)
msvs_windows_sdk_version = (msvs_windows_sdk_version or
_ConfigWindowsTargetPlatformVersion(configuration))
if platform_name and msvs_windows_sdk_version:
break
if platform_name == 'ARM':
properties[0].append(['WindowsSDKDesktopARMSupport', 'true'])
if msvs_windows_sdk_version:
properties[0].append(['WindowsTargetPlatformVersion',
str(msvs_windows_sdk_version)])
return properties
def _GetMSBuildConfigurationDetails(spec, build_file):
properties = {}
for name, settings in spec['configurations'].iteritems():
msbuild_attributes = _GetMSBuildAttributes(spec, settings, build_file)
condition = _GetConfigurationCondition(name, settings)
character_set = msbuild_attributes.get('CharacterSet')
config_type = msbuild_attributes.get('ConfigurationType')
_AddConditionalProperty(properties, condition, 'ConfigurationType',
config_type)
if config_type == 'Driver':
_AddConditionalProperty(properties, condition, 'DriverType', 'WDM')
_AddConditionalProperty(properties, condition, 'TargetVersion',
_ConfigTargetVersion(settings))
if character_set:
if 'msvs_enable_winrt' not in spec:
_AddConditionalProperty(properties, condition, 'CharacterSet',
character_set)
return _GetMSBuildPropertyGroup(spec, 'Configuration', properties)
def _GetMSBuildLocalProperties(msbuild_toolset):
# Currently the only local property we support is PlatformToolset
properties = {}
if msbuild_toolset:
properties = [
['PropertyGroup', {'Label': 'Locals'},
['PlatformToolset', msbuild_toolset],
]
]
return properties
def _GetMSBuildPropertySheets(configurations):
user_props = r'$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props'
additional_props = {}
props_specified = False
for name, settings in sorted(configurations.iteritems()):
configuration = _GetConfigurationCondition(name, settings)
if 'msbuild_props' in settings:
additional_props[configuration] = _FixPaths(settings['msbuild_props'])
props_specified = True
else:
additional_props[configuration] = ''
if not props_specified:
return [
['ImportGroup',
{'Label': 'PropertySheets'},
['Import',
{'Project': user_props,
'Condition': "exists('%s')" % user_props,
'Label': 'LocalAppDataPlatform'
}
]
]
]
else:
sheets = []
for condition, props in additional_props.iteritems():
import_group = [
'ImportGroup',
{'Label': 'PropertySheets',
'Condition': condition
},
['Import',
{'Project': user_props,
'Condition': "exists('%s')" % user_props,
'Label': 'LocalAppDataPlatform'
}
]
]
for props_file in props:
import_group.append(['Import', {'Project':props_file}])
sheets.append(import_group)
return sheets
def _ConvertMSVSBuildAttributes(spec, config, build_file):
config_type = _GetMSVSConfigurationType(spec, build_file)
msvs_attributes = _GetMSVSAttributes(spec, config, config_type)
msbuild_attributes = {}
for a in msvs_attributes:
if a in ['IntermediateDirectory', 'OutputDirectory']:
directory = MSVSSettings.ConvertVCMacrosToMSBuild(msvs_attributes[a])
if not directory.endswith('\\'):
directory += '\\'
msbuild_attributes[a] = directory
elif a == 'CharacterSet':
msbuild_attributes[a] = _ConvertMSVSCharacterSet(msvs_attributes[a])
elif a == 'ConfigurationType':
msbuild_attributes[a] = _ConvertMSVSConfigurationType(msvs_attributes[a])
else:
print 'Warning: Do not know how to convert MSVS attribute ' + a
return msbuild_attributes
def _ConvertMSVSCharacterSet(char_set):
if char_set.isdigit():
char_set = {
'0': 'MultiByte',
'1': 'Unicode',
'2': 'MultiByte',
}[char_set]
return char_set
def _ConvertMSVSConfigurationType(config_type):
if config_type.isdigit():
config_type = {
'1': 'Application',
'2': 'DynamicLibrary',
'4': 'StaticLibrary',
'5': 'Driver',
'10': 'Utility'
}[config_type]
return config_type
def _GetMSBuildAttributes(spec, config, build_file):
if 'msbuild_configuration_attributes' not in config:
msbuild_attributes = _ConvertMSVSBuildAttributes(spec, config, build_file)
else:
config_type = _GetMSVSConfigurationType(spec, build_file)
config_type = _ConvertMSVSConfigurationType(config_type)
msbuild_attributes = config.get('msbuild_configuration_attributes', {})
msbuild_attributes.setdefault('ConfigurationType', config_type)
output_dir = msbuild_attributes.get('OutputDirectory',
'$(SolutionDir)$(Configuration)')
msbuild_attributes['OutputDirectory'] = _FixPath(output_dir) + '\\'
if 'IntermediateDirectory' not in msbuild_attributes:
intermediate = _FixPath('$(Configuration)') + '\\'
msbuild_attributes['IntermediateDirectory'] = intermediate
if 'CharacterSet' in msbuild_attributes:
msbuild_attributes['CharacterSet'] = _ConvertMSVSCharacterSet(
msbuild_attributes['CharacterSet'])
if 'TargetName' not in msbuild_attributes:
prefix = spec.get('product_prefix', '')
product_name = spec.get('product_name', '$(ProjectName)')
target_name = prefix + product_name
msbuild_attributes['TargetName'] = target_name
if spec.get('msvs_external_builder'):
external_out_dir = spec.get('msvs_external_builder_out_dir', '.')
msbuild_attributes['OutputDirectory'] = _FixPath(external_out_dir) + '\\'
# Make sure that 'TargetPath' matches 'Lib.OutputFile' or 'Link.OutputFile'
# (depending on the tool used) to avoid MSB8012 warning.
msbuild_tool_map = {
'executable': 'Link',
'shared_library': 'Link',
'loadable_module': 'Link',
'windows_driver': 'Link',
'static_library': 'Lib',
}
msbuild_tool = msbuild_tool_map.get(spec['type'])
if msbuild_tool:
msbuild_settings = config['finalized_msbuild_settings']
out_file = msbuild_settings[msbuild_tool].get('OutputFile')
if out_file:
msbuild_attributes['TargetPath'] = _FixPath(out_file)
target_ext = msbuild_settings[msbuild_tool].get('TargetExt')
if target_ext:
msbuild_attributes['TargetExt'] = target_ext
return msbuild_attributes
def _GetMSBuildConfigurationGlobalProperties(spec, configurations, build_file):
# TODO(jeanluc) We could optimize out the following and do it only if
# there are actions.
# TODO(jeanluc) Handle the equivalent of setting 'CYGWIN=nontsec'.
new_paths = []
cygwin_dirs = spec.get('msvs_cygwin_dirs', ['.'])[0]
if cygwin_dirs:
cyg_path = '$(MSBuildProjectDirectory)\\%s\\bin\\' % _FixPath(cygwin_dirs)
new_paths.append(cyg_path)
# TODO(jeanluc) Change the convention to have both a cygwin_dir and a
# python_dir.
python_path = cyg_path.replace('cygwin\\bin', 'python_26')
new_paths.append(python_path)
if new_paths:
new_paths = '$(ExecutablePath);' + ';'.join(new_paths)
properties = {}
for (name, configuration) in sorted(configurations.iteritems()):
condition = _GetConfigurationCondition(name, configuration)
attributes = _GetMSBuildAttributes(spec, configuration, build_file)
msbuild_settings = configuration['finalized_msbuild_settings']
_AddConditionalProperty(properties, condition, 'IntDir',
attributes['IntermediateDirectory'])
_AddConditionalProperty(properties, condition, 'OutDir',
attributes['OutputDirectory'])
_AddConditionalProperty(properties, condition, 'TargetName',
attributes['TargetName'])
if attributes.get('TargetPath'):
_AddConditionalProperty(properties, condition, 'TargetPath',
attributes['TargetPath'])
if attributes.get('TargetExt'):
_AddConditionalProperty(properties, condition, 'TargetExt',
attributes['TargetExt'])
if new_paths:
_AddConditionalProperty(properties, condition, 'ExecutablePath',
new_paths)
tool_settings = msbuild_settings.get('', {})
for name, value in sorted(tool_settings.iteritems()):
formatted_value = _GetValueFormattedForMSBuild('', name, value)
_AddConditionalProperty(properties, condition, name, formatted_value)
return _GetMSBuildPropertyGroup(spec, None, properties)
def _AddConditionalProperty(properties, condition, name, value):
"""Adds a property / conditional value pair to a dictionary.
Arguments:
properties: The dictionary to be modified. The key is the name of the
property. The value is itself a dictionary; its key is the value and
the value a list of condition for which this value is true.
condition: The condition under which the named property has the value.
name: The name of the property.
value: The value of the property.
"""
if name not in properties:
properties[name] = {}
values = properties[name]
if value not in values:
values[value] = []
conditions = values[value]
conditions.append(condition)
# Regex for msvs variable references ( i.e. $(FOO) ).
MSVS_VARIABLE_REFERENCE = re.compile(r'\$\(([a-zA-Z_][a-zA-Z0-9_]*)\)')
def _GetMSBuildPropertyGroup(spec, label, properties):
"""Returns a PropertyGroup definition for the specified properties.
Arguments:
spec: The target project dict.
label: An optional label for the PropertyGroup.
properties: The dictionary to be converted. The key is the name of the
property. The value is itself a dictionary; its key is the value and
the value a list of condition for which this value is true.
"""
group = ['PropertyGroup']
if label:
group.append({'Label': label})
num_configurations = len(spec['configurations'])
def GetEdges(node):
# Use a definition of edges such that user_of_variable -> used_variable.
# This happens to be easier in this case, since a variable's
# definition contains all variables it references in a single string.
edges = set()
for value in sorted(properties[node].keys()):
# Add to edges all $(...) references to variables.
#
# Variable references that refer to names not in properties are excluded.
# These can exist, for instance, to refer to built-in definitions like
# $(SolutionDir).
#
# Self references are ignored. Self reference is used in a few places to
# append to the default value. I.e. PATH=$(PATH);other_path
edges.update(set([v for v in MSVS_VARIABLE_REFERENCE.findall(value)
if v in properties and v != node]))
return edges
properties_ordered = gyp.common.TopologicallySorted(
properties.keys(), GetEdges)
# Walk properties in the reverse of a topological sort on
# user_of_variable -> used_variable as this ensures variables are
# defined before they are used.
# NOTE: reverse(topsort(DAG)) = topsort(reverse_edges(DAG))
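# Illustrative example (not part of the original comments): if the OutDir
# value references $(IntDir), IntDir is emitted before OutDir.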
for name in reversed(properties_ordered):
values = properties[name]
for value, conditions in sorted(values.iteritems()):
if len(conditions) == num_configurations:
# If the value is the same for all configurations,
# just add one unconditional entry.
group.append([name, value])
else:
for condition in conditions:
group.append([name, {'Condition': condition}, value])
return [group]
def _GetMSBuildToolSettingsSections(spec, configurations):
groups = []
for (name, configuration) in sorted(configurations.iteritems()):
msbuild_settings = configuration['finalized_msbuild_settings']
group = ['ItemDefinitionGroup',
{'Condition': _GetConfigurationCondition(name, configuration)}
]
for tool_name, tool_settings in sorted(msbuild_settings.iteritems()):
# Skip the tool named '' which is a holder of global settings handled
# by _GetMSBuildConfigurationGlobalProperties.
if tool_name:
if tool_settings:
tool = [tool_name]
for name, value in sorted(tool_settings.iteritems()):
formatted_value = _GetValueFormattedForMSBuild(tool_name, name,
value)
tool.append([name, formatted_value])
group.append(tool)
groups.append(group)
return groups
def _FinalizeMSBuildSettings(spec, configuration):
if 'msbuild_settings' in configuration:
converted = False
msbuild_settings = configuration['msbuild_settings']
MSVSSettings.ValidateMSBuildSettings(msbuild_settings)
else:
converted = True
msvs_settings = configuration.get('msvs_settings', {})
msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(msvs_settings)
include_dirs, midl_include_dirs, resource_include_dirs = \
_GetIncludeDirs(configuration)
libraries = _GetLibraries(spec)
library_dirs = _GetLibraryDirs(configuration)
out_file, _, msbuild_tool = _GetOutputFilePathAndTool(spec, msbuild=True)
target_ext = _GetOutputTargetExt(spec)
defines = _GetDefines(configuration)
if converted:
# Visual Studio 2010 has TR1
defines = [d for d in defines if d != '_HAS_TR1=0']
# Warn of ignored settings
ignored_settings = ['msvs_tool_files']
for ignored_setting in ignored_settings:
value = configuration.get(ignored_setting)
if value:
print ('Warning: The automatic conversion to MSBuild does not handle '
'%s. Ignoring setting of %s' % (ignored_setting, str(value)))
defines = [_EscapeCppDefineForMSBuild(d) for d in defines]
disabled_warnings = _GetDisabledWarnings(configuration)
prebuild = configuration.get('msvs_prebuild')
postbuild = configuration.get('msvs_postbuild')
def_file = _GetModuleDefinition(spec)
precompiled_header = configuration.get('msvs_precompiled_header')
# Add the information to the appropriate tool
# TODO(jeanluc) We could optimize and generate these settings only if
# the corresponding files are found, e.g. don't generate ResourceCompile
# if you don't have any resources.
_ToolAppend(msbuild_settings, 'ClCompile',
'AdditionalIncludeDirectories', include_dirs)
_ToolAppend(msbuild_settings, 'Midl',
'AdditionalIncludeDirectories', midl_include_dirs)
_ToolAppend(msbuild_settings, 'ResourceCompile',
'AdditionalIncludeDirectories', resource_include_dirs)
# Add in libraries, note that even for empty libraries, we want this
# set, to prevent inheriting default libraries from the environment.
_ToolSetOrAppend(msbuild_settings, 'Link', 'AdditionalDependencies',
libraries)
_ToolAppend(msbuild_settings, 'Link', 'AdditionalLibraryDirectories',
library_dirs)
if out_file:
_ToolAppend(msbuild_settings, msbuild_tool, 'OutputFile', out_file,
only_if_unset=True)
if target_ext:
_ToolAppend(msbuild_settings, msbuild_tool, 'TargetExt', target_ext,
only_if_unset=True)
# Add defines.
_ToolAppend(msbuild_settings, 'ClCompile',
'PreprocessorDefinitions', defines)
_ToolAppend(msbuild_settings, 'ResourceCompile',
'PreprocessorDefinitions', defines)
# Add disabled warnings.
_ToolAppend(msbuild_settings, 'ClCompile',
'DisableSpecificWarnings', disabled_warnings)
# Turn on precompiled headers if appropriate.
if precompiled_header:
precompiled_header = os.path.split(precompiled_header)[1]
_ToolAppend(msbuild_settings, 'ClCompile', 'PrecompiledHeader', 'Use')
_ToolAppend(msbuild_settings, 'ClCompile',
'PrecompiledHeaderFile', precompiled_header)
_ToolAppend(msbuild_settings, 'ClCompile',
'ForcedIncludeFiles', [precompiled_header])
else:
_ToolAppend(msbuild_settings, 'ClCompile', 'PrecompiledHeader', 'NotUsing')
# Turn off WinRT compilation
_ToolAppend(msbuild_settings, 'ClCompile', 'CompileAsWinRT', 'false')
# Turn on import libraries if appropriate
if spec.get('msvs_requires_importlibrary'):
_ToolAppend(msbuild_settings, '', 'IgnoreImportLibrary', 'false')
# Loadable modules don't generate import libraries;
# tell dependent projects to not expect one.
if spec['type'] == 'loadable_module':
_ToolAppend(msbuild_settings, '', 'IgnoreImportLibrary', 'true')
# Set the module definition file if any.
if def_file:
_ToolAppend(msbuild_settings, 'Link', 'ModuleDefinitionFile', def_file)
configuration['finalized_msbuild_settings'] = msbuild_settings
if prebuild:
_ToolAppend(msbuild_settings, 'PreBuildEvent', 'Command', prebuild)
if postbuild:
_ToolAppend(msbuild_settings, 'PostBuildEvent', 'Command', postbuild)
def _GetValueFormattedForMSBuild(tool_name, name, value):
if type(value) == list:
    # For some settings, VS2010 does not automatically extend the settings
# TODO(jeanluc) Is this what we want?
if name in ['AdditionalIncludeDirectories',
'AdditionalLibraryDirectories',
'AdditionalOptions',
'DelayLoadDLLs',
'DisableSpecificWarnings',
'PreprocessorDefinitions']:
value.append('%%(%s)' % name)
# For most tools, entries in a list should be separated with ';' but some
# settings use a space. Check for those first.
exceptions = {
'ClCompile': ['AdditionalOptions'],
'Link': ['AdditionalOptions'],
'Lib': ['AdditionalOptions']}
if tool_name in exceptions and name in exceptions[tool_name]:
char = ' '
else:
char = ';'
formatted_value = char.join(
[MSVSSettings.ConvertVCMacrosToMSBuild(i) for i in value])
else:
formatted_value = MSVSSettings.ConvertVCMacrosToMSBuild(value)
return formatted_value
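# Illustrative only (not part of the original generator); assuming the values
# contain no VS macros that ConvertVCMacrosToMSBuild would rewrite:
#
#   _GetValueFormattedForMSBuild('ClCompile', 'AdditionalOptions', ['/MP', '/bigobj'])
#       -> '/MP /bigobj %(AdditionalOptions)'        (space-joined exception)
#   _GetValueFormattedForMSBuild('ClCompile', 'PreprocessorDefinitions', ['FOO=1', 'BAR'])
#       -> 'FOO=1;BAR;%(PreprocessorDefinitions)'    (';'-joined, inherits defaults)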
def _VerifySourcesExist(sources, root_dir):
"""Verifies that all source files exist on disk.
Checks that all regular source files, i.e. not created at run time,
exist on disk. Missing files cause needless recompilation but no otherwise
visible errors.
Arguments:
sources: A recursive list of Filter/file names.
root_dir: The root directory for the relative path names.
Returns:
A list of source files that cannot be found on disk.
"""
missing_sources = []
for source in sources:
if isinstance(source, MSVSProject.Filter):
missing_sources.extend(_VerifySourcesExist(source.contents, root_dir))
else:
if '$' not in source:
full_path = os.path.join(root_dir, source)
if not os.path.exists(full_path):
missing_sources.append(full_path)
return missing_sources
def _GetMSBuildSources(spec, sources, exclusions, rule_dependencies,
extension_to_rule_name, actions_spec,
sources_handled_by_action, list_excluded):
groups = ['none', 'masm', 'midl', 'include', 'compile', 'resource', 'rule',
'rule_dependency']
grouped_sources = {}
for g in groups:
grouped_sources[g] = []
_AddSources2(spec, sources, exclusions, grouped_sources,
rule_dependencies, extension_to_rule_name,
sources_handled_by_action, list_excluded)
sources = []
for g in groups:
if grouped_sources[g]:
sources.append(['ItemGroup'] + grouped_sources[g])
if actions_spec:
sources.append(['ItemGroup'] + actions_spec)
return sources
def _AddSources2(spec, sources, exclusions, grouped_sources,
rule_dependencies, extension_to_rule_name,
sources_handled_by_action,
list_excluded):
extensions_excluded_from_precompile = []
for source in sources:
if isinstance(source, MSVSProject.Filter):
_AddSources2(spec, source.contents, exclusions, grouped_sources,
rule_dependencies, extension_to_rule_name,
sources_handled_by_action,
list_excluded)
else:
      if source not in sources_handled_by_action:
detail = []
excluded_configurations = exclusions.get(source, [])
if len(excluded_configurations) == len(spec['configurations']):
detail.append(['ExcludedFromBuild', 'true'])
else:
for config_name, configuration in sorted(excluded_configurations):
condition = _GetConfigurationCondition(config_name, configuration)
detail.append(['ExcludedFromBuild',
{'Condition': condition},
'true'])
# Add precompile if needed
for config_name, configuration in spec['configurations'].iteritems():
precompiled_source = configuration.get('msvs_precompiled_source', '')
if precompiled_source != '':
precompiled_source = _FixPath(precompiled_source)
if not extensions_excluded_from_precompile:
# If the precompiled header is generated by a C source, we must
# not try to use it for C++ sources, and vice versa.
basename, extension = os.path.splitext(precompiled_source)
if extension == '.c':
extensions_excluded_from_precompile = ['.cc', '.cpp', '.cxx']
else:
extensions_excluded_from_precompile = ['.c']
if precompiled_source == source:
condition = _GetConfigurationCondition(config_name, configuration)
detail.append(['PrecompiledHeader',
{'Condition': condition},
'Create'
])
else:
# Turn off precompiled header usage for source files of a
# different type than the file that generated the
# precompiled header.
for extension in extensions_excluded_from_precompile:
if source.endswith(extension):
detail.append(['PrecompiledHeader', ''])
detail.append(['ForcedIncludeFiles', ''])
group, element = _MapFileToMsBuildSourceType(source, rule_dependencies,
extension_to_rule_name)
grouped_sources[group].append([element, {'Include': source}] + detail)
def _GetMSBuildProjectReferences(project):
references = []
if project.dependencies:
group = ['ItemGroup']
for dependency in project.dependencies:
guid = dependency.guid
project_dir = os.path.split(project.path)[0]
relative_path = gyp.common.RelativePath(dependency.path, project_dir)
project_ref = ['ProjectReference',
{'Include': relative_path},
['Project', guid],
['ReferenceOutputAssembly', 'false']
]
for config in dependency.spec.get('configurations', {}).itervalues():
# If it's disabled in any config, turn it off in the reference.
if config.get('msvs_2010_disable_uldi_when_referenced', 0):
project_ref.append(['UseLibraryDependencyInputs', 'false'])
break
group.append(project_ref)
references.append(group)
return references
def _GenerateMSBuildProject(project, options, version, generator_flags):
spec = project.spec
configurations = spec['configurations']
project_dir, project_file_name = os.path.split(project.path)
gyp.common.EnsureDirExists(project.path)
# Prepare list of sources and excluded sources.
gyp_path = _NormalizedSource(project.build_file)
relative_path_of_gyp_file = gyp.common.RelativePath(gyp_path, project_dir)
gyp_file = os.path.split(project.build_file)[1]
sources, excluded_sources = _PrepareListOfSources(spec, generator_flags,
gyp_file)
# Add rules.
actions_to_add = {}
props_files_of_rules = set()
targets_files_of_rules = set()
rule_dependencies = set()
extension_to_rule_name = {}
list_excluded = generator_flags.get('msvs_list_excluded_files', True)
# Don't generate rules if we are using an external builder like ninja.
if not spec.get('msvs_external_builder'):
_GenerateRulesForMSBuild(project_dir, options, spec,
sources, excluded_sources,
props_files_of_rules, targets_files_of_rules,
actions_to_add, rule_dependencies,
extension_to_rule_name)
else:
rules = spec.get('rules', [])
_AdjustSourcesForRules(rules, sources, excluded_sources, True)
sources, excluded_sources, excluded_idl = (
_AdjustSourcesAndConvertToFilterHierarchy(spec, options,
project_dir, sources,
excluded_sources,
list_excluded, version))
# Don't add actions if we are using an external builder like ninja.
if not spec.get('msvs_external_builder'):
_AddActions(actions_to_add, spec, project.build_file)
_AddCopies(actions_to_add, spec)
# NOTE: this stanza must appear after all actions have been decided.
  # Don't exclude sources with actions attached, or they won't run.
excluded_sources = _FilterActionsFromExcluded(
excluded_sources, actions_to_add)
exclusions = _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl)
actions_spec, sources_handled_by_action = _GenerateActionsForMSBuild(
spec, actions_to_add)
_GenerateMSBuildFiltersFile(project.path + '.filters', sources,
rule_dependencies,
extension_to_rule_name)
missing_sources = _VerifySourcesExist(sources, project_dir)
for configuration in configurations.itervalues():
_FinalizeMSBuildSettings(spec, configuration)
# Add attributes to root element
import_default_section = [
['Import', {'Project': r'$(VCTargetsPath)\Microsoft.Cpp.Default.props'}]]
import_cpp_props_section = [
['Import', {'Project': r'$(VCTargetsPath)\Microsoft.Cpp.props'}]]
import_cpp_targets_section = [
['Import', {'Project': r'$(VCTargetsPath)\Microsoft.Cpp.targets'}]]
import_masm_props_section = [
['Import',
{'Project': r'$(VCTargetsPath)\BuildCustomizations\masm.props'}]]
import_masm_targets_section = [
['Import',
{'Project': r'$(VCTargetsPath)\BuildCustomizations\masm.targets'}]]
macro_section = [['PropertyGroup', {'Label': 'UserMacros'}]]
content = [
'Project',
{'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003',
'ToolsVersion': version.ProjectVersion(),
'DefaultTargets': 'Build'
}]
content += _GetMSBuildProjectConfigurations(configurations)
content += _GetMSBuildGlobalProperties(spec, project.guid, project_file_name)
content += import_default_section
content += _GetMSBuildConfigurationDetails(spec, project.build_file)
if spec.get('msvs_enable_winphone'):
content += _GetMSBuildLocalProperties('v120_wp81')
else:
content += _GetMSBuildLocalProperties(project.msbuild_toolset)
content += import_cpp_props_section
content += import_masm_props_section
content += _GetMSBuildExtensions(props_files_of_rules)
content += _GetMSBuildPropertySheets(configurations)
content += macro_section
content += _GetMSBuildConfigurationGlobalProperties(spec, configurations,
project.build_file)
content += _GetMSBuildToolSettingsSections(spec, configurations)
content += _GetMSBuildSources(
spec, sources, exclusions, rule_dependencies, extension_to_rule_name,
actions_spec, sources_handled_by_action, list_excluded)
content += _GetMSBuildProjectReferences(project)
content += import_cpp_targets_section
content += import_masm_targets_section
content += _GetMSBuildExtensionTargets(targets_files_of_rules)
if spec.get('msvs_external_builder'):
content += _GetMSBuildExternalBuilderTargets(spec)
# TODO(jeanluc) File a bug to get rid of runas. We had in MSVS:
# has_run_as = _WriteMSVSUserFile(project.path, version, spec)
easy_xml.WriteXmlIfChanged(content, project.path, pretty=True, win32=True)
return missing_sources
def _GetMSBuildExternalBuilderTargets(spec):
"""Return a list of MSBuild targets for external builders.
The "Build" and "Clean" targets are always generated. If the spec contains
'msvs_external_builder_clcompile_cmd', then the "ClCompile" target will also
be generated, to support building selected C/C++ files.
Arguments:
spec: The gyp target spec.
Returns:
List of MSBuild 'Target' specs.
"""
build_cmd = _BuildCommandLineForRuleRaw(
spec, spec['msvs_external_builder_build_cmd'],
False, False, False, False, attached_to="")
build_target = ['Target', {'Name': 'Build'}]
build_target.append(['Exec', {'Command': build_cmd}])
clean_cmd = _BuildCommandLineForRuleRaw(
spec, spec['msvs_external_builder_clean_cmd'],
False, False, False, False, attached_to="")
clean_target = ['Target', {'Name': 'Clean'}]
clean_target.append(['Exec', {'Command': clean_cmd}])
targets = [build_target, clean_target]
if spec.get('msvs_external_builder_clcompile_cmd'):
clcompile_cmd = _BuildCommandLineForRuleRaw(
spec, spec['msvs_external_builder_clcompile_cmd'],
False, False, False, False, attached_to="")
clcompile_target = ['Target', {'Name': 'ClCompile'}]
clcompile_target.append(['Exec', {'Command': clcompile_cmd}])
targets.append(clcompile_target)
return targets
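# Rough shape of the specs returned above (illustration only; the 'ninja.exe'
# command strings are invented, real values come from the gyp spec):
#
#   [['Target', {'Name': 'Build'}, ['Exec', {'Command': 'ninja.exe -C out/Release'}]],
#    ['Target', {'Name': 'Clean'}, ['Exec', {'Command': 'ninja.exe -C out/Release -t clean'}]]]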
def _GetMSBuildExtensions(props_files_of_rules):
extensions = ['ImportGroup', {'Label': 'ExtensionSettings'}]
for props_file in props_files_of_rules:
extensions.append(['Import', {'Project': props_file}])
return [extensions]
def _GetMSBuildExtensionTargets(targets_files_of_rules):
targets_node = ['ImportGroup', {'Label': 'ExtensionTargets'}]
for targets_file in sorted(targets_files_of_rules):
targets_node.append(['Import', {'Project': targets_file}])
return [targets_node]
def _GenerateActionsForMSBuild(spec, actions_to_add):
"""Add actions accumulated into an actions_to_add, merging as needed.
Arguments:
spec: the target project dict
actions_to_add: dictionary keyed on input name, which maps to a list of
dicts describing the actions attached to that input file.
Returns:
A pair of (action specification, the sources handled by this action).
"""
sources_handled_by_action = OrderedSet()
actions_spec = []
for primary_input, actions in actions_to_add.iteritems():
inputs = OrderedSet()
outputs = OrderedSet()
descriptions = []
commands = []
for action in actions:
inputs.update(OrderedSet(action['inputs']))
outputs.update(OrderedSet(action['outputs']))
descriptions.append(action['description'])
cmd = action['command']
# For most actions, add 'call' so that actions that invoke batch files
# return and continue executing. msbuild_use_call provides a way to
# disable this but I have not seen any adverse effect from doing that
# for everything.
if action.get('msbuild_use_call', True):
cmd = 'call ' + cmd
commands.append(cmd)
# Add the custom build action for one input file.
description = ', and also '.join(descriptions)
# We can't join the commands simply with && because the command line will
# get too long. See also _AddActions: cygwin's setup_env mustn't be called
# for every invocation or the command that sets the PATH will grow too
# long.
command = '\r\n'.join([c + '\r\nif %errorlevel% neq 0 exit /b %errorlevel%'
for c in commands])
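    # For two commands the joined string expands to something like the following
    # batch fragment (illustration only; 'gen_a.bat' / 'gen_b.bat' are made up):
    #
    #   call gen_a.bat
    #   if %errorlevel% neq 0 exit /b %errorlevel%
    #   call gen_b.bat
    #   if %errorlevel% neq 0 exit /b %errorlevel%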
_AddMSBuildAction(spec,
primary_input,
inputs,
outputs,
command,
description,
sources_handled_by_action,
actions_spec)
return actions_spec, sources_handled_by_action
def _AddMSBuildAction(spec, primary_input, inputs, outputs, cmd, description,
sources_handled_by_action, actions_spec):
command = MSVSSettings.ConvertVCMacrosToMSBuild(cmd)
primary_input = _FixPath(primary_input)
inputs_array = _FixPaths(inputs)
outputs_array = _FixPaths(outputs)
additional_inputs = ';'.join([i for i in inputs_array
if i != primary_input])
outputs = ';'.join(outputs_array)
sources_handled_by_action.add(primary_input)
action_spec = ['CustomBuild', {'Include': primary_input}]
action_spec.extend(
# TODO(jeanluc) 'Document' for all or just if as_sources?
[['FileType', 'Document'],
['Command', command],
['Message', description],
['Outputs', outputs]
])
if additional_inputs:
action_spec.append(['AdditionalInputs', additional_inputs])
actions_spec.append(action_spec)
| hkernbach/arangodb | 3rdParty/V8/v5.7.492.77/tools/gyp/pylib/gyp/generator/msvs.py | Python | apache-2.0 | 133,929 | 0.009796 |
"""
Generate simulated logs, placing new items into the queue.
Process the Queue, generating summary data and
appending entries to the log store
"""
import analyzer
import simulatedLogs
# simulate_logs
simulatedLogs.simulate()
# process queue
analyzer.complete()
| rgardler/acs-logging-test | src/runSimulation.py | Python | apache-2.0 | 269 | 0.003717 |
"""
03-delayed-calls.py - Calling a function once, after a given delay.
If you want to set up a callback once in the future, the CallAfter
object is very easy to use. You just give it the function name, the
time to wait before making the call and an optional argument.
"""
from pyo import *
s = Server().boot()
# A four-streams oscillator to produce a chord.
amp = Fader(fadein=0.005, fadeout=0.05, mul=0.2).play()
osc = SineLoop(freq=[0, 0, 0, 0], feedback=0.05, mul=amp)
rev = WGVerb(osc.mix(2), feedback=0.8, cutoff=4000, bal=0.2).out()
# A function to change the oscillator's frequencies and start the envelope.
def set_osc_freqs(notes):
print(notes)
osc.set(attr="freq", value=midiToHz(list(notes)), port=0.005)
amp.play()
# Initial chord.
set_osc_freqs([60, 64, 67, 72])
# We must be sure that our CallAfter object stays alive as long as
# it waits to call its function. If we don't keep a reference to it,
# it will be garbage-collected before doing its job.
call = None
def new_notes(notes):
global call # Use a global variable.
amp.stop() # Start the fadeout of the current notes...
# ... then, 50 ms later, call the function that change the frequencies.
call = CallAfter(set_osc_freqs, time=0.05, arg=notes)
# The sequence of events. We use a tuple for the list of frequencies
# because PyoObjects spread lists as argument over all their internal
# streams. This means that with a list of frequencies, only the first
# frequency would be passed to the callback of the first (and single)
# stream (a list of functions at first argument would create a
# multi-stream object). A tuple is treated as a single argument.
c1 = CallAfter(new_notes, time=0.95, arg=(60, 64, 67, 69))
c2 = CallAfter(new_notes, time=1.95, arg=(60, 65, 69, 76))
c3 = CallAfter(new_notes, time=2.95, arg=(62, 65, 69, 74))
c4 = CallAfter(new_notes, time=3.45, arg=(59, 65, 67, 74))
c5 = CallAfter(new_notes, time=3.95, arg=(60, 64, 67, 72))
# The last event activates the fadeout of the amplitude envelope.
c6 = CallAfter(amp.stop, time=5.95, arg=None)
s.gui(locals())
| belangeo/pyo | pyo/examples/09-callbacks/03-delayed-calls.py | Python | lgpl-3.0 | 2,090 | 0.000478 |
import StkPortInterfaces.StkDATControlIf
import PortInterface.ProvidedPort
import re
import StkParser.StkPortCriteria
import Components.IComponent
import Parser.IPortCriteria
class StkCHeaderProvDATControlCriteria(StkParser.StkPortCriteria.StkPortCriteria):
"""STK C Header file provided DATControl criteria"""
def execute(self, inpTextContent, inoutIComponent):
## Bouml preserved body begin 000389EF
for datControl in re.findall(r'\#define\s+mDATControl(\w+)\s*\(\s*\w+\s*\)', inpTextContent):
pif = self.getPortInterfaceFactory()
dtf = self.getDataTypeFactory()
clSrvIntIf = pif.getStkDATControlIf(datControl, dtf)
provPort = PortInterface.ProvidedPort.ProvidedPort(clSrvIntIf)
provPort.setName(datControl)
provPort.setInterface(clSrvIntIf)
inoutIComponent.addPort(provPort)
## Bouml preserved body end 000389EF
def __init__(self):
super(StkCHeaderProvDATControlCriteria, self).__init__()
pass
| dmanev/ArchExtractor | ArchExtractor/umlgen/Specific/STK/StkParser/StkCFileCriteria/StkCHeaderProvDATControlCriteria.py | Python | gpl-3.0 | 1,064 | 0.007519 |
# Copyright 2020 The Kubeflow Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pathlib
from kfp import components
from kfp import dsl
from kfp import compiler
component_op_1 = components.load_component_from_text("""
name: Write to GCS
inputs:
- {name: text, type: String, description: 'Content to be written to GCS'}
outputs:
- {name: output_gcs_path, type: GCSPath, description: 'GCS file path'}
implementation:
container:
image: google/cloud-sdk:slim
command:
- sh
- -c
- |
set -e -x
echo "$0" | gsutil cp - "$1"
- {inputValue: text}
- {outputUri: output_gcs_path}
""")
component_op_2 = components.load_component_from_text("""
name: Read from GCS
inputs:
- {name: input_gcs_path, type: GCSPath, description: 'GCS file path'}
implementation:
container:
image: google/cloud-sdk:slim
command:
- sh
- -c
- |
set -e -x
gsutil cat "$0"
- {inputUri: input_gcs_path}
""")
@dsl.pipeline(name='simple-two-step-pipeline', pipeline_root='dummy_root')
def my_pipeline(text: str = 'Hello world!'):
component_1 = component_op_1(text=text).set_display_name('Producer')
component_2 = component_op_2(
input_gcs_path=component_1.outputs['output_gcs_path'])
component_2.set_display_name('Consumer')
if __name__ == '__main__':
compiler.Compiler().compile(
pipeline_func=my_pipeline,
pipeline_parameters={'text': 'Hello KFP!'},
package_path=__file__.replace('.py', '.json'))
| kubeflow/pipelines | sdk/python/kfp/compiler_cli_tests/test_data/two_step_pipeline.py | Python | apache-2.0 | 2,004 | 0 |
# License MIT (https://opensource.org/licenses/MIT).
from . import models
from . import controllers
| it-projects-llc/pos-addons | wechat/__init__.py | Python | mit | 100 | 0 |
"""
CherryPy implements a simple caching system as a pluggable Tool. This tool tries
to be an (in-process) HTTP/1.1-compliant cache. It's not quite there yet, but
it's probably good enough for most sites.
In general, GET responses are cached (along with selecting headers) and, if
another request arrives for the same resource, the caching Tool will return 304
Not Modified if possible, or serve the cached response otherwise. It also sets
request.cached to True if serving a cached representation, and sets
request.cacheable to False (so it doesn't get cached again).
If POST, PUT, or DELETE requests are made for a cached resource, they invalidate
(delete) any cached response.
Usage
=====
Configuration file example::
[/]
tools.caching.on = True
tools.caching.delay = 3600
You may use a class other than the default
:class:`MemoryCache<cherrypy.lib.caching.MemoryCache>` by supplying the config
entry ``cache_class``; supply the full dotted name of the replacement class
as the config value. It must implement the basic methods ``get``, ``put``,
``delete``, and ``clear``.
You may set any attribute, including overriding methods, on the cache
instance by providing them in config. The above sets the
:attr:`delay<cherrypy.lib.caching.MemoryCache.delay>` attribute, for example.
"""
import datetime
import sys
import threading
import time
import cherrypy
from cherrypy.lib import cptools, httputil
from cherrypy._cpcompat import copyitems, ntob, set_daemon, sorted
class Cache(object):
"""Base class for Cache implementations."""
def get(self):
"""Return the current variant if in the cache, else None."""
        raise NotImplementedError
def put(self, obj, size):
"""Store the current variant in the cache."""
        raise NotImplementedError
def delete(self):
"""Remove ALL cached variants of the current resource."""
        raise NotImplementedError
def clear(self):
"""Reset the cache to its initial, empty state."""
        raise NotImplementedError
# ------------------------------- Memory Cache ------------------------------- #
class AntiStampedeCache(dict):
"""A storage system for cached items which reduces stampede collisions."""
def wait(self, key, timeout=5, debug=False):
"""Return the cached value for the given key, or None.
If timeout is not None, and the value is already
being calculated by another thread, wait until the given timeout has
elapsed. If the value is available before the timeout expires, it is
returned. If not, None is returned, and a sentinel placed in the cache
to signal other threads to wait.
If timeout is None, no waiting is performed nor sentinels used.
"""
value = self.get(key)
if isinstance(value, threading._Event):
if timeout is None:
# Ignore the other thread and recalc it ourselves.
if debug:
cherrypy.log('No timeout', 'TOOLS.CACHING')
return None
# Wait until it's done or times out.
if debug:
cherrypy.log('Waiting up to %s seconds' % timeout, 'TOOLS.CACHING')
value.wait(timeout)
if value.result is not None:
# The other thread finished its calculation. Use it.
if debug:
cherrypy.log('Result!', 'TOOLS.CACHING')
return value.result
# Timed out. Stick an Event in the slot so other threads wait
# on this one to finish calculating the value.
if debug:
cherrypy.log('Timed out', 'TOOLS.CACHING')
e = threading.Event()
e.result = None
dict.__setitem__(self, key, e)
return None
elif value is None:
# Stick an Event in the slot so other threads wait
# on this one to finish calculating the value.
if debug:
cherrypy.log('Timed out', 'TOOLS.CACHING')
e = threading.Event()
e.result = None
dict.__setitem__(self, key, e)
return value
def __setitem__(self, key, value):
"""Set the cached value for the given key."""
existing = self.get(key)
dict.__setitem__(self, key, value)
if isinstance(existing, threading._Event):
# Set Event.result so other threads waiting on it have
# immediate access without needing to poll the cache again.
existing.result = value
existing.set()
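# Sketch of the calling pattern this class supports (illustration only; ``key``
# and ``calculate()`` are placeholders):
#
#   value = cache.wait(key, timeout=5)  # cached value, or None if we should compute
#   if value is None:
#       value = calculate()             # other threads now block in wait()
#       cache[key] = value              # publishes the result and wakes them up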
class MemoryCache(Cache):
"""An in-memory cache for varying response content.
Each key in self.store is a URI, and each value is an AntiStampedeCache.
The response for any given URI may vary based on the values of
"selecting request headers"; that is, those named in the Vary
response header. We assume the list of header names to be constant
for each URI throughout the lifetime of the application, and store
that list in ``self.store[uri].selecting_headers``.
The items contained in ``self.store[uri]`` have keys which are tuples of
request header values (in the same order as the names in its
selecting_headers), and values which are the actual responses.
"""
maxobjects = 1000
"""The maximum number of cached objects; defaults to 1000."""
maxobj_size = 100000
"""The maximum size of each cached object in bytes; defaults to 100 KB."""
maxsize = 10000000
"""The maximum size of the entire cache in bytes; defaults to 10 MB."""
delay = 600
"""Seconds until the cached content expires; defaults to 600 (10 minutes)."""
antistampede_timeout = 5
"""Seconds to wait for other threads to release a cache lock."""
expire_freq = 0.1
"""Seconds to sleep between cache expiration sweeps."""
debug = False
def __init__(self):
self.clear()
# Run self.expire_cache in a separate daemon thread.
t = threading.Thread(target=self.expire_cache, name='expire_cache')
self.expiration_thread = t
set_daemon(t, True)
t.start()
def clear(self):
"""Reset the cache to its initial, empty state."""
self.store = {}
self.expirations = {}
self.tot_puts = 0
self.tot_gets = 0
self.tot_hist = 0
self.tot_expires = 0
self.tot_non_modified = 0
self.cursize = 0
def expire_cache(self):
"""Continuously examine cached objects, expiring stale ones.
This function is designed to be run in its own daemon thread,
referenced at ``self.expiration_thread``.
"""
# It's possible that "time" will be set to None
# arbitrarily, so we check "while time" to avoid exceptions.
# See tickets #99 and #180 for more information.
while time:
now = time.time()
# Must make a copy of expirations so it doesn't change size
# during iteration
for expiration_time, objects in copyitems(self.expirations):
if expiration_time <= now:
for obj_size, uri, sel_header_values in objects:
try:
del self.store[uri][tuple(sel_header_values)]
self.tot_expires += 1
self.cursize -= obj_size
except KeyError:
# the key may have been deleted elsewhere
pass
del self.expirations[expiration_time]
time.sleep(self.expire_freq)
def get(self):
"""Return the current variant if in the cache, else None."""
request = cherrypy.serving.request
self.tot_gets += 1
uri = cherrypy.url(qs=request.query_string)
uricache = self.store.get(uri)
if uricache is None:
return None
header_values = [request.headers.get(h, '')
for h in uricache.selecting_headers]
variant = uricache.wait(key=tuple(sorted(header_values)),
timeout=self.antistampede_timeout,
debug=self.debug)
if variant is not None:
self.tot_hist += 1
return variant
def put(self, variant, size):
"""Store the current variant in the cache."""
request = cherrypy.serving.request
response = cherrypy.serving.response
uri = cherrypy.url(qs=request.query_string)
uricache = self.store.get(uri)
if uricache is None:
uricache = AntiStampedeCache()
uricache.selecting_headers = [
e.value for e in response.headers.elements('Vary')]
self.store[uri] = uricache
if len(self.store) < self.maxobjects:
total_size = self.cursize + size
# checks if there's space for the object
if (size < self.maxobj_size and total_size < self.maxsize):
# add to the expirations list
expiration_time = response.time + self.delay
bucket = self.expirations.setdefault(expiration_time, [])
bucket.append((size, uri, uricache.selecting_headers))
# add to the cache
header_values = [request.headers.get(h, '')
for h in uricache.selecting_headers]
uricache[tuple(sorted(header_values))] = variant
self.tot_puts += 1
self.cursize = total_size
def delete(self):
"""Remove ALL cached variants of the current resource."""
uri = cherrypy.url(qs=cherrypy.serving.request.query_string)
self.store.pop(uri, None)
def get(invalid_methods=("POST", "PUT", "DELETE"), debug=False, **kwargs):
"""Try to obtain cached output. If fresh enough, raise HTTPError(304).
If POST, PUT, or DELETE:
* invalidates (deletes) any cached response for this resource
* sets request.cached = False
* sets request.cacheable = False
else if a cached copy exists:
* sets request.cached = True
* sets request.cacheable = False
* sets response.headers to the cached values
* checks the cached Last-Modified response header against the
current If-(Un)Modified-Since request headers; raises 304
if necessary.
* sets response.status and response.body to the cached values
* returns True
otherwise:
* sets request.cached = False
* sets request.cacheable = True
* returns False
"""
request = cherrypy.serving.request
response = cherrypy.serving.response
if not hasattr(cherrypy, "_cache"):
# Make a process-wide Cache object.
cherrypy._cache = kwargs.pop("cache_class", MemoryCache)()
# Take all remaining kwargs and set them on the Cache object.
for k, v in kwargs.items():
setattr(cherrypy._cache, k, v)
cherrypy._cache.debug = debug
# POST, PUT, DELETE should invalidate (delete) the cached copy.
# See http://www.w3.org/Protocols/rfc2616/rfc2616-sec13.html#sec13.10.
if request.method in invalid_methods:
if debug:
cherrypy.log('request.method %r in invalid_methods %r' %
(request.method, invalid_methods), 'TOOLS.CACHING')
cherrypy._cache.delete()
request.cached = False
request.cacheable = False
return False
if 'no-cache' in [e.value for e in request.headers.elements('Pragma')]:
request.cached = False
request.cacheable = True
return False
cache_data = cherrypy._cache.get()
request.cached = bool(cache_data)
request.cacheable = not request.cached
if request.cached:
# Serve the cached copy.
max_age = cherrypy._cache.delay
for v in [e.value for e in request.headers.elements('Cache-Control')]:
atoms = v.split('=', 1)
directive = atoms.pop(0)
if directive == 'max-age':
if len(atoms) != 1 or not atoms[0].isdigit():
raise cherrypy.HTTPError(400, "Invalid Cache-Control header")
max_age = int(atoms[0])
break
elif directive == 'no-cache':
if debug:
cherrypy.log('Ignoring cache due to Cache-Control: no-cache',
'TOOLS.CACHING')
request.cached = False
request.cacheable = True
return False
if debug:
cherrypy.log('Reading response from cache', 'TOOLS.CACHING')
s, h, b, create_time = cache_data
age = int(response.time - create_time)
if (age > max_age):
if debug:
cherrypy.log('Ignoring cache due to age > %d' % max_age,
'TOOLS.CACHING')
request.cached = False
request.cacheable = True
return False
# Copy the response headers. See http://www.cherrypy.org/ticket/721.
response.headers = rh = httputil.HeaderMap()
for k in h:
dict.__setitem__(rh, k, dict.__getitem__(h, k))
# Add the required Age header
response.headers["Age"] = str(age)
try:
# Note that validate_since depends on a Last-Modified header;
# this was put into the cached copy, and should have been
# resurrected just above (response.headers = cache_data[1]).
cptools.validate_since()
except cherrypy.HTTPRedirect:
x = sys.exc_info()[1]
if x.status == 304:
cherrypy._cache.tot_non_modified += 1
raise
# serve it & get out from the request
response.status = s
response.body = b
else:
if debug:
cherrypy.log('request is not cached', 'TOOLS.CACHING')
return request.cached
def tee_output():
"""Tee response output to cache storage. Internal."""
# Used by CachingTool by attaching to request.hooks
request = cherrypy.serving.request
if 'no-store' in request.headers.values('Cache-Control'):
return
def tee(body):
"""Tee response.body into a list."""
if ('no-cache' in response.headers.values('Pragma') or
'no-store' in response.headers.values('Cache-Control')):
for chunk in body:
yield chunk
return
output = []
for chunk in body:
output.append(chunk)
yield chunk
# save the cache data
body = ntob('').join(output)
cherrypy._cache.put((response.status, response.headers or {},
body, response.time), len(body))
response = cherrypy.serving.response
response.body = tee(response.body)
def expires(secs=0, force=False, debug=False):
"""Tool for influencing cache mechanisms using the 'Expires' header.
secs
Must be either an int or a datetime.timedelta, and indicates the
number of seconds between response.time and when the response should
expire. The 'Expires' header will be set to response.time + secs.
If secs is zero, the 'Expires' header is set one year in the past, and
the following "cache prevention" headers are also set:
* Pragma: no-cache
* Cache-Control': no-cache, must-revalidate
force
If False, the following headers are checked:
* Etag
* Last-Modified
* Age
* Expires
If any are already present, none of the above response headers are set.
"""
response = cherrypy.serving.response
headers = response.headers
cacheable = False
if not force:
# some header names that indicate that the response can be cached
for indicator in ('Etag', 'Last-Modified', 'Age', 'Expires'):
if indicator in headers:
cacheable = True
break
if not cacheable and not force:
if debug:
cherrypy.log('request is not cacheable', 'TOOLS.EXPIRES')
else:
if debug:
cherrypy.log('request is cacheable', 'TOOLS.EXPIRES')
if isinstance(secs, datetime.timedelta):
secs = (86400 * secs.days) + secs.seconds
if secs == 0:
if force or ("Pragma" not in headers):
headers["Pragma"] = "no-cache"
if cherrypy.serving.request.protocol >= (1, 1):
if force or "Cache-Control" not in headers:
headers["Cache-Control"] = "no-cache, must-revalidate"
# Set an explicit Expires date in the past.
expiry = httputil.HTTPDate(1169942400.0)
else:
expiry = httputil.HTTPDate(response.time + secs)
if force or "Expires" not in headers:
headers["Expires"] = expiry
| ychen820/microblog | y/google-cloud-sdk/platform/google_appengine/lib/cherrypy/cherrypy/lib/caching.py | Python | bsd-3-clause | 17,413 | 0.003963 |
# Copyright 2021 The SLOE Logistic Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Builds sloe_logistic package."""
from distutils import core
from distutils.command import build_clib
from pybind11.setup_helpers import build_ext
from pybind11.setup_helpers import Pybind11Extension
libraries = [
("scipy_brentq", {
"sources": ["third_party/py/scipy/optimize/Zeros/brentq.c",],
}),
]
ext_modules = [
Pybind11Extension("sloe_logistic.mle_param_integrands", [
"mle_param_integrands.cc",
]),
]
core.setup(
name="sloe_logistic",
version="0.0.1",
description="Implements SLOE method and Logistic Regression Inference",
long_description="Code to supplement the ICML submission SLOE: A Faster "
"Method for Statistical Inference in High-Dimensional Logistic Regression.",
packages=["sloe_logistic", "sloe_logistic.sloe_experiments"],
package_dir={
"sloe_logistic": ".",
"sloe_logistic.sloe_experiments": "sloe_experiments/"
},
libraries=libraries,
ext_modules=ext_modules,
cmdclass={
"build_ext": build_ext,
"build_clib": build_clib.build_clib,
},
zip_safe=False,
)
| google-research/sloe-logistic | setup.py | Python | apache-2.0 | 1,690 | 0.001183 |
# -*- coding: utf-8 -*-
from django.contrib import admin
from django.core.urlresolvers import reverse
from vkontakte_api.admin import VkontakteModelAdmin
from .models import Album, Video
class VideoInline(admin.TabularInline):
def image(self, instance):
return '<img src="%s" />' % (instance.photo_130,)
image.short_description = 'video'
image.allow_tags = True
model = Video
fields = ('title', 'image', 'owner', 'comments_count', 'views_count')
readonly_fields = fields
extra = False
can_delete = False
class AlbumAdmin(VkontakteModelAdmin):
def image_preview(self, obj):
return u'<a href="%s"><img src="%s" height="30" /></a>' % (obj.photo_160, obj.photo_160)
image_preview.short_description = u'Картинка'
image_preview.allow_tags = True
list_display = ('image_preview', 'remote_id', 'title', 'owner', 'videos_count')
list_display_links = ('title', 'remote_id',)
search_fields = ('title', 'description')
inlines = [VideoInline]
class VideoAdmin(VkontakteModelAdmin):
def image_preview(self, obj):
return u'<a href="%s"><img src="%s" height="30" /></a>' % (obj.photo_130, obj.photo_130)
image_preview.short_description = u'Картинка'
image_preview.allow_tags = True
list_display = ('image_preview', 'remote_id', 'owner', 'album', 'title', 'comments_count', 'views_count', 'date')
list_display_links = ('remote_id', 'title')
list_filter = ('album',)
admin.site.register(Album, AlbumAdmin)
admin.site.register(Video, VideoAdmin)
| ramusus/django-vkontakte-video | vkontakte_video/admin.py | Python | bsd-3-clause | 1,566 | 0.002581 |
from ..daltools.util.full import init
Z = [8., 1., 1.]
Rc = init([0.00000000, 0.00000000, 0.48860959])
Dtot = [0, 0, -0.76539388]
Daa = init([
[ 0.00000000, 0.00000000, -0.28357300],
[ 0.15342658, 0.00000000, 0.12734703],
[-0.15342658, 0.00000000, 0.12734703],
])
QUc = init([-7.31176220, 0., 0., -5.43243232, 0., -6.36258665])
QUN = init([4.38968295, 0., 0., 0., 0., 1.75400326])
QUaa = init([
[-3.29253618, 0.00000000, 0.00000000, -4.54316657, 0.00000000, -4.00465380],
[-0.13213704, 0.00000000, 0.24980518, -0.44463288, 0.00000000, -0.26059139],
[-0.13213704, 0.00000000,-0.24980518, -0.44463288, 0.00000000, -0.26059139]
])
Fab = init([
[-0.11E-03, 0.55E-04, 0.55E-04],
[ 0.55E-04, -0.55E-04, 0.16E-30],
[ 0.55E-04, 0.16E-30, -0.55E-04]
])
Lab = init([
[0.11E-03, 0.28E-03, 0.28E-03],
[0.28E-03, 0.17E-03, 0.22E-03],
[0.28E-03, 0.22E-03, 0.17E-03]
])
la = init([
[0.0392366,-27.2474016 , 27.2081650],
[0.0358964, 27.2214515 ,-27.2573479],
[0.01211180, -0.04775576, 0.03564396],
[0.01210615, -0.00594030, -0.00616584],
[10.69975088, -5.34987556, -5.34987532],
[-10.6565582, 5.3282791 , 5.3282791]
])
O = [
0.76145382,
-0.00001648, 1.75278523,
-0.00007538, 0.00035773, 1.39756345
]
H1O = [
3.11619527,
0.00019911, 1.25132346,
2.11363325, 0.00111442, 2.12790474
]
H1 = [
0.57935224,
0.00018083, 0.43312326,
0.11495546, 0.00004222, 0.45770123
]
H2O = [
3.11568759,
0.00019821, 1.25132443,
-2.11327482, -0.00142746, 2.12790473
]
H2H1 = [
0.04078206,
-0.00008380, -0.01712262,
-0.00000098, 0.00000084, -0.00200285
]
H2 = [
0.57930522,
0.00018221, 0.43312149,
-0.11493635, -0.00016407, 0.45770123
]
Aab = init([O, H1O, H1, H2O, H2H1, H2])
Aa = init([
[ 3.87739525, 0.00018217, 3.00410918, 0.00010384, 0.00020122, 3.52546819 ],
[ 2.15784091, 0.00023848, 1.05022368, 1.17177159, 0.00059985, 1.52065218 ],
[ 2.15754005, 0.00023941, 1.05022240, -1.17157425, -0.00087738, 1.52065217 ]
])
ff = 0.001
rMP = init([
#O
[
[-8.70343886, 0.00000000, 0.00000000, -0.39827574, -3.68114747, 0.00000000, 0.00000000, -4.58632761, 0.00000000, -4.24741556],
[-8.70343235, 0.00076124, 0.00000000, -0.39827535, -3.68114147, 0.00000000, 0.00193493, -4.58631888, 0.00000000, -4.24741290],
[-8.70343291,-0.00076166, 0.00000000, -0.39827505, -3.68114128, 0.00000000, -0.00193603, -4.58631789, 0.00000000, -4.24741229],
[-8.70343685,-0.00000006, 0.00175241, -0.39827457, -3.68114516, 0.00000000, 0.00000161, -4.58632717, 0.00053363, -4.24741642],
[-8.70343685, 0.00000000, -0.00175316, -0.39827456, -3.68114514, 0.00000000, 0.00000000, -4.58632711, -0.00053592, -4.24741639],
[-8.70166502, 0.00000000, 0.00000144, -0.39688042, -3.67884999, 0.00000000, 0.00000000, -4.58395384, 0.00000080, -4.24349307],
[-8.70520554, 0.00000000, 0.00000000, -0.39967554, -3.68344246, 0.00000000, 0.00000000, -4.58868836, 0.00000000, -4.25134640],
],
#H1O
[
[ 0.00000000, 0.10023328, 0.00000000, 0.11470275, 0.53710687, 0.00000000, 0.43066796, 0.04316104, 0.00000000, 0.36285790],
[ 0.00150789, 0.10111974, 0.00000000, 0.11541803, 0.53753360, 0.00000000, 0.43120945, 0.04333774, 0.00000000, 0.36314215],
[-0.00150230, 0.09934695, 0.00000000, 0.11398581, 0.53667861, 0.00000000, 0.43012612, 0.04298361, 0.00000000, 0.36257249],
[ 0.00000331, 0.10023328, 0.00125017, 0.11470067, 0.53710812, -0.00006107, 0.43066944, 0.04316020, 0.00015952, 0.36285848],
[ 0.00000100, 0.10023249, -0.00125247, 0.11470042, 0.53710716, 0.00006135, 0.43066837, 0.04316018, -0.00015966, 0.36285788],
[ 0.00088692, 0.10059268, -0.00000064, 0.11590322, 0.53754715, -0.00000006, 0.43071206, 0.04334198, -0.00000015, 0.36330053],
[-0.00088334, 0.09987383, 0.00000000, 0.11350091, 0.53666602, 0.00000000, 0.43062352, 0.04297910, 0.00000000, 0.36241326],
],
#H1
[
[-0.64828057, 0.10330994, 0.00000000, 0.07188960, -0.47568174, 0.00000000, -0.03144252, -0.46920879, 0.00000000, -0.50818752],
[-0.64978846, 0.10389186, 0.00000000, 0.07204462, -0.47729337, 0.00000000, -0.03154159, -0.47074619, 0.00000000, -0.50963693],
[-0.64677827, 0.10273316, 0.00000000, 0.07173584, -0.47408263, 0.00000000, -0.03134407, -0.46768337, 0.00000000, -0.50674873],
[-0.64828388, 0.10331167, 0.00043314, 0.07189029, -0.47568875, -0.00023642, -0.03144270, -0.46921635, -0.00021728, -0.50819386],
[-0.64828157, 0.10331095, -0.00043311, 0.07188988, -0.47568608, 0.00023641, -0.03144256, -0.46921346, 0.00021729, -0.50819095],
[-0.64916749, 0.10338629, -0.00000024, 0.07234862, -0.47634698, 0.00000013, -0.03159569, -0.47003679, 0.00000011, -0.50936853],
[-0.64739723, 0.10323524, 0.00000000, 0.07143322, -0.47502412, 0.00000000, -0.03129003, -0.46838912, 0.00000000, -0.50701656],
],
#H2O
[
[ 0.00000000,-0.10023328, 0.00000000, 0.11470275, 0.53710687, 0.00000000, -0.43066796, 0.04316104, 0.00000000, 0.36285790],
[-0.00150139,-0.09934749, 0.00000000, 0.11398482, 0.53667874, 0.00000000, -0.43012670, 0.04298387, 0.00000000, 0.36257240],
[ 0.00150826,-0.10112008, 0.00000000, 0.11541676, 0.53753350, 0.00000000, -0.43120982, 0.04333795, 0.00000000, 0.36314186],
[-0.00000130,-0.10023170, 0.00125018, 0.11470018, 0.53710620, 0.00006107, -0.43066732, 0.04316017, 0.00015952, 0.36285728],
[ 0.00000101,-0.10023249, -0.00125247, 0.11470042, 0.53710716, -0.00006135, -0.43066838, 0.04316018, -0.00015966, 0.36285788],
[ 0.00088692,-0.10059268, -0.00000064, 0.11590322, 0.53754715, 0.00000006, -0.43071206, 0.04334198, -0.00000015, 0.36330053],
[-0.00088334,-0.09987383, 0.00000000, 0.11350091, 0.53666602, 0.00000000, -0.43062352, 0.04297910, 0.00000000, 0.36241326],
],
#H2H1
[
[ 0.00000000, 0.00000000, 0.00000000, -0.00378789, 0.00148694, 0.00000000, 0.00000000, 0.00599079, 0.00000000, 0.01223822],
[ 0.00000000, 0.00004089, 0.00000000, -0.00378786, 0.00148338, 0.00000000, -0.00004858, 0.00599281, 0.00000000, 0.01224094],
[ 0.00000000,-0.00004067, 0.00000000, -0.00378785, 0.00148341, 0.00000000, 0.00004861, 0.00599277, 0.00000000, 0.01224093],
[ 0.00000000,-0.00000033, -0.00001707, -0.00378763, 0.00149017, 0.00000000, 0.00000001, 0.00599114, -0.00001229, 0.01223979],
[ 0.00000000, 0.00000000, 0.00001717, -0.00378763, 0.00149019, 0.00000000, 0.00000000, 0.00599114, 0.00001242, 0.01223980],
[ 0.00000000, 0.00000000, 0.00000000, -0.00378978, 0.00141897, 0.00000000, 0.00000000, 0.00590445, 0.00000002, 0.01210376],
[ 0.00000000, 0.00000000, 0.00000000, -0.00378577, 0.00155694, 0.00000000, 0.00000000, 0.00607799, 0.00000000, 0.01237393],
],
#H2
[
[-0.64828057,-0.10330994, 0.00000000, 0.07188960, -0.47568174, 0.00000000, 0.03144252, -0.46920879, 0.00000000, -0.50818752],
[-0.64677918,-0.10273369, 0.00000000, 0.07173576, -0.47408411, 0.00000000, 0.03134408, -0.46768486, 0.00000000, -0.50674986],
[-0.64978883,-0.10389230, 0.00000000, 0.07204446, -0.47729439, 0.00000000, 0.03154159, -0.47074717, 0.00000000, -0.50963754],
[-0.64827927,-0.10331022, 0.00043313, 0.07188947, -0.47568340, 0.00023642, 0.03144242, -0.46921057, -0.00021727, -0.50818804],
[-0.64828158,-0.10331095, -0.00043311, 0.07188988, -0.47568609, -0.00023641, 0.03144256, -0.46921348, 0.00021729, -0.50819097],
[-0.64916749,-0.10338629, -0.00000024, 0.07234862, -0.47634698, -0.00000013, 0.03159569, -0.47003679, 0.00000011, -0.50936853],
[-0.64739723,-0.10323524, 0.00000000, 0.07143322, -0.47502412, 0.00000000, 0.03129003, -0.46838912, 0.00000000, -0.50701656]
]
])
Am = init([
[8.186766009140, 0., 0.],
[0., 5.102747935447, 0.],
[0., 0., 6.565131856389]
])
Amw = init([
[11.98694996213, 0., 0.],
[0., 4.403583657738, 0.],
[0., 0., 2.835142058626]
])
R = [
[ 0.00000, 0.00000, 0.69801],
[-1.48150, 0.00000, -0.34901],
[ 1.48150, 0.00000, -0.34901]
]
Qtot = -10.0
Q = rMP[0, 0, (0, 2, 5)]
D = rMP[1:4, 0, :]
QU = rMP[4:, 0, :]
dQa = rMP[0, :, (0,2,5)]
dQab = rMP[0, :, (1, 3, 4)]
#These are string data for testing potential file
PAn0 = """AU
3 -1 0 1
1 0.000 0.000 0.698
1 -1.481 0.000 -0.349
1 1.481 0.000 -0.349
"""
PA00 = """AU
3 0 0 1
1 0.000 0.000 0.698 -0.703
1 -1.481 0.000 -0.349 0.352
1 1.481 0.000 -0.349 0.352
"""
PA10 = """AU
3 1 0 1
1 0.000 0.000 0.698 -0.703 -0.000 0.000 -0.284
1 -1.481 0.000 -0.349 0.352 0.153 0.000 0.127
1 1.481 0.000 -0.349 0.352 -0.153 0.000 0.127
"""
PA20 = """AU
3 2 0 1
1 0.000 0.000 0.698 -0.703 -0.000 0.000 -0.284 -3.293 0.000 -0.000 -4.543 -0.000 -4.005
1 -1.481 0.000 -0.349 0.352 0.153 0.000 0.127 -0.132 0.000 0.250 -0.445 0.000 -0.261
1 1.481 0.000 -0.349 0.352 -0.153 0.000 0.127 -0.132 -0.000 -0.250 -0.445 0.000 -0.261
"""
PA21 = """AU
3 2 1 1
1 0.000 0.000 0.698 -0.703 -0.000 0.000 -0.284 -3.293 0.000 -0.000 -4.543 -0.000 -4.005 3.466
1 -1.481 0.000 -0.349 0.352 0.153 0.000 0.127 -0.132 0.000 0.250 -0.445 0.000 -0.261 1.576
1 1.481 0.000 -0.349 0.352 -0.153 0.000 0.127 -0.132 -0.000 -0.250 -0.445 0.000 -0.261 1.576
"""
PA22 = """AU
3 2 2 1
1 0.000 0.000 0.698 -0.703 -0.000 0.000 -0.284 -3.293 0.000 -0.000 -4.543 -0.000 -4.005 3.875 -0.000 -0.000 3.000 -0.000 3.524
1 -1.481 0.000 -0.349 0.352 0.153 0.000 0.127 -0.132 0.000 0.250 -0.445 0.000 -0.261 2.156 -0.000 1.106 1.051 -0.000 1.520
1 1.481 0.000 -0.349 0.352 -0.153 0.000 0.127 -0.132 -0.000 -0.250 -0.445 0.000 -0.261 2.156 -0.000 -1.106 1.051 -0.000 1.520
"""
OUTPUT_n0_1 = """\
---------------
Atomic domain 1
---------------
Domain center: 0.00000 0.00000 0.69801
"""
OUTPUT_00_1 = OUTPUT_n0_1 + """\
Nuclear charge: 8.00000
Electronic charge: -8.70344
Total charge: -0.70344
"""
OUTPUT_10_1 = OUTPUT_00_1 + """\
Electronic dipole -0.00000 0.00000 -0.28357
"""
OUTPUT_20_1 = OUTPUT_10_1 + """\
Electronic quadrupole -3.29254 0.00000 -0.00000 -4.54317 0.00000 -4.00466
"""
OUTPUT_01_1 = OUTPUT_00_1 + """\
Isotropic polarizablity (w=0) 3.46639
"""
OUTPUT_02_1 = OUTPUT_00_1 + """\
Electronic polarizability (w=0) 3.87468 -0.00000 3.00027 -0.00000 -0.00000 3.52422
"""
| fishstamp82/loprop | test/h2o_data.py | Python | gpl-3.0 | 11,431 | 0.017234 |
# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
from __future__ import unicode_literals
from ..mesh import MeshWarpMaths
def test_MeshWarpMaths_inputs():
input_map = dict(float_trait=dict(),
ignore_exception=dict(nohash=True,
usedefault=True,
),
in_surf=dict(mandatory=True,
),
operation=dict(usedefault=True,
),
operator=dict(mandatory=True,
),
out_file=dict(usedefault=True,
),
out_warp=dict(usedefault=True,
),
)
inputs = MeshWarpMaths.input_spec()
for key, metadata in list(input_map.items()):
for metakey, value in list(metadata.items()):
assert getattr(inputs.traits()[key], metakey) == value
def test_MeshWarpMaths_outputs():
output_map = dict(out_file=dict(),
out_warp=dict(),
)
outputs = MeshWarpMaths.output_spec()
for key, metadata in list(output_map.items()):
for metakey, value in list(metadata.items()):
assert getattr(outputs.traits()[key], metakey) == value
| mick-d/nipype | nipype/algorithms/tests/test_auto_MeshWarpMaths.py | Python | bsd-3-clause | 1,014 | 0.013807 |
"""
Sensor for checking the status of Hue sensors.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.hue/
"""
import asyncio
import async_timeout
from datetime import timedelta
import logging
import homeassistant.util.dt as dt_util
from homeassistant.const import (
STATE_HOME,
STATE_NOT_HOME,
ATTR_GPS_ACCURACY,
ATTR_LATITUDE,
ATTR_LONGITUDE,
)
from homeassistant.helpers.event import async_track_time_interval
from homeassistant.components.device_tracker import (
CONF_SCAN_INTERVAL,
DEFAULT_SCAN_INTERVAL,
DOMAIN,
PLATFORM_SCHEMA,
DeviceScanner,
)
from homeassistant.util import slugify
from homeassistant.components import zone
__version__ = "1.0.5"
DEPENDENCIES = ["hue"]
_LOGGER = logging.getLogger(__name__)
TYPE_GEOFENCE = "Geofence"
SCAN_INTERVAL = DEFAULT_SCAN_INTERVAL
def get_bridges(hass):
from homeassistant.components import hue
from homeassistant.components.hue.bridge import HueBridge
return [
entry
for entry in hass.data[hue.DOMAIN].values()
if isinstance(entry, HueBridge) and entry.api
]
async def update_api(api):
import aiohue
try:
with async_timeout.timeout(10):
await api.update()
except (asyncio.TimeoutError, aiohue.AiohueException) as err:
_LOGGER.debug("Failed to fetch sensors: %s", err)
return False
return True
async def async_setup_scanner(hass, config, async_see, discovery_info=None):
interval = config.get(CONF_SCAN_INTERVAL, SCAN_INTERVAL)
scanner = HueDeviceScanner(hass, async_see)
await scanner.async_start(hass, interval)
return True
class HueDeviceScanner(DeviceScanner):
def __init__(self, hass, async_see):
"""Initialize the scanner."""
self.hass = hass
self.async_see = async_see
async def async_start(self, hass, interval):
"""Perform a first update and start polling at the given interval."""
await self.async_update_info()
interval = max(interval, SCAN_INTERVAL)
async_track_time_interval(hass, self.async_update_info, interval)
async def async_see_sensor(self, sensor):
last_updated = sensor.state.get("lastupdated")
if not last_updated or last_updated == "none":
return
kwargs = {
"dev_id": slugify("hue_{}".format(sensor.name)),
"host_name": sensor.name,
"attributes": {
"last_updated": dt_util.as_local(dt_util.parse_datetime(last_updated)),
"unique_id": sensor.uniqueid,
},
}
if sensor.state.get("presence"):
kwargs["location_name"] = STATE_HOME
zone_home = self.hass.states.get(zone.ENTITY_ID_HOME)
if zone_home:
kwargs["gps"] = [
zone_home.attributes[ATTR_LATITUDE],
zone_home.attributes[ATTR_LONGITUDE],
]
kwargs[ATTR_GPS_ACCURACY] = 0
else:
kwargs["location_name"] = STATE_NOT_HOME
_LOGGER.debug(
"Hue Geofence %s: %s (%s)",
sensor.name,
kwargs["location_name"],
kwargs["attributes"],
)
result = await self.async_see(**kwargs)
return result
async def async_update_info(self, now=None):
"""Get the bridge info."""
bridges = get_bridges(self.hass)
if not bridges:
return
await asyncio.wait(
[update_api(bridge.api.sensors) for bridge in bridges], loop=self.hass.loop
)
sensors = [
self.async_see_sensor(sensor)
for bridge in bridges
for sensor in bridge.api.sensors.values()
if sensor.type == TYPE_GEOFENCE
]
if not sensors:
return
await asyncio.wait(sensors)
| shire210/Shire-HA | custom_components/hue_custom/device_tracker.py | Python | mit | 3,939 | 0.000508 |
# MySQL Connector/Python - MySQL driver written in Python.
# Copyright (c) 2013, 2014, Oracle and/or its affiliates. All rights reserved.
# MySQL Connector/Python is licensed under the terms of the GPLv2
# <http://www.gnu.org/licenses/old-licenses/gpl-2.0.html>, like most
# MySQL Connectors. There are special exceptions to the terms and
# conditions of the GPLv2 as it is applied to this software, see the
# FOSS License Exception
# <http://www.mysql.com/about/legal/licensing/foss-exception.html>.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""Unittests for mysql.connector.pooling
"""
import uuid
try:
from Queue import Queue
except ImportError:
# Python 3
from queue import Queue
import tests
import mysql.connector
from mysql.connector import errors
from mysql.connector.connection import MySQLConnection
from mysql.connector import pooling
class PoolingTests(tests.MySQLConnectorTests):
def tearDown(self):
mysql.connector._CONNECTION_POOLS = {}
def test_generate_pool_name(self):
self.assertRaises(errors.PoolError, pooling.generate_pool_name)
config = {'host': 'ham', 'database': 'spam'}
self.assertEqual('ham_spam',
pooling.generate_pool_name(**config))
config = {'database': 'spam', 'port': 3377, 'host': 'example.com'}
self.assertEqual('example.com_3377_spam',
pooling.generate_pool_name(**config))
config = {
'user': 'ham', 'database': 'spam',
'port': 3377, 'host': 'example.com'}
self.assertEqual('example.com_3377_ham_spam',
pooling.generate_pool_name(**config))
class PooledMySQLConnectionTests(tests.MySQLConnectorTests):
def tearDown(self):
mysql.connector._CONNECTION_POOLS = {}
def test___init__(self):
dbconfig = tests.get_mysql_config()
cnxpool = pooling.MySQLConnectionPool(pool_size=1, **dbconfig)
self.assertRaises(TypeError, pooling.PooledMySQLConnection)
cnx = MySQLConnection(**dbconfig)
pcnx = pooling.PooledMySQLConnection(cnxpool, cnx)
self.assertEqual(cnxpool, pcnx._cnx_pool)
self.assertEqual(cnx, pcnx._cnx)
self.assertRaises(AttributeError, pooling.PooledMySQLConnection,
None, None)
self.assertRaises(AttributeError, pooling.PooledMySQLConnection,
cnxpool, None)
def test___getattr__(self):
dbconfig = tests.get_mysql_config()
cnxpool = pooling.MySQLConnectionPool(pool_size=1, pool_name='test')
cnx = MySQLConnection(**dbconfig)
pcnx = pooling.PooledMySQLConnection(cnxpool, cnx)
exp_attrs = {
'_connection_timeout': dbconfig['connection_timeout'],
'_database': dbconfig['database'],
'_host': dbconfig['host'],
'_password': dbconfig['password'],
'_port': dbconfig['port'],
'_unix_socket': dbconfig['unix_socket']
}
for attr, value in exp_attrs.items():
self.assertEqual(
value,
getattr(pcnx, attr),
"Attribute {0} of reference connection not correct".format(
attr))
self.assertEqual(pcnx.connect, cnx.connect)
def test_close(self):
dbconfig = tests.get_mysql_config()
cnxpool = pooling.MySQLConnectionPool(pool_size=1, **dbconfig)
cnxpool._original_cnx = None
def dummy_add_connection(self, cnx=None):
self._original_cnx = cnx
cnxpool.add_connection = dummy_add_connection.__get__(
cnxpool, pooling.MySQLConnectionPool)
pcnx = pooling.PooledMySQLConnection(cnxpool,
MySQLConnection(**dbconfig))
cnx = pcnx._cnx
pcnx.close()
self.assertEqual(cnx, cnxpool._original_cnx)
def test_config(self):
dbconfig = tests.get_mysql_config()
cnxpool = pooling.MySQLConnectionPool(pool_size=1, **dbconfig)
cnx = cnxpool.get_connection()
self.assertRaises(errors.PoolError, cnx.config, user='spam')
class MySQLConnectionPoolTests(tests.MySQLConnectorTests):
def tearDown(self):
mysql.connector._CONNECTION_POOLS = {}
def test___init__(self):
dbconfig = tests.get_mysql_config()
self.assertRaises(errors.PoolError, pooling.MySQLConnectionPool)
self.assertRaises(AttributeError, pooling.MySQLConnectionPool,
pool_name='test',
pool_size=-1)
self.assertRaises(AttributeError, pooling.MySQLConnectionPool,
pool_name='test',
pool_size=0)
self.assertRaises(AttributeError, pooling.MySQLConnectionPool,
pool_name='test',
pool_size=(pooling.CNX_POOL_MAXSIZE + 1))
cnxpool = pooling.MySQLConnectionPool(pool_name='test')
self.assertEqual(5, cnxpool._pool_size)
self.assertEqual('test', cnxpool._pool_name)
self.assertEqual({}, cnxpool._cnx_config)
self.assertTrue(isinstance(cnxpool._cnx_queue, Queue))
self.assertTrue(isinstance(cnxpool._config_version, uuid.UUID))
self.assertTrue(True, cnxpool._reset_session)
cnxpool = pooling.MySQLConnectionPool(pool_size=10, pool_name='test')
self.assertEqual(10, cnxpool._pool_size)
cnxpool = pooling.MySQLConnectionPool(pool_size=10, **dbconfig)
self.assertEqual(dbconfig, cnxpool._cnx_config,
"Connection configuration not saved correctly")
self.assertEqual(10, cnxpool._cnx_queue.qsize())
self.assertTrue(isinstance(cnxpool._config_version, uuid.UUID))
cnxpool = pooling.MySQLConnectionPool(pool_size=1, pool_name='test',
pool_reset_session=False)
self.assertFalse(cnxpool._reset_session)
def test_pool_name(self):
"""Test MySQLConnectionPool.pool_name property"""
pool_name = 'ham'
cnxpool = pooling.MySQLConnectionPool(pool_name=pool_name)
self.assertEqual(pool_name, cnxpool.pool_name)
def test_reset_session(self):
"""Test MySQLConnectionPool.reset_session property"""
cnxpool = pooling.MySQLConnectionPool(pool_name='test',
pool_reset_session=False)
self.assertFalse(cnxpool.reset_session)
cnxpool._reset_session = True
self.assertTrue(cnxpool.reset_session)
def test_pool_size(self):
"""Test MySQLConnectionPool.pool_size property"""
pool_size = 4
cnxpool = pooling.MySQLConnectionPool(pool_name='test',
pool_size=pool_size)
self.assertEqual(pool_size, cnxpool.pool_size)
def test__set_pool_size(self):
cnxpool = pooling.MySQLConnectionPool(pool_name='test')
self.assertRaises(AttributeError, cnxpool._set_pool_size, -1)
self.assertRaises(AttributeError, cnxpool._set_pool_size, 0)
self.assertRaises(AttributeError, cnxpool._set_pool_size,
pooling.CNX_POOL_MAXSIZE + 1)
cnxpool._set_pool_size(pooling.CNX_POOL_MAXSIZE - 1)
self.assertEqual(pooling.CNX_POOL_MAXSIZE - 1, cnxpool._pool_size)
def test__set_pool_name(self):
cnxpool = pooling.MySQLConnectionPool(pool_name='test')
self.assertRaises(AttributeError, cnxpool._set_pool_name, 'pool name')
self.assertRaises(AttributeError, cnxpool._set_pool_name, 'pool%%name')
self.assertRaises(AttributeError, cnxpool._set_pool_name,
'long_pool_name' * pooling.CNX_POOL_MAXNAMESIZE)
def test_add_connection(self):
cnxpool = pooling.MySQLConnectionPool(pool_name='test')
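        # Adding a connection before set_config() must raise PoolError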
self.assertRaises(errors.PoolError, cnxpool.add_connection)
dbconfig = tests.get_mysql_config()
cnxpool = pooling.MySQLConnectionPool(pool_size=2, pool_name='test')
cnxpool.set_config(**dbconfig)
cnxpool.add_connection()
pcnx = pooling.PooledMySQLConnection(
cnxpool,
cnxpool._cnx_queue.get(block=False))
self.assertTrue(isinstance(pcnx._cnx, MySQLConnection))
self.assertEqual(cnxpool, pcnx._cnx_pool)
self.assertEqual(cnxpool._config_version,
pcnx._cnx._pool_config_version)
cnx = pcnx._cnx
pcnx.close()
        # We should get the same connection back
self.assertEqual(cnx, cnxpool._cnx_queue.get(block=False))
cnxpool.add_connection(cnx)
# reach max connections
cnxpool.add_connection()
self.assertRaises(errors.PoolError, cnxpool.add_connection)
# fail connecting
cnxpool._remove_connections()
cnxpool._cnx_config['port'] = 9999999
cnxpool._cnx_config['unix_socket'] = '/ham/spam/foobar.socket'
self.assertRaises(errors.InterfaceError, cnxpool.add_connection)
self.assertRaises(errors.PoolError, cnxpool.add_connection, cnx=str)
def test_set_config(self):
dbconfig = tests.get_mysql_config()
cnxpool = pooling.MySQLConnectionPool(pool_name='test')
# No configuration changes
config_version = cnxpool._config_version
cnxpool.set_config()
self.assertEqual(config_version, cnxpool._config_version)
self.assertEqual({}, cnxpool._cnx_config)
# Valid configuration changes
config_version = cnxpool._config_version
cnxpool.set_config(**dbconfig)
self.assertEqual(dbconfig, cnxpool._cnx_config)
self.assertNotEqual(config_version, cnxpool._config_version)
# Invalid configuration changes
config_version = cnxpool._config_version
wrong_dbconfig = dbconfig.copy()
wrong_dbconfig['spam'] = 'ham'
self.assertRaises(errors.PoolError, cnxpool.set_config,
**wrong_dbconfig)
self.assertEqual(dbconfig, cnxpool._cnx_config)
self.assertEqual(config_version, cnxpool._config_version)
def test_get_connection(self):
dbconfig = tests.get_mysql_config()
cnxpool = pooling.MySQLConnectionPool(pool_size=2, pool_name='test')
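        # No connection configuration was set, so the pool cannot hand out connections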
self.assertRaises(errors.PoolError, cnxpool.get_connection)
cnxpool = pooling.MySQLConnectionPool(pool_size=1, **dbconfig)
# Get connection from pool
pcnx = cnxpool.get_connection()
self.assertTrue(isinstance(pcnx, pooling.PooledMySQLConnection))
self.assertRaises(errors.PoolError, cnxpool.get_connection)
self.assertEqual(pcnx._cnx._pool_config_version,
cnxpool._config_version)
prev_config_version = pcnx._pool_config_version
prev_thread_id = pcnx.connection_id
pcnx.close()
# Change configuration
config_version = cnxpool._config_version
cnxpool.set_config(autocommit=True)
self.assertNotEqual(config_version, cnxpool._config_version)
pcnx = cnxpool.get_connection()
self.assertNotEqual(
pcnx._cnx._pool_config_version, prev_config_version)
self.assertNotEqual(prev_thread_id, pcnx.connection_id)
self.assertEqual(1, pcnx.autocommit)
pcnx.close()
def test__remove_connections(self):
dbconfig = tests.get_mysql_config()
cnxpool = pooling.MySQLConnectionPool(
pool_size=2, pool_name='test', **dbconfig)
pcnx = cnxpool.get_connection()
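        # Only the connection still queued is removed; the checked-out one stays with the caller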
self.assertEqual(1, cnxpool._remove_connections())
pcnx.close()
self.assertEqual(1, cnxpool._remove_connections())
self.assertEqual(0, cnxpool._remove_connections())
self.assertRaises(errors.PoolError, cnxpool.get_connection)


class ModuleConnectorPoolingTests(tests.MySQLConnectorTests):
"""Testing MySQL Connector module pooling functionality"""
def tearDown(self):
mysql.connector._CONNECTION_POOLS = {}
def test__connection_pools(self):
self.assertEqual(mysql.connector._CONNECTION_POOLS, {})
def test__get_pooled_connection(self):
dbconfig = tests.get_mysql_config()
mysql.connector._CONNECTION_POOLS.update({'spam': 'ham'})
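        # connect() must reject a registry entry that is not a MySQLConnectionPool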
self.assertRaises(errors.InterfaceError,
mysql.connector.connect, pool_name='spam')
mysql.connector._CONNECTION_POOLS = {}
mysql.connector.connect(pool_name='ham', **dbconfig)
self.assertTrue('ham' in mysql.connector._CONNECTION_POOLS)
cnxpool = mysql.connector._CONNECTION_POOLS['ham']
self.assertTrue(isinstance(cnxpool,
pooling.MySQLConnectionPool))
self.assertEqual('ham', cnxpool.pool_name)
mysql.connector.connect(pool_size=5, **dbconfig)
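        # Without an explicit pool_name, one is generated from the connection arguments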
pool_name = pooling.generate_pool_name(**dbconfig)
self.assertTrue(pool_name in mysql.connector._CONNECTION_POOLS)
def test_connect(self):
dbconfig = tests.get_mysql_config()
cnx = mysql.connector.connect(pool_size=1, pool_name='ham', **dbconfig)
exp = cnx.connection_id
cnx.close()
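        # Closing a pooled connection returns it to the pool; the same server session is reused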
self.assertEqual(
exp,
mysql.connector._get_pooled_connection(
pool_name='ham').connection_id
)
| gelab/mainr | tests/test_pooling.py | Python | gpl-2.0 | 14,346 | 0.00007 |