# -*- coding: utf-8 -*-
from core.exception import ProductError
from core.core import Core
from core.helpers.yaml_literal import Literal
from core.env_manager import EnvManager
from core.helpers.version import compare_versions
from core.download_manager import DownloadManager
from core.log_manager import LogManager
from core.models.platform import Platform
from core.models.file import File
from core.models.installer import Installer
from core.models.installed_product import InstalledProductInfo
import os
from collections.abc import Iterable
from collections import OrderedDict
class BaseProduct(object):
"""
    Represents the abstract base class for a product.
Descendants: Product, Application, Engine.
"""
def get_typename(self):
"""
        Descendants return a string with the type name: 'product', 'application' or 'engine'.
        This type name is used in the YAML representation of the product:
- product: Product1
title: ...
"""
raise NotImplementedError()
    def __init__(self, core: Core, attrs=None):
self.data = {}
# internal
self.core = core
        # self.envs does nothing
self.envs = EnvManager()
self.installer = None
# next fields are loaded from yaml
# meta
self.name = None
self.title = None
self.description = None
self.link = None
self.author = None
self.icon = None
# string list
self.tags = []
self.eula = None
        # filters
self.os = None
self.bitness = None
self.web_server = None
self.lang = None
self.platform = None
# versions
self.version = ''
self.installed_version = ''
# installer stuff
self.dependencies = []
self.files = []
self.install_command = None
self.upgrade_command = None
self.config = None
self.uninstall_command = None
self.find_installed_command = None
self.parameters = {}
if attrs is not None:
for item in attrs.keys():
self.__dict__[item] = attrs[item]
self.name = attrs[self.get_typename()]
if attrs.get('files'):
# create File objects for every file item
self.files = []
for f in attrs.get('files', []):
if f.get('file'):
self.files.append(File(self, **f))
self.installer = Installer(self, self.install_command, self.upgrade_command,
self.uninstall_command, self.find_installed_command)
def merge(self, **kwargs):
"""
Merges product representation in 'kwargs' dict with current product.
Current product fields are overwritten by values from kwargs.
:param kwargs: dict with product representation to merge.
"""
# name of attribute (product, engine, application) with product name
typename = self.get_typename()
self.name = kwargs[typename]
####
self.title = kwargs.get('title') or self.title
self.description = kwargs.get('description') or self.description
self.link = kwargs.get('link') or self.link
self.author = kwargs.get('author') or self.author
self.icon = kwargs.get('icon') or self.icon
self.tags = kwargs.get('tags') or self.tags
self.eula = kwargs.get('eula') or self.eula
self.os = kwargs.get('os') or self.os
self.bitness = kwargs.get('bitness') or self.bitness
self.web_server = kwargs.get('webserver') or self.web_server
self.lang = kwargs.get('lang') or self.lang
        # this field is not read from the YAML; it is built from os, bitness, web_server and lang:
self.platform = Platform(self.os, self.bitness, self.web_server, self.lang)
self.dependencies = kwargs.get('dependencies', []) or self.dependencies
self.version = str(kwargs.get('version', '')) or self.version
self.installed_version = str(kwargs.get('installed_version', '')) or self.installed_version
self.config = kwargs.get('config') or self.config
files_list = kwargs.get('files', [])
        if files_list:
# create File objects for every file item
self.files = []
for f in files_list:
fs = f.__getstate__() if isinstance(f, File) else f
if fs.get('file'):
self.files.append(File(self, **fs))
self.install_command = kwargs.get('install_command', None) or self.install_command
if self.install_command:
self.install_command = self.install_command.rstrip()
self.upgrade_command = kwargs.get('upgrade_command', None) or self.upgrade_command
if self.upgrade_command:
self.upgrade_command = self.upgrade_command.rstrip()
self.uninstall_command = kwargs.get('uninstall_command', None) or self.uninstall_command
if self.uninstall_command:
self.uninstall_command = self.uninstall_command.rstrip()
self.find_installed_command = kwargs.get('find_installed_command', None) or self.find_installed_command
if self.find_installed_command:
self.find_installed_command = self.find_installed_command.rstrip()
self.parameters = kwargs.get('parameters', None) or self.parameters
# TODO must be deprecated
# create Installer from commands,
        # the installer knows how to call install_command, uninstall_command
self.installer = Installer(self, self.install_command,
self.upgrade_command,
self.uninstall_command,
self.find_installed_command)
def __setattr__(self, name, value):
self.__dict__[name] = value
def __getstate__(self):
"""
        Returns a ProductState object used by the YAML dumper.
        ProductState is a thin OrderedDict wrapper.
        Only non-empty fields are saved.
:rtype : ProductState
"""
result = ProductState()
result[self.get_typename()] = self.name
if self.title:
result['title'] = self.title
if self.description:
result['description'] = Literal(self.description)
if self.link:
result['link'] = self.link
if self.author:
result['author'] = self.author
if self.icon:
result['icon'] = self.icon
if self.tags:
result['tags'] = self.tags
if self.eula:
result['eula'] = self.eula
if self.os:
result['os'] = self.os
if self.bitness:
result['bitness'] = self.bitness
if self.web_server:
result['web_server'] = self.web_server
if self.lang:
result['lang'] = self.lang
if self.version:
result['version'] = self.version
result['installed_version'] = self.get_installed_version()
if self.dependencies:
result['dependencies'] = self.dependencies
if self.files:
result['files'] = [file.__getstate__() for file in self.files]
if self.install_command:
result['install_command'] = Literal(self.install_command)
if self.upgrade_command:
result['upgrade_command'] = Literal(self.upgrade_command)
if self.uninstall_command:
result['uninstall_command'] = Literal(self.uninstall_command)
if self.find_installed_command:
result['find_installed_command'] = Literal(self.find_installed_command)
result['parameters'] = self.parameters or []
if self.config:
result["config"] = self.config
return result
    def to_dict(self, rich=False) -> OrderedDict:
        """
        Dumps product fields to an OrderedDict used in JSON responses.
:param rich: dump additional calculated fields.
"""
product_dict = self.__getstate__().get_dict()
if rich:
product_dict['name'] = self.name
product_dict['has_upgrade'] = self.has_upgrade()
product_dict['is_upgradable'] = self.is_upgradable()
return product_dict
def __repr__(self):
return '{0}({1}, {2})'.format(self.__class__.__name__, self.name, self.version or None)
    def is_installed(self) -> bool:
        """
        Checks whether a version greater than or equal to this one is installed.
        Used in DependencyManager and the self.install method.
"""
if not self.installed_version:
self.get_installed_version()
if self.installed_version:
compare_result = compare_versions(self.version, self.installed_version)
return compare_result <= 0
return False
def is_installed_any_version(self):
"""
        Checks whether any version of the product is installed.
Used in DependencyManager and self.upgrade method.
"""
if not self.installed_version:
self.get_installed_version()
return bool(self.installed_version)
    def install(self, parameters: dict) -> bool:
        """
        Installs itself:
        Skips installation if the product is already installed.
        Downloads files if they are not in the cache yet.
        Calls the installer object with parameters.
        :param parameters: dict with parameters for installation, example: {'install_dir': 'c:\\...'}
:return: whether the installation was successful
"""
if self.is_installed():
return False
# set name for log filename
self.set_log_path('install')
self.download()
result = self.installer.install(parameters)
self.unset_log_path('install')
        # logging.info("finished installing product {0}".format(str(result)))
return result
    def upgrade(self, parameters: dict) -> bool:
        """
        Upgrades itself:
        Works like install(), but calls the 'upgrade' method of the installer.
        Downloads files if they are not in the cache yet.
        Calls the installer object with parameters.
:param parameters:
:return: whether the upgrade was successful
"""
        # set name for log filename
self.set_log_path('upgrade')
self.download()
result = self.installer.upgrade(parameters)
self.unset_log_path('upgrade')
return result
def uninstall(self):
"""
Uninstalls itself.
:return: whether the uninstallation was successful
"""
self.set_log_path('uninstall')
result = self.installer.uninstall()
self.unset_log_path('uninstall')
return result
    def find_installed(self) -> InstalledProductInfo:
        """
        Checks whether the product is installed by calling find_installed_command.
"""
result = None
try:
result = self.installer.find_installed()
if result is not None:
                # remove the trailing slash/backslash from the end of install_dir
                if result.install_dir and (result.install_dir.endswith('/') or result.install_dir.endswith('\\')):
                    result.install_dir = result.install_dir[:-1]
        except Exception:
            raise ProductError("Product {0} find_installed command failed".format(self.name))
return result
def get_installed_version(self):
"""
        Searches for and returns the installed version in the current products collection, see Core.current.
"""
if not self.installed_version:
# search in current by name if installed_version is empty
synced_product = self.core.current_storage.feed.get_product(self.name)
if synced_product:
                # save the version if the product was found in current
self.installed_version = synced_product.installed_version
return self.installed_version
    def get_dependencies(self) -> list:
"""
Returns list of dependencies (product names)
"""
return self.dependencies or []
def download(self):
"""
Downloads files for installation with DownloadManager.
"""
if self.files:
downloader = DownloadManager(self.core, self)
downloader.download(self.files)
# DEPRECATED
def set_log_path(self, action: str):
"""
Sets env variables with log paths. Used in installer.
:param action: name of current action: 'install', 'uninstall', etc.
"""
# self.envs['%{0}_LOG_PATH%'.format(action.upper())] = \
# LogManager.get_instance(self.core).get_log_path(action)
# self.envs['%{0}_LOG_DIR%'.format(action.upper())] = \
# LogManager.get_instance(self.core).get_log_dir()
pass
# DEPRECATED
def unset_log_path(self, action):
"""
Removes env variables with log path. Called after installation.
:param action: name of current action: 'install', 'uninstall', etc.
"""
pass
# del self.envs['%{0}_LOG_DIR%'.format(action.upper())]
    def has_search_tokens(self, *tokens) -> bool:
        """
        Checks that the product attributes contain every token from the tokens list.
        Used in product search.
        :param tokens: list of string tokens to search for: ['php', 'mysql']
"""
for token in tokens:
if not self.has_search_token(token):
return False
return True
    def has_search_token(self, token: str) -> bool:
        """
        Checks whether the search token occurs in the product name, title, description, link or tags.
Used in product search.
:param token:
"""
token = token.strip().lower()
# search token in:
# product.name
if self.name.lower().find(token) >= 0:
return True
# product.title
if self.title.lower().find(token) >= 0:
return True
# product.description
if self.description and self.description.lower().find(token) >= 0:
return True
# product.link
if self.link and self.link.lower().find(token) >= 0:
return True
# product.tags
if self.tags:
for tag in self.tags:
if tag.lower().find(token) >= 0:
return True
return False
    def has_upgrade(self) -> bool:
        """
        Checks that the installed version is less than the product version.
        Used as a product attribute in templates for the web UI.
"""
# get installed version
installed_version = self.get_installed_version()
if installed_version:
# compare installed and own version
if compare_versions(self.version, installed_version) > 0:
return True
return False
    def is_upgradable(self) -> bool:
        """
        Checks whether the product can be upgraded.
        Used as a product attribute in templates for the web UI.
"""
return bool(self.upgrade_command)
    def get_product_with_version(self) -> str:
"""
Returns product name with version formatted as 'Product==1.2.3.4'.
Used in DependencyManager and ProductCollection.
"""
return "{0}=={1}".format(self.name, self.version)
class ProductState(Iterable):
"""
Represents product state as OrderedDict.
Used for serialization to json and yaml.
"""
def __init__(self):
self._coll = OrderedDict()
def __iter__(self):
# iterator
for k, v in self._coll.items():
yield k, v
def __setitem__(self, key, value):
"""
Special method to implement 'ps[key] = value' syntax.
"""
self._coll.__setitem__(key, value)
    def get_dict(self) -> OrderedDict:
"""
Returns result as OrderedDict
"""
return self._coll
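# A minimal usage sketch of ProductState (hypothetical values, not part of the
# original module): items iterate in insertion order, matching the YAML dump.
#
# state = ProductState()
# state['product'] = 'mysql'
# state['version'] = '5.7'
# for key, value in state:
#     print(key, value)   # -> product mysql / version 5.7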
# === Source: helicontech/zoo :: Zoocmd/core/models/base_product.py (license: apache-2.0) ===
# Copyright (c) 2010 Mark Sandstrom
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from functools import partial
from django.http import HttpResponse
from base import RerouteRegexURLPattern, url_with_pattern_class
from utils import rollup
__all__ = ['verb_url', 'request_method']
def request_method(request):
'''Returns the effective HTTP method of a request. To support the entire range of HTTP methods
from HTML forms (which only support GET and POST), an HTTP method may be emulated by
setting a POST parameter named "_method" to the name of the HTTP method to be emulated.
Example HTML:
<!-- Submits a form using the PUT method -->
<form>
<input type="text" name="name" value="value" />
<button type="submit" name="_method" value="put">Update</button>
</form>
Args:
request: an HttpRequest
Returns:
An upper-case string naming the HTTP method (like django.http.HttpRequest.method)
'''
# For security reasons POST is the only method that supports HTTP method emulation.
# For example, if POST requires csrf_token, we don't want POST methods to be called via
# GET (thereby bypassing CSRF protection). POST has the most limited semantics, and it
# is therefore safe to emulate HTTP methods with less-limited semantics. See
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec9.html ("Safe and Idempotent Methods")
# for details.
if request.method == 'POST' and '_method' in request.POST:
method = request.POST['_method'].upper()
else:
method = request.method
return method
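# A quick sketch of the emulation above using Django's test RequestFactory
# (used here for illustration only; it is not imported by this module):
#
# from django.test import RequestFactory
# factory = RequestFactory()
# request = factory.post('/items/1/', {'name': 'value', '_method': 'put'})
# assert request_method(request) == 'PUT'            # POST emulating PUT
# assert request_method(factory.get('/')) == 'GET'   # non-POST passes through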
class VerbRegexURLPattern(RerouteRegexURLPattern):
patterns_index = {}
def __init__(self, method, *args, **kwargs):
super(VerbRegexURLPattern, self).__init__(*args, **kwargs)
self.method = method.upper()
def reroute_callback(self, request, *args, **kwargs):
record = self.method_callbacks.get(request_method(request))
if not record:
return HttpResponse(status=405)
callback = record['callback']
kwargs.update(record['default_args'])
callback = rollup(callback, self.wrappers)
return callback(request, *args, **kwargs)
def reroute_config(self, wrappers, patterns_id):
super(VerbRegexURLPattern, self).reroute_config(wrappers, patterns_id)
# Let patterns with identical regexes that are defined within the same call
# to reroute_patterns be called a pattern group. Each pattern in a pattern group
# has a reference to shared dict (shared by the group) which maps http methods
# to pattern callbacks. Only one pattern from a group will ever be resolved (remember
# that the patterns all have identical regexes), so this shared dict is used to route
# to the correct callback for a given http method. All this hoopla is necessary since
# patterns are resolved outside the context of a request.
method_callbacks_by_regex = self.patterns_index.setdefault(patterns_id, {})
method_callbacks = method_callbacks_by_regex.setdefault(self.regex.pattern, {})
if self.method not in method_callbacks:
method_callbacks[self.method] = {'callback': self.callback, 'default_args': self.default_args}
self.default_args = {}
# Borg-like
self.method_callbacks = method_callbacks
def verb_url(method, regex, view, kwargs=None, name=None, prefix=''):
pattern_class = partial(VerbRegexURLPattern, method)
return url_with_pattern_class(pattern_class, regex, view, kwargs, name, prefix)
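# Hypothetical usage sketch: two verb_url() entries with the same regex form a
# pattern group, so GET and POST on the same path dispatch to different views,
# while any other HTTP method receives a 405. 'list_items' and 'create_item'
# are illustrative names, and the exact reroute_patterns() signature is assumed.
#
# urlpatterns = reroute_patterns('',
#     verb_url('GET', r'^items/$', list_items),
#     verb_url('POST', r'^items/$', create_item),
# )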
# === Source: dnerdy/django-reroute :: reroute/verbs.py (license: mit) ===
"""
GitHub WebHooks Server.
"""
# === Source: chevah/txghserf :: txghserf/__init__.py (license: bsd-3-clause) ===
import pytest
def test_fallback_on_default(docker_compose, nginxproxy):
r = nginxproxy.get("http://unknown.nginx-proxy.tld/port")
assert r.status_code == 200
    assert r.text == "answer from port 81\n"
# === Source: jwilder/nginx-proxy :: test/test_default-host.py (license: mit) ===
from jupyter_server.utils import url_path_join as ujoin
from .config import Lmod as LmodConfig
from .handler import default_handlers, PinsHandler
def _jupyter_server_extension_paths():
return [{"module": "jupyterlmod"}]
# Jupyter Extension points
def _jupyter_nbextension_paths():
return [
dict(
section="tree", src="static", dest="jupyterlmod", require="jupyterlmod/main"
)
]
def load_jupyter_server_extension(nbapp):
"""
Called when the extension is loaded.
Args:
nbapp : handle to the Notebook webserver instance.
"""
nbapp.log.info("Loading lmod extension")
lmod_config = LmodConfig(parent=nbapp)
launcher_pins = lmod_config.launcher_pins
web_app = nbapp.web_app
base_url = web_app.settings["base_url"]
for path, class_ in default_handlers:
web_app.add_handlers(".*$", [(ujoin(base_url, path), class_)])
web_app.add_handlers(".*$", [
(ujoin(base_url, 'lmod/launcher-pins'), PinsHandler, {'launcher_pins': launcher_pins}),
])
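# A hypothetical jupyter_server config sketch for enabling this extension and
# pinning launchers. The Lmod.launcher_pins option follows the config class
# imported above; jpserver_extensions is standard jupyter_server configuration.
#
# c.ServerApp.jpserver_extensions = {"jupyterlmod": True}
# c.Lmod.launcher_pins = ["gcc", "python"]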
# === Source: cmd-ntrf/jupyter-lmod :: jupyterlmod/__init__.py (license: mit) ===
def twoRepEle(array):
    """Print every element that occurs exactly twice in `array`."""
    if not array:
        return None
    # count occurrences; spaces are ignored (counted as zero)
    hash_table = {}
    for x in array:
        if x in hash_table:
            hash_table[x] += 1
        elif x != ' ':
            hash_table[x] = 1
        else:
            hash_table[x] = 0
    # report each twice-occurring element once, in order of first appearance
    reported = set()
    for x in array:
        if hash_table[x] == 2 and x not in reported:
            print(x)
            reported.add(x)
array = [1, 2, 3, 1, 2, 5, 6, 7]
twoRepEle(array)
# === Source: cmjagtap/Algorithms_and_DS :: searching/twoRepetedEle.py (license: gpl-3.0) ===
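# An equivalent sketch for twoRepEle above, using collections.Counter from the
# standard library; it prints each twice-occurring element once:
#
# from collections import Counter
# for x, count in Counter([1, 2, 3, 1, 2, 5, 6, 7]).items():
#     if count == 2:
#         print(x)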
# -*- Mode: Python; coding: utf-8; indent-tabs-mode: nil; tab-width: 4 -*-
### BEGIN LICENSE
# This file is in the public domain
### END LICENSE
import gettext
from gettext import gettext as _
gettext.textdomain('brazo')
import logging
logger = logging.getLogger('brazo')
from brazo_lib.AboutDialog import AboutDialog
# See brazo_lib.AboutDialog.py for more details about how this class works.
class AboutBrazoDialog(AboutDialog):
__gtype_name__ = "AboutBrazoDialog"
def finish_initializing(self, builder): # pylint: disable=E1002
"""Set up the about dialog"""
super(AboutBrazoDialog, self).finish_initializing(builder)
# Code for other initialization actions should be added here.
# === Source: nickpascucci/Robot-Arm :: software/desktop/brazo/brazo/AboutBrazoDialog.py (license: mit) ===
from django import forms
class LoginForm(forms.Form):
login = forms.CharField(max_length=255)
password = forms.CharField(widget=forms.PasswordInput())
target = forms.CharField()
# === Source: sstacha/uweb-install :: cms_files/forms.py (license: apache-2.0) ===
#!/usr/bin/env python
"""
Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
import os
from lib.core.common import Backend
from lib.core.common import isStackingAvailable
from lib.core.common import readInput
from lib.core.common import runningAsAdmin
from lib.core.data import conf
from lib.core.data import logger
from lib.core.enums import DBMS
from lib.core.enums import OS
from lib.core.exception import SqlmapFilePathException
from lib.core.exception import SqlmapMissingDependence
from lib.core.exception import SqlmapMissingMandatoryOptionException
from lib.core.exception import SqlmapMissingPrivileges
from lib.core.exception import SqlmapNotVulnerableException
from lib.core.exception import SqlmapUndefinedMethod
from lib.core.exception import SqlmapUnsupportedDBMSException
from lib.takeover.abstraction import Abstraction
from lib.takeover.icmpsh import ICMPsh
from lib.takeover.metasploit import Metasploit
from lib.takeover.registry import Registry
from plugins.generic.misc import Miscellaneous
class Takeover(Abstraction, Metasploit, ICMPsh, Registry, Miscellaneous):
"""
This class defines generic OS takeover functionalities for plugins.
"""
def __init__(self):
self.cmdTblName = "sqlmapoutput"
self.tblField = "data"
Abstraction.__init__(self)
def osCmd(self):
if isStackingAvailable() or conf.direct:
web = False
elif not isStackingAvailable() and Backend.isDbms(DBMS.MYSQL):
infoMsg = "going to use a web backdoor for command execution"
logger.info(infoMsg)
web = True
else:
errMsg = "unable to execute operating system commands via "
errMsg += "the back-end DBMS"
raise SqlmapNotVulnerableException(errMsg)
self.getRemoteTempPath()
self.initEnv(web=web)
if not web or (web and self.webBackdoorUrl is not None):
self.runCmd(conf.osCmd)
if not conf.osShell and not conf.osPwn and not conf.cleanup:
self.cleanup(web=web)
def osShell(self):
if isStackingAvailable() or conf.direct:
web = False
elif not isStackingAvailable() and Backend.isDbms(DBMS.MYSQL):
infoMsg = "going to use a web backdoor for command prompt"
logger.info(infoMsg)
web = True
else:
errMsg = "unable to prompt for an interactive operating "
errMsg += "system shell via the back-end DBMS because "
errMsg += "stacked queries SQL injection is not supported"
raise SqlmapNotVulnerableException(errMsg)
self.getRemoteTempPath()
self.initEnv(web=web)
if not web or (web and self.webBackdoorUrl is not None):
self.shell()
if not conf.osPwn and not conf.cleanup:
self.cleanup(web=web)
def osPwn(self):
goUdf = False
fallbackToWeb = False
setupSuccess = False
self.checkDbmsOs()
if Backend.isOs(OS.WINDOWS):
msg = "how do you want to establish the tunnel?"
msg += "\n[1] TCP: Metasploit Framework (default)"
msg += "\n[2] ICMP: icmpsh - ICMP tunneling"
valids = (1, 2)
while True:
tunnel = readInput(msg, default=1)
if isinstance(tunnel, basestring) and tunnel.isdigit() and int(tunnel) in valids:
tunnel = int(tunnel)
break
elif isinstance(tunnel, int) and tunnel in valids:
break
else:
warnMsg = "invalid value, valid values are 1 and 2"
logger.warn(warnMsg)
else:
tunnel = 1
debugMsg = "the tunnel can be established only via TCP when "
debugMsg += "the back-end DBMS is not Windows"
logger.debug(debugMsg)
if tunnel == 2:
isAdmin = runningAsAdmin()
if not isAdmin:
errMsg = "you need to run sqlmap as an administrator "
errMsg += "if you want to establish an out-of-band ICMP "
errMsg += "tunnel because icmpsh uses raw sockets to "
errMsg += "sniff and craft ICMP packets"
raise SqlmapMissingPrivileges(errMsg)
try:
from impacket import ImpactDecoder
from impacket import ImpactPacket
except ImportError:
errMsg = "sqlmap requires 'python-impacket' third-party library "
errMsg += "in order to run icmpsh master. You can get it at "
errMsg += "http://code.google.com/p/impacket/downloads/list"
raise SqlmapMissingDependence(errMsg)
sysIgnoreIcmp = "/proc/sys/net/ipv4/icmp_echo_ignore_all"
if os.path.exists(sysIgnoreIcmp):
fp = open(sysIgnoreIcmp, "wb")
fp.write("1")
fp.close()
else:
errMsg = "you need to disable ICMP replies by your machine "
errMsg += "system-wide. For example run on Linux/Unix:\n"
errMsg += "# sysctl -w net.ipv4.icmp_echo_ignore_all=1\n"
errMsg += "If you miss doing that, you will receive "
errMsg += "information from the database server and it "
errMsg += "is unlikely to receive commands sent from you"
logger.error(errMsg)
if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL):
self.sysUdfs.pop("sys_bineval")
self.getRemoteTempPath()
if isStackingAvailable() or conf.direct:
web = False
self.initEnv(web=web)
if tunnel == 1:
if Backend.getIdentifiedDbms() in (DBMS.MYSQL, DBMS.PGSQL):
msg = "how do you want to execute the Metasploit shellcode "
msg += "on the back-end database underlying operating system?"
msg += "\n[1] Via UDF 'sys_bineval' (in-memory way, anti-forensics, default)"
msg += "\n[2] Via shellcodeexec (file system way, preferred on 64-bit systems)"
while True:
choice = readInput(msg, default=1)
if isinstance(choice, basestring) and choice.isdigit() and int(choice) in (1, 2):
choice = int(choice)
break
elif isinstance(choice, int) and choice in (1, 2):
break
else:
warnMsg = "invalid value, valid values are 1 and 2"
logger.warn(warnMsg)
if choice == 1:
goUdf = True
if goUdf:
exitfunc = "thread"
setupSuccess = True
else:
exitfunc = "process"
self.createMsfShellcode(exitfunc=exitfunc, format="raw", extra="BufferRegister=EAX", encode="x86/alpha_mixed")
if not goUdf:
setupSuccess = self.uploadShellcodeexec(web=web)
if setupSuccess is not True:
if Backend.isDbms(DBMS.MYSQL):
fallbackToWeb = True
else:
msg = "unable to mount the operating system takeover"
raise SqlmapFilePathException(msg)
if Backend.isOs(OS.WINDOWS) and Backend.isDbms(DBMS.MYSQL) and conf.privEsc:
debugMsg = "by default MySQL on Windows runs as SYSTEM "
debugMsg += "user, no need to privilege escalate"
logger.debug(debugMsg)
elif tunnel == 2:
setupSuccess = self.uploadIcmpshSlave(web=web)
if setupSuccess is not True:
if Backend.isDbms(DBMS.MYSQL):
fallbackToWeb = True
else:
msg = "unable to mount the operating system takeover"
raise SqlmapFilePathException(msg)
if not setupSuccess and Backend.isDbms(DBMS.MYSQL) and not conf.direct and (not isStackingAvailable() or fallbackToWeb):
web = True
if fallbackToWeb:
infoMsg = "falling back to web backdoor to establish the tunnel"
else:
infoMsg = "going to use a web backdoor to establish the tunnel"
logger.info(infoMsg)
self.initEnv(web=web, forceInit=fallbackToWeb)
if self.webBackdoorUrl:
if not Backend.isOs(OS.WINDOWS) and conf.privEsc:
# Unset --priv-esc if the back-end DBMS underlying operating
# system is not Windows
conf.privEsc = False
warnMsg = "sqlmap does not implement any operating system "
warnMsg += "user privilege escalation technique when the "
warnMsg += "back-end DBMS underlying system is not Windows"
logger.warn(warnMsg)
if tunnel == 1:
self.createMsfShellcode(exitfunc="process", format="raw", extra="BufferRegister=EAX", encode="x86/alpha_mixed")
setupSuccess = self.uploadShellcodeexec(web=web)
if setupSuccess is not True:
msg = "unable to mount the operating system takeover"
raise SqlmapFilePathException(msg)
elif tunnel == 2:
setupSuccess = self.uploadIcmpshSlave(web=web)
if setupSuccess is not True:
msg = "unable to mount the operating system takeover"
raise SqlmapFilePathException(msg)
if setupSuccess:
if tunnel == 1:
self.pwn(goUdf)
elif tunnel == 2:
self.icmpPwn()
else:
errMsg = "unable to prompt for an out-of-band session"
raise SqlmapNotVulnerableException(errMsg)
if not conf.cleanup:
self.cleanup(web=web)
def osSmb(self):
self.checkDbmsOs()
if not Backend.isOs(OS.WINDOWS):
errMsg = "the back-end DBMS underlying operating system is "
errMsg += "not Windows: it is not possible to perform the SMB "
errMsg += "relay attack"
raise SqlmapUnsupportedDBMSException(errMsg)
if not isStackingAvailable() and not conf.direct:
if Backend.getIdentifiedDbms() in (DBMS.PGSQL, DBMS.MSSQL):
errMsg = "on this back-end DBMS it is only possible to "
errMsg += "perform the SMB relay attack if stacked "
errMsg += "queries are supported"
raise SqlmapUnsupportedDBMSException(errMsg)
elif Backend.isDbms(DBMS.MYSQL):
debugMsg = "since stacked queries are not supported, "
debugMsg += "sqlmap is going to perform the SMB relay "
debugMsg += "attack via inference blind SQL injection"
logger.debug(debugMsg)
printWarn = True
warnMsg = "it is unlikely that this attack will be successful "
if Backend.isDbms(DBMS.MYSQL):
warnMsg += "because by default MySQL on Windows runs as "
warnMsg += "Local System which is not a real user, it does "
warnMsg += "not send the NTLM session hash when connecting to "
warnMsg += "a SMB service"
elif Backend.isDbms(DBMS.PGSQL):
warnMsg += "because by default PostgreSQL on Windows runs "
warnMsg += "as postgres user which is a real user of the "
warnMsg += "system, but not within the Administrators group"
elif Backend.isDbms(DBMS.MSSQL) and Backend.isVersionWithin(("2005", "2008")):
warnMsg += "because often Microsoft SQL Server %s " % Backend.getVersion()
warnMsg += "runs as Network Service which is not a real user, "
warnMsg += "it does not send the NTLM session hash when "
warnMsg += "connecting to a SMB service"
else:
printWarn = False
if printWarn:
logger.warn(warnMsg)
self.smb()
def osBof(self):
if not isStackingAvailable() and not conf.direct:
return
if not Backend.isDbms(DBMS.MSSQL) or not Backend.isVersionWithin(("2000", "2005")):
errMsg = "the back-end DBMS must be Microsoft SQL Server "
errMsg += "2000 or 2005 to be able to exploit the heap-based "
errMsg += "buffer overflow in the 'sp_replwritetovarbin' "
errMsg += "stored procedure (MS09-004)"
raise SqlmapUnsupportedDBMSException(errMsg)
infoMsg = "going to exploit the Microsoft SQL Server %s " % Backend.getVersion()
infoMsg += "'sp_replwritetovarbin' stored procedure heap-based "
infoMsg += "buffer overflow (MS09-004)"
logger.info(infoMsg)
msg = "this technique is likely to DoS the DBMS process, are you "
msg += "sure that you want to carry with the exploit? [y/N] "
choice = readInput(msg, default="N")
dos = choice and choice[0].lower() == "y"
if dos:
self.initEnv(mandatory=False, detailed=True)
self.getRemoteTempPath()
self.createMsfShellcode(exitfunc="seh", format="raw", extra="-b 27", encode=True)
self.bof()
def uncPathRequest(self):
errMsg = "'uncPathRequest' method must be defined "
errMsg += "into the specific DBMS plugin"
raise SqlmapUndefinedMethod(errMsg)
def _regInit(self):
if not isStackingAvailable() and not conf.direct:
return
self.checkDbmsOs()
if not Backend.isOs(OS.WINDOWS):
errMsg = "the back-end DBMS underlying operating system is "
errMsg += "not Windows"
raise SqlmapUnsupportedDBMSException(errMsg)
self.initEnv()
self.getRemoteTempPath()
def regRead(self):
self._regInit()
if not conf.regKey:
default = "HKEY_LOCAL_MACHINE\\SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion"
msg = "which registry key do you want to read? [%s] " % default
regKey = readInput(msg, default=default)
else:
regKey = conf.regKey
if not conf.regVal:
default = "ProductName"
msg = "which registry key value do you want to read? [%s] " % default
regVal = readInput(msg, default=default)
else:
regVal = conf.regVal
infoMsg = "reading Windows registry path '%s\%s' " % (regKey, regVal)
logger.info(infoMsg)
return self.readRegKey(regKey, regVal, True)
def regAdd(self):
self._regInit()
errMsg = "missing mandatory option"
if not conf.regKey:
msg = "which registry key do you want to write? "
regKey = readInput(msg)
if not regKey:
raise SqlmapMissingMandatoryOptionException(errMsg)
else:
regKey = conf.regKey
if not conf.regVal:
msg = "which registry key value do you want to write? "
regVal = readInput(msg)
if not regVal:
raise SqlmapMissingMandatoryOptionException(errMsg)
else:
regVal = conf.regVal
if not conf.regData:
msg = "which registry key value data do you want to write? "
regData = readInput(msg)
if not regData:
raise SqlmapMissingMandatoryOptionException(errMsg)
else:
regData = conf.regData
if not conf.regType:
default = "REG_SZ"
msg = "which registry key value data-type is it? "
msg += "[%s] " % default
regType = readInput(msg, default=default)
else:
regType = conf.regType
infoMsg = "adding Windows registry path '%s\%s' " % (regKey, regVal)
infoMsg += "with data '%s'. " % regData
infoMsg += "This will work only if the user running the database "
infoMsg += "process has privileges to modify the Windows registry."
logger.info(infoMsg)
self.addRegKey(regKey, regVal, regType, regData)
def regDel(self):
self._regInit()
errMsg = "missing mandatory option"
if not conf.regKey:
msg = "which registry key do you want to delete? "
regKey = readInput(msg)
if not regKey:
raise SqlmapMissingMandatoryOptionException(errMsg)
else:
regKey = conf.regKey
if not conf.regVal:
msg = "which registry key value do you want to delete? "
regVal = readInput(msg)
if not regVal:
raise SqlmapMissingMandatoryOptionException(errMsg)
else:
regVal = conf.regVal
message = "are you sure that you want to delete the Windows "
message += "registry path '%s\%s? [y/N] " % (regKey, regVal)
output = readInput(message, default="N")
if output and output[0] not in ("Y", "y"):
return
infoMsg = "deleting Windows registry path '%s\%s'. " % (regKey, regVal)
infoMsg += "This will work only if the user running the database "
infoMsg += "process has privileges to modify the Windows registry."
logger.info(infoMsg)
self.delRegKey(regKey, regVal)
# === Source: V11/volcano :: server/sqlmap/plugins/generic/takeover.py (license: mit) ===
#
# Copyright (c) 2013-2020 Contributors to the Eclipse Foundation
#
# See the NOTICE file distributed with this work for additional information regarding copyright
# ownership. All rights reserved. This program and the accompanying materials are made available
# under the terms of the Apache License, Version 2.0 which accompanies this distribution and is
# available at http://www.apache.org/licenses/LICENSE-2.0.txt
# ===============================================================================================
from pygw.base import GeoWaveObject
from .statistic_binning_strategy import StatisticBinningStrategy
from .statistic_type import DataTypeStatisticType, IndexStatisticType, FieldStatisticType
from .binning_strategy_mappings import map_binning_strategy
from ..base.java_transformer import NoOpTransformer
class Statistic(GeoWaveObject):
"""
Base GeoWave statistic.
"""
def __init__(self, java_ref, java_transformer=NoOpTransformer()):
self.java_transformer = java_transformer
super().__init__(java_ref)
def get_statistic_type(self):
"""
Get the statistic type associated with the statistic.
Returns:
The type of this statistic.
"""
pass
def get_description(self):
"""
Gets a description of the statistic.
Returns:
A description of the statistic.
"""
return self._java_ref.getDescription()
def set_tag(self, tag):
"""
Sets the tag of the statistic.
Args:
tag (str): The tag to use for the statistic
"""
self._java_ref.setTag(tag)
def get_tag(self):
"""
Get the tag for the statistic.
Returns:
The tag for this statistic.
"""
return self._java_ref.getTag()
def set_internal(self):
"""
Set the tag of this statistic to the default internal statistic tag.
"""
self._java_ref.setInternal()
def is_internal(self):
"""
Checks if the statistic is an internal statistic.
Returns:
True if the statistic is internal.
"""
return self._java_ref.isInternal()
def set_binning_strategy(self, binning_strategy):
"""
Sets the binning strategy of the statistic.
Args:
binning_strategy (StatisticBinningStrategy): The binning strategy to use for the statistic.
"""
if not isinstance(binning_strategy, StatisticBinningStrategy):
raise AttributeError('Expected an instance of StatisticBinningStrategy')
self._java_ref.setBinningStrategy(binning_strategy.java_ref())
def get_binning_strategy(self):
"""
Gets the binning strategy used by the statistic.
Returns:
The binning strategy used by the statistic.
"""
return map_binning_strategy(self._java_ref.getBinningStrategy())
class IndexStatistic(Statistic):
def get_statistic_type(self):
"""
Get the statistic type associated with the statistic.
Returns:
The type of this statistic.
"""
return IndexStatisticType(self._java_ref.getStatisticType())
def set_index_name(self, name):
"""
Sets the index name of the statistic.
Args:
name (str): The index name to use for the statistic
"""
self._java_ref.setIndexName(name)
def get_index_name(self):
"""
Get the index name associated with the statistic.
Returns:
The index name of this statistic.
"""
return self._java_ref.getIndexName()
class DataTypeStatistic(Statistic):
def get_statistic_type(self):
"""
Get the statistic type associated with the statistic.
Returns:
The type of this statistic.
"""
return DataTypeStatisticType(self._java_ref.getStatisticType())
def set_type_name(self, name):
"""
Sets the type name of the statistic.
Args:
name (str): The type name to use for the statistic
"""
self._java_ref.setTypeName(name)
def get_type_name(self):
"""
Get the type name associated with the statistic.
Returns:
The type name of this statistic.
"""
return self._java_ref.getTypeName()
class FieldStatistic(Statistic):
def get_statistic_type(self):
"""
Get the statistic type associated with the statistic.
Returns:
The type of this statistic.
"""
return FieldStatisticType(self._java_ref.getStatisticType())
def set_type_name(self, name):
"""
Sets the type name of the statistic.
Args:
name (str): The type name to use for the statistic
"""
self._java_ref.setTypeName(name)
def get_type_name(self):
"""
Get the type name associated with the statistic.
Returns:
The type name of this statistic.
"""
return self._java_ref.getTypeName()
def set_field_name(self, field_name):
"""
Sets the field name of the statistic.
Args:
field_name (str): The field name to use for the statistic
"""
self._java_ref.setFieldName(field_name)
def get_field_name(self):
"""
Get the field name associated with the statistic.
Returns:
The field name of this statistic.
"""
return self._java_ref.getFieldName()
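# Hypothetical usage sketch (names are illustrative; how the statistic object
# is obtained depends on the surrounding pygw data store API):
#
# stat.set_field_name('population')      # FieldStatistic: bind to a field
# stat.set_tag('population-stats')       # custom tag instead of the default
# stat.set_binning_strategy(strategy)    # any StatisticBinningStrategy
# if not stat.is_internal():
#     print(stat.get_description())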
# === Source: locationtech/geowave :: python/src/main/python/pygw/statistics/statistic.py (license: apache-2.0) ===
# Copyright (C) 2015 Universidad Politecnica de Madrid
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import urllib
from keystone.tests import test_v3
from keystone.common import config as common_cfg
from keystone.contrib.two_factor_auth import controllers
from keystone.contrib.two_factor_auth import core
from keystone.openstack.common import log
from keystone import exception
import pyotp
import json
LOG = log.getLogger(__name__)
TWO_FACTOR_USER_URL = '/users/{user_id}'
TWO_FACTOR_BASE_URL = '/OS-TWO-FACTOR'
AUTH_ENDPOINT = '/two_factor_auth'
QUESTION_ENDPOINT = '/sec_question'
DATA_ENDPOINT = '/two_factor_data'
DEVICES_ENDPOINT = '/devices'
TWO_FACTOR_URL = TWO_FACTOR_USER_URL + TWO_FACTOR_BASE_URL + AUTH_ENDPOINT
TWO_FACTOR_QUESTION_URL = TWO_FACTOR_USER_URL + TWO_FACTOR_BASE_URL + QUESTION_ENDPOINT
TWO_FACTOR_DATA_URL = TWO_FACTOR_USER_URL + TWO_FACTOR_BASE_URL + DATA_ENDPOINT
TWO_FACTOR_DEVICES_URL = TWO_FACTOR_USER_URL + TWO_FACTOR_BASE_URL + DEVICES_ENDPOINT
class TwoFactorBaseTests(test_v3.RestfulTestCase):
EXTENSION_NAME = 'two_factor_auth'
EXTENSION_TO_ADD = 'two_factor_auth_extension'
SAMPLE_SECURITY_QUESTION = 'Sample question'
SAMPLE_SECURITY_ANSWER = 'Sample answer'
def setUp(self):
super(TwoFactorBaseTests, self).setUp()
# Now that the app has been served, we can query CONF values
self.base_url = 'http://localhost/v3'
self.controller = controllers.TwoFactorV3Controller()
self.manager = core.TwoFactorAuthManager()
def _create_two_factor_key(self, user_id, expected_status=None):
data = self.new_ref()
data['security_question'] = self.SAMPLE_SECURITY_QUESTION
data['security_answer'] = self.SAMPLE_SECURITY_ANSWER
return self.post(
TWO_FACTOR_URL.format(user_id=user_id),
body={'two_factor_auth': data},
expected_status=expected_status
)
def _create_two_factor_key_no_data(self, user_id, expected_status=None):
return self.post(
TWO_FACTOR_URL.format(user_id=user_id),
expected_status=expected_status
)
def _delete_two_factor_key(self, user_id, expected_status=None):
return self.delete(TWO_FACTOR_URL.format(user_id=user_id), expected_status=expected_status)
def _check_is_two_factor_enabled(self, expected_status=None, **kwargs):
return self.head(
TWO_FACTOR_BASE_URL + AUTH_ENDPOINT + '?' +urllib.urlencode(kwargs),
expected_status=expected_status)
def _check_security_question(self, user_id, sec_answer, expected_status=None):
body = {
'two_factor_auth': {
'security_answer': sec_answer
}
}
return self.get(TWO_FACTOR_QUESTION_URL.format(user_id=user_id),
expected_status=expected_status,
body=body)
def _get_two_factor_data(self, user_id, expected_status=None):
return self.get(TWO_FACTOR_DATA_URL.format(user_id=user_id),
expected_status=expected_status)
def _remember_device(self, user_id, expected_status=None, **kwargs):
try:
kwargs['user_id'] = user_id
self.manager.is_two_factor_enabled(user_id=user_id)
except exception.NotFound:
self._create_two_factor_key(user_id=user_id)
return json.loads(self.post(TWO_FACTOR_BASE_URL + DEVICES_ENDPOINT + '?' + urllib.urlencode(kwargs)).body)['two_factor_auth']
def _check_for_device(self, expected_status=None, **kwargs):
        return self.head(TWO_FACTOR_BASE_URL + DEVICES_ENDPOINT + '?' + urllib.urlencode(kwargs), expected_status=expected_status)
def _delete_devices(self, user_id, expected_status=None):
return self.delete(TWO_FACTOR_DEVICES_URL.format(user_id=user_id), expected_status=expected_status)
def _create_user(self):
user = self.new_user_ref(domain_id=self.domain_id)
password = user['password']
user = self.identity_api.create_user(user)
user['password'] = password
return user
def _delete_user(self, user_id):
self.delete(TWO_FACTOR_USER_URL.format(user_id=user_id))
class TwoFactorCRUDTests(TwoFactorBaseTests):
def test_two_factor_enable(self):
self._create_two_factor_key(user_id=self.user_id)
def test_two_factor_new_code(self):
key1 = self._create_two_factor_key(user_id=self.user_id)
key2 = self._create_two_factor_key(user_id=self.user_id)
self.assertNotEqual(key1, key2)
def test_two_factor_new_code_no_data_right(self):
self._create_two_factor_key(user_id=self.user_id)
self._create_two_factor_key_no_data(user_id=self.user_id)
def test_two_factor_new_code_no_data_wrong(self):
self._create_two_factor_key_no_data(user_id=self.user_id, expected_status=400)
def test_two_factor_disable_after_enabling(self):
self._create_two_factor_key(user_id=self.user_id)
self._delete_two_factor_key(user_id=self.user_id)
def test_two_factor_disable_without_enabling(self):
self._delete_two_factor_key(user_id=self.user_id, expected_status=404)
def test_two_factor_is_enabled(self):
self._create_two_factor_key(user_id=self.user_id)
self._check_is_two_factor_enabled(user_id=self.user_id)
def test_two_factor_is_enabled_name_and_domain(self):
self._create_two_factor_key(user_id=self.user_id)
self._check_is_two_factor_enabled(
user_name=self.user['name'],
domain_id=self.user['domain_id'])
def test_two_factor_is_disabled(self):
self._check_is_two_factor_enabled(user_id=self.user_id, expected_status=404)
def test_two_factor_is_disabled_name_and_domain(self):
self._check_is_two_factor_enabled(
user_name=self.user['name'],
domain_id=self.user['domain_id'],
expected_status=404)
def test_two_factor_check_no_params(self):
self._check_is_two_factor_enabled(expected_status=400)
def test_two_factor_check_no_domain(self):
self._check_is_two_factor_enabled(
user_name=self.user['name'],
expected_status=400)
def test_two_factor_check_no_username(self):
self._check_is_two_factor_enabled(
domain_id=self.user['domain_id'],
expected_status=400)
def test_two_factor_is_enabled_after_deleting(self):
self._create_two_factor_key(user_id=self.user_id)
self._check_is_two_factor_enabled(user_id=self.user_id)
self._delete_two_factor_key(user_id=self.user_id)
self._check_is_two_factor_enabled(user_id=self.user_id, expected_status=404)
def test_two_factor_create_key_for_nonexistent_user(self):
self._create_two_factor_key(user_id='nonexistent_user', expected_status=404)
def test_two_factor_delete_user(self):
user = self._create_user()
self._create_two_factor_key(user_id=user['id'])
self._check_is_two_factor_enabled(user_id=user['id'])
self._delete_user(user['id'])
self._check_is_two_factor_enabled(user_id=user['id'], expected_status=404)
class TwoFactorSecQuestionTests(TwoFactorBaseTests):
def test_security_question_get(self):
self._create_two_factor_key(user_id=self.user_id)
data = self._get_two_factor_data(user_id=self.user_id)
self.assertEqual(data.result['two_factor_auth']['security_question'],
self.SAMPLE_SECURITY_QUESTION)
def test_security_question_correct(self):
self._create_two_factor_key(user_id=self.user_id)
self._check_security_question(user_id=self.user_id,
sec_answer=self.SAMPLE_SECURITY_ANSWER)
def test_security_question_wrong(self):
self._create_two_factor_key(user_id=self.user_id)
self._check_security_question(user_id=self.user_id,
sec_answer='Wrong answer',
expected_status=401)
def test_security_question_nonexistent(self):
self._check_security_question(user_id=self.user_id,
sec_answer='Does not matter',
expected_status=404)
class TwoFactorDevicesCRUDTests(TwoFactorBaseTests):
def test_remember_device(self):
self._remember_device(user_id=self.user_id)
def test_remember_device_name_and_domain(self):
self._remember_device(user_id=self.user_id,
user_name=self.user['name'],
domain_id=self.user['domain_id'])
def test_device_right_data(self):
data = self._remember_device(user_id=self.user_id)
self._check_for_device(user_id=self.user_id,
device_id=data['device_id'],
device_token=data['device_token'])
def test_device_right_data_name_and_domain(self):
data = self._remember_device(user_id=self.user_id,
user_name=self.user['name'],
domain_id=self.user['domain_id'])
self._check_for_device(user_name=self.user['name'],
domain_id=self.user['domain_id'],
device_id=data['device_id'],
device_token=data['device_token'])
def test_device_updates_token(self):
data = self._remember_device(user_id=self.user_id)
new_data = self._remember_device(user_id=self.user_id,
device_id=data['device_id'],
device_token=data['device_token'])
self.assertEqual(new_data['device_id'], data['device_id'])
self.assertNotEqual(new_data['device_token'], data['device_token'])
def test_device_wrong_user(self):
user = self._create_user()
data = self._remember_device(user_id=self.user_id)
self._check_for_device(user_id=user['id'],
device_id=data['device_id'],
device_token=data['device_token'],
expected_status=404)
def test_device_wrong_device(self):
data = self._remember_device(user_id=self.user_id)
self._check_for_device(user_id=self.user_id,
device_id='just_another_device',
device_token=data['device_token'],
expected_status=404)
def test_device_fake_token(self):
data = self._remember_device(user_id=self.user_id)
self._check_for_device(user_id=self.user_id,
device_id=data['device_id'],
device_token='fake_token',
expected_status=404)
def test_device_old_token(self):
data = self._remember_device(user_id=self.user_id)
self._remember_device(user_id=self.user_id,
device_id=data['device_id'],
device_token=data['device_token'])
self._check_for_device(user_id=self.user_id,
device_id=data['device_id'],
device_token=data['device_token'],
expected_status=403)
def test_device_delete_all(self):
data = self._remember_device(user_id=self.user_id)
self._delete_devices(user_id=self.user_id)
self._check_for_device(user_id=self.user_id,
device_id=data['device_id'],
device_token=data['device_token'],
expected_status=404)
def test_device_does_not_delete_all_devices_when_fake_token(self):
data = self._remember_device(user_id=self.user_id)
self._check_for_device(user_id=self.user_id,
device_id=data['device_id'],
device_token='fake_token',
expected_status=404)
self._check_for_device(user_id=self.user_id,
device_id=data['device_id'],
device_token=data['device_token'])
def test_device_deletes_all_devices_when_old_token(self):
data = self._remember_device(user_id=self.user_id)
new_data = self._remember_device(user_id=self.user_id,
device_id=data['device_id'],
device_token=data['device_token'])
self._check_for_device(user_id=self.user_id,
device_id=data['device_id'],
device_token=data['device_token'],
expected_status=403)
self._check_for_device(user_id=self.user_id,
device_id=new_data['device_id'],
device_token=new_data['device_token'],
expected_status=404)
def test_device_delete_user(self):
user = self._create_user()
data = self._remember_device(user_id=user['id'])
self._delete_user(user['id'])
self._check_for_device(user_id=user['id'],
device_id=data['device_id'],
device_token=data['device_token'],
expected_status=404)
def test_device_disable_two_factor(self):
data = self._remember_device(user_id=self.user_id)
self._delete_two_factor_key(user_id=self.user_id)
self._check_for_device(user_id=self.user_id,
device_id=data['device_id'],
device_token=data['device_token'],
expected_status=404)
class TwoFactorAuthTests(TwoFactorBaseTests):
def auth_plugin_config_override(self, methods=None, **method_classes):
if methods is None:
methods = ['external', 'password', 'token', 'oauth1', 'saml2', 'oauth2']
if not method_classes:
method_classes = dict(
external='keystone.auth.plugins.external.DefaultDomain',
password='keystone.auth.plugins.two_factor.TwoFactor',
token='keystone.auth.plugins.token.Token',
oauth1='keystone.auth.plugins.oauth1.OAuth',
saml2='keystone.auth.plugins.saml2.Saml2',
oauth2='keystone.auth.plugins.oauth2.OAuth2',
)
self.config_fixture.config(group='auth', methods=methods)
common_cfg.setup_authentication()
if method_classes:
self.config_fixture.config(group='auth', **method_classes)
def _auth_body(self, **kwargs):
body = {
"auth": {
"identity": {
"methods": [
"password"
],
"password": {
"user": {
}
},
}
}
}
payload = body['auth']['identity']['password']
if 'user_id' in kwargs:
payload['user']['id'] = kwargs['user_id']
if 'password' in kwargs:
payload['user']['password'] = kwargs['password']
if 'user_name' in kwargs:
payload['user']['name'] = kwargs['user_name']
if 'domain_id' in kwargs:
payload['user']['domain'] = {}
payload['user']['domain']['id'] = kwargs['domain_id']
if 'verification_code' in kwargs:
payload['user']['verification_code'] = kwargs['verification_code']
if 'device_data' in kwargs:
payload['user']['device_data'] = kwargs['device_data']
return body
def _authenticate(self, auth_body, expected_status=201):
return self.post('/auth/tokens', body=auth_body, expected_status=expected_status, noauth=True)
def _get_current_code(self, user_id):
two_factor_info = self.manager.get_two_factor_info(user_id)
totp = pyotp.TOTP(two_factor_info.two_factor_key)
return totp.now()
def test_auth_correct(self):
self._create_two_factor_key(user_id=self.user_id)
req = self._auth_body(user_id=self.user_id,
password=self.user['password'],
verification_code=self._get_current_code(self.user_id))
self._authenticate(auth_body=req)
def test_auth_correct_two_factor_disabled(self):
req = self._auth_body(
user_id=self.user_id,
password=self.user['password'])
self._authenticate(auth_body=req)
def test_auth_correct_name_and_domain(self):
self._create_two_factor_key(user_id=self.user_id)
req = self._auth_body(
user_name=self.user['name'],
domain_id=self.user['domain_id'],
verification_code=self._get_current_code(self.user_id),
password=self.user['password'])
self._authenticate(auth_body=req)
def test_auth_correct_two_factor_disabled_name_and_domain(self):
req = self._auth_body(
user_name=self.user['name'],
domain_id=self.user['domain_id'],
password=self.user['password'])
self._authenticate(auth_body=req)
def test_auth_no_code(self):
self._create_two_factor_key(user_id=self.user_id)
req = self._auth_body(
user_id=self.user_id,
password=self.user['password'])
self._authenticate(auth_body=req, expected_status=400)
def test_auth_wrong_code(self):
self._create_two_factor_key(user_id=self.user_id)
req = self._auth_body(
user_id=self.user_id,
verification_code='123456',
password=self.user['password'])
self._authenticate(auth_body=req, expected_status=401)
def test_auth_right_device_data(self):
self._create_two_factor_key(user_id=self.user_id)
data = self.manager.remember_device(user_id=self.user_id)
req = self._auth_body(
user_id=self.user_id,
device_data=data,
password=self.user['password'])
self._authenticate(auth_body=req)
def test_auth_device_data_from_another_user(self):
user = self._create_user()
self._create_two_factor_key(user_id=user['id'])
self._create_two_factor_key(user_id=self.user_id)
user_device = self.manager.remember_device(user_id=self.user_id)
new_user_device = self.manager.remember_device(user_id=user['id'])
req = self._auth_body(
user_id=self.user_id,
device_data=new_user_device,
password=self.user['password'])
self._authenticate(auth_body=req, expected_status=401)
# === Source: ging/keystone :: keystone/tests/test_v3_two_factor_auth.py (license: apache-2.0) ===
import asyncio
import random
import names
from chilero.web.test import asynctest
from chilero.pg import Resource
from chilero.pg.test import TestCase, TEST_DB_SUFFIX
import json
class Friends(Resource):
order_by = 'name ASC'
search_fields = ['name']
allowed_fields = ['name', 'meta']
required_fields = ['name']
allow_order_by = ['name']
def serialize_object(self, row):
return dict(
id=row[0],
name=row[1],
meta=row[2],
url=self.get_object_url(row[0])
)
def serialize_list_object(self, row):
return dict(
name=row[1],
url=self.get_object_url(row[0])
)
class Friends2(Resource):
order_by = 'name ASC'
search_fields = ['name']
allowed_fields = ['name', 'meta']
required_fields = ['name']
allow_order_by = ['name']
table_name = 'friends'
def serialize_object(self, row):
return dict(
id=row[0],
name=row[1],
meta=row[2],
url=self.get_object_url(row[0])
)
def serialize_list_object(self, row):
return dict(
name=row[1],
url=self.get_object_url(row[0])
)
def index(self):
condition = dict(name='pedro', meta='{}')
index = yield from self.do_index(condition)
return self.response(index)
class BaseTestCase(TestCase):
settings = dict(
db_url='postgres://postgres@localhost:5432/chilero_pg_{}'.format(
TEST_DB_SUFFIX
)
)
routes = [
['/friends', Friends],
['/friends2', Friends2]
]
@asyncio.coroutine
def _create_friend(self, **kwargs):
defaults = dict(
name=self._random_string(),
meta=json.dumps(dict(name='name1', data2='data2'))
)
return(
yield from self._create_and_get('/friends', kwargs, defaults)
)
class TestAdvancedOptions(BaseTestCase):
@asyncio.coroutine
def _a_lot_of_friends(self):
# create a lot of friends
all_names = []
for i in range(100):
name = names.get_full_name()+str(i)
all_names.append(name)
_, f = yield from self._create_friend(name=name)
t = yield from _.text()
print(t)
assert _.status==201
_.close()
return all_names
@asynctest
def test_pagination(self):
yield from self._a_lot_of_friends()
# list with default values
# page 1
r = yield from self._get_json(self.full_url('/friends'))
assert r['data']['count'] >= 100
assert r['data']['prev'] == None
assert 'offset=20' in r['data']['next']
assert 'limit=20' in r['data']['next']
assert len(r['index']) == r['data']['length']
# page 2
r = yield from self._get_json(r['data']['next'])
assert 'offset=0' in r['data']['prev']
assert 'offset=40' in r['data']['next']
assert len(r['index']) == r['data']['length']
assert len(r['index'][0].keys()) == 2
@asynctest
def test_pagination_no_limit(self):
yield from self._a_lot_of_friends()
# list with no limit
r = yield from self._get_json(self.full_url('/friends?limit=0'))
assert r['data']['count'] >= 100
assert r['data']['prev'] == None
assert r['data']['next'] == None
assert r['data']['length'] == r['data']['count']
assert len(r['index']) == r['data']['count']
@asynctest
def test_search_pagination(self):
rnames = list((yield from self._a_lot_of_friends()))
rname = random.choice(rnames).split()[0]
for i in range(5):
name = '{} {}'.format(rname, names.get_last_name())
_, friend = yield from self._create_friend(name=name)
_.close()
rname = rname.lower()
r = yield from self._get_json(
self.full_url('/friends?search={}&limit=1'.format(rname))
)
assert r['data']['count'] >= 1
assert rname in r['data']['next']
while r['data']['next']:
r = yield from self._get_json(r['data']['next'])
if r['data']['next'] is not None:
assert rname in r['data']['next']
assert rname in r['data']['prev']
rname.lower() in r['index'][0]['name'].lower()
@asynctest
    def test_order_by_asc(self):
yield from self._a_lot_of_friends()
name = 'Abel Barrera'
_, friend = yield from self._create_friend(name=name)
_.close()
url = self.full_url('/friends?order_by={}'.format('name'))
resp = yield from self._get_json(url)
assert resp['index'][0]['name'].startswith('A')
@asynctest
    def test_order_by_400(self):
yield from self._a_lot_of_friends()
url = self.full_url('/friends?order_by={}'.format('other'))
resp = yield from self._get(url)
assert resp.status == 400
@asynctest
    def test_order_by_desc(self):
yield from self._a_lot_of_friends()
defaults = dict(
name='Zarahi zuna'
)
resp = yield from self._create('/friends', defaults)
assert resp.status == 201
resp.close()
url = self.full_url('/friends?order_by={}'.format('-name'))
resp = yield from self._get_json(url)
assert resp['index'][0]['name'].startswith('Z')
class TestBasic(BaseTestCase):
# Test common REST actions
@asynctest
def test_index(self):
resp = yield from self._get(self.full_url('/friends'))
assert resp.status == 200
resp.close()
@asynctest
def test_index_json(self):
resp = yield from self._index('/friends')
assert isinstance(resp, dict)
assert 'index' in resp
@asynctest
def test_index_json_condition(self):
resp = yield from self._index('/friends2')
assert isinstance(resp, dict)
assert 'index' in resp
@asynctest
def test_create(self):
name = self._random_string()
_, friend = yield from self._create_friend(name=name)
assert _.status == 201
_.close()
assert friend['name'] == name
assert len(friend.keys()) == 4
efriend = yield from self._delete(friend['url'])
assert efriend.status==200
@asynctest
def test_create_error(self):
_, friend = yield from self._create_friend(wrong_field=123)
assert _.status == 400
_.close()
@asynctest
def test_create_conflict(self):
name = names.get_full_name()
_, friend = yield from self._create_friend(name=name)
_.close()
_, friend = yield from self._create_friend(name=name)
assert _.status == 409
_.close()
@asynctest
def test_update(self):
_, friend = yield from self._create_friend()
_.close()
new_name = self._random_string()
presp = yield from self._patch(friend['url'], name=new_name)
assert presp.status == 204
presp.close()
updated_friend = yield from self._get_json(friend['url'])
assert updated_friend['body']['name'] == new_name
@asynctest
def test_search(self):
name = 'some known name'
_, friend = yield from self._create_friend(name=name)
_.close()
results = yield from self._search('/friends', terms='known name')
assert len(results['index']) > 0
assert results['index'][0]['name'] == name
@asynctest
def test_view_404(self):
resp = yield from self._get(self.full_url('/friends/999999'))
assert resp.status == 404
resp.close()
@asynctest
def test_update_400(self):
_, friend = yield from self._create_friend()
_.close()
new_name = self._random_string()
presp = yield from self._patch(friend['url'], names=new_name)
assert presp.status == 400
presp.close()
@asynctest
def test_update_empty_required_400(self):
_, friend = yield from self._create_friend()
_.close()
new_name = " "
presp = yield from self._patch(friend['url'], name=new_name)
assert presp.status == 400
presp.close()
@asynctest
def test_update_None_required_400(self):
_, friend = yield from self._create_friend()
_.close()
new_name = None
presp = yield from self._patch(friend['url'], name=new_name)
assert presp.status == 400
presp.close()
| dmonroy/chilero.pg | tests/test_sample_app.py | Python | mit | 8,628 | 0.000695 |
# encoding: utf-8
#
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http:# mozilla.org/MPL/2.0/.
#
# Author: Kyle Lahnakoski (kyle@lahnakoski.com)
#
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from collections import Mapping
from datetime import datetime
import re
from pyLibrary import convert
from mo_collections import reverse
from mo_logs import Log
from mo_logs.strings import quote
from mo_math import Math
from mo_dots import split_field, Data, Null, join_field, coalesce, listwrap
from mo_times.durations import Duration
class _MVEL(object):
def __init__(self, fromData, isLean=False):
self.fromData = fromData
self.isLean = isLean
self.prefixMap = []
self.functions = {}
def code(self, query):
"""
RETURN THE MVEL THAT WILL FILTER USING query.where AND TERM-PACK THE query.select CLAUSE
"""
selectList = listwrap(query.select)
fromPath = query.frum.name # FIRST NAME IS THE INDEX
sourceVar = "__sourcedoc__"
whereClause = query.where
# PARSE THE fromPath
code = self.frum(fromPath, sourceVar, "__loop")
select = self.select(selectList, fromPath, "output", sourceVar)
body = "var output = \"\";\n" + \
code.replace(
"<CODE>",
"if (" + _where(whereClause, lambda(v): self._translate(v)) + "){\n" +
select.body +
"}\n"
) + \
"output\n"
# ADD REFERENCED CONTEXT VARIABLES
context = self.getFrameVariables(body)
func = UID()
predef = addFunctions(select.head+context+body).head
param = "_source" if body.find(sourceVar) else ""
output = predef + \
select.head + \
context + \
'var ' + func + ' = function('+sourceVar+'){\n' + \
body + \
'};\n' + \
func + '('+param+')\n'
return Compiled(output)
def frum(self, fromPath, sourceVar, loopVariablePrefix):
"""
indexName NAME USED TO REFER TO HIGH LEVEL DOCUMENT
loopVariablePrefix PREFIX FOR LOOP VARIABLES
"""
loopCode = "if (<PATH> != null){ for(<VAR> : <PATH>){\n<CODE>\n}}\n"
self.prefixMap = []
code = "<CODE>"
path = split_field(fromPath)
# ADD LOCAL VARIABLES
columns = INDEX_CACHE[path[0]].columns
for i, c in enumerate(columns):
if c.name.find("\\.") >= 0:
self.prefixMap.insert(0, {
"path": c.name,
"variable": "get(" + sourceVar + ", \"" + c.name.replace("\\.", ".") + "\")"
})
else:
self.prefixMap.insert(0, {
"path": c.name,
"variable": sourceVar + ".?" + c.name
})
# ADD LOOP VARIABLES
currPath = []
# self.prefixMap.insert(0, {"path": path[0], "variable": path[0]})
for i, step in enumerate(path[1::]):
loopVariable = loopVariablePrefix + str(i)
currPath.append(step)
pathi = ".".join(currPath)
shortPath = self._translate(pathi)
self.prefixMap.insert(0, {"path": pathi, "variable": loopVariable})
loop = loopCode.replace("<VAR>", loopVariable).replace("<PATH>", shortPath)
code = code.replace("<CODE>", loop)
return code
def _translate(self, variableName):
shortForm = variableName
for p in self.prefixMap:
prefix = p["path"]
if shortForm == prefix:
shortForm = p["variable"]
else:
shortForm = replacePrefix(shortForm, prefix + ".", p["variable"] + ".?") # ADD NULL CHECK
shortForm = replacePrefix(shortForm, prefix + "[", p["variable"] + "[")
return shortForm
# CREATE A PIPE DELIMITED RESULT SET
def select(self, selectList, fromPath, varName, sourceVar):
path = split_field(fromPath)
is_deep = len(path) > 1
heads = []
list = []
for s in selectList:
if is_deep:
if s.value and isKeyword(s.value):
shortForm = self._translate(s.value)
list.append("Value2Pipe(" + shortForm + ")\n")
else:
Log.error("do not know how to handle yet")
else:
if s.value and isKeyword(s.value):
list.append("Value2Pipe(getDocValue(" + value2MVEL(s.value) + "))\n")
elif s.value:
shortForm = self._translate(s.value)
list.append("Value2Pipe(" + shortForm + ")\n")
else:
code, decode = self.Parts2Term(s.domain)
heads.append(code.head)
list.append("Value2Pipe(" + code.body + ")\n")
if len(split_field(fromPath)) > 1:
output = 'if (' + varName + ' != "") ' + varName + '+="|";\n' + varName + '+=' + '+"|"+'.join(["Value2Pipe("+v+")\n" for v in list]) + ';\n'
else:
output = varName + ' = ' + '+"|"+'.join(["Value2Pipe("+v+")\n" for v in list]) + ';\n'
return Data(
head="".join(heads),
body=output
)
def Parts2Term(self, domain):
"""
TERMS ARE ALWAYS ESCAPED SO THEY CAN BE COMPOUNDED WITH PIPE (|)
CONVERT AN ARRAY OF PARTS{name, esfilter} TO AN MVEL EXPRESSION
RETURN expression, function PAIR, WHERE
expression - MVEL EXPRESSION
function - TAKES RESULT OF expression AND RETURNS PART
"""
fields = domain.dimension.fields
term = []
if len(split_field(self.fromData.name)) == 1 and fields:
if isinstance(fields, Mapping):
# CONVERT UNORDERED FIELD DEFS
jx_fields, es_fields = zip(*[(k, fields[k]) for k in sorted(fields.keys())])
else:
jx_fields, es_fields = zip(*[(i, e) for i, e in enumerate(fields)])
# NO LOOPS BECAUSE QUERY IS SHALLOW
# DOMAIN IS FROM A DIMENSION, USE IT'S FIELD DEFS TO PULL
if len(es_fields) == 1:
def fromTerm(term):
return domain.getPartByKey(term)
return Data(
head="",
body='getDocValue('+quote(domain.dimension.fields[0])+')'
), fromTerm
else:
def fromTerm(term):
terms = [convert.pipe2value(t) for t in convert.pipe2value(term).split("|")]
candidate = dict(zip(jx_fields, terms))
for p in domain.partitions:
for k, t in candidate.items():
if p.value[k] != t:
break
else:
return p
if domain.type in ["uid", "default"]:
part = {"value": candidate}
domain.partitions.append(part)
return part
else:
return Null
for f in es_fields:
term.append('Value2Pipe(getDocValue('+quote(f)+'))')
return Data(
head="",
body='Value2Pipe('+('+"|"+'.join(term))+')'
), fromTerm
else:
for v in domain.partitions:
term.append("if (" + _where(v.esfilter, lambda x: self._translate(x)) + ") " + value2MVEL(domain.getKey(v)) + "; else ")
term.append(value2MVEL(domain.getKey(domain.NULL)))
func_name = "_temp"+UID()
return self.register_function("+\"|\"+".join(term))
def Parts2TermScript(self, domain):
code, decode = self.Parts2Term(domain)
func = addFunctions(code.head + code.body)
return func.head + code.head + code.body, decode
def getFrameVariables(self, body):
contextVariables = []
columns = self.fromData.columns
parentVarNames = set() # ALL PARENTS OF VARIABLES WITH "." IN NAME
body = body.replace(".?", ".")
for i, c in enumerate(columns):
j = body.find(c.name, 0)
while j >= 0:
s = j
j = body.find(c.name, s + 1)
test0 = body[s - 1: s + len(c.name) + 1:]
test3 = body[s - 8: s + len(c.name):]
if test0[:-1] == "\"" + c.name:
continue
if test3 == "_source." + c.name:
continue
def defParent(name):
# DO NOT MAKE THE SAME PARENT TWICE
if name in parentVarNames:
return
parentVarNames.add(name)
if len(split_field(name)) == 1:
contextVariables.append("Map " + name + " = new HashMap();\n")
else:
defParent(join_field(split_field(name)[0:-1]))
contextVariables.append(name + " = new HashMap();\n")
body = body.replace(c.name, "-"*len(c.name))
if self.isLean or c.useSource:
if len(split_field(c.name)) > 1:
defParent(join_field(split_field(c.name)[0:-1]))
contextVariables.append(c.name + " = getSourceValue(\"" + c.name + "\");\n")
else:
contextVariables.append(c.name + " = _source[\"" + c.name + "\"];\n")
else:
if len(split_field(c.name)) > 1:
defParent(join_field(split_field(c.name)[0:-1]))
contextVariables.append(c.name + " = getDocValue(\"" + c.name + "\");\n")
else:
contextVariables.append(c.name + " = getDocValue(\"" + c.name + "\");\n")
break
return "".join(contextVariables)
def compile_expression(self, expression, constants=None):
# EXPAND EXPRESSION WITH ANY CONSTANTS
expression = setValues(expression, constants)
fromPath = self.fromData.name # FIRST NAME IS THE INDEX
indexName = join_field(split_field(fromPath)[:1:])
context = self.getFrameVariables(expression)
if context == "":
return addFunctions(expression).head+expression
func = UID()
code = addFunctions(context+expression)
output = code.head + \
'var ' + func + ' = function(' + indexName + '){\n' + \
context + \
expression + ";\n" + \
'};\n' + \
func + '(_source)\n'
return Compiled(output)
def register_function(self, code):
for n, c in self.functions.items():
if c == code:
break
else:
n = "_temp" + UID()
self.functions[n] = code
return Data(
head='var ' + n + ' = function(){\n' + code + '\n};\n',
body=n + '()\n'
)
class Compiled(object):
def __init__(self, code):
self.code=code
def __str__(self):
return self.code
def __data__(self):
return self.code
__UID__ = 1000
def UID():
output = "_" + str(__UID__)
globals()["__UID__"] += 1
return output
def setValues(expression, constants):
if not constants:
return expression
constants = constants.copy()
# EXPAND ALL CONSTANTS TO PRIMITIVE VALUES (MVEL CAN ONLY ACCEPT PRIMITIVE VALUES)
for c in constants:
value = c.value
n = c.name
if len(split_field(n)) >= 3:
continue # DO NOT GO TOO DEEP
if isinstance(value, list):
continue # DO NOT MESS WITH ARRAYS
if isinstance(value, Mapping):
for k, v in value.items():
constants.append({"name": n + "." + k, "value": v})
    for c in reverse(constants):  # REVERSE ORDER, SO LONGER NAMES ARE TESTED FIRST
        s = 0
        while True:
            s = expression.find(c.name, s)
            if s == -1:
                break
            # ONLY REPLACE WHOLE WORDS, NOT SUBSTRINGS OF LONGER NAMES
            if s > 0 and re.match(r"\w", expression[s - 1]):
                s += 1
                continue
            e = s + len(c.name)
            if e < len(expression) and re.match(r"\w", expression[e]):
                s += 1
                continue
            v = value2MVEL(c.value)
            expression = expression[:s] + v + expression[e:]
            s += len(v)
    return expression
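# Hedged sketch (added): assuming `constants` items expose name/value
# attributes (e.g. wrapped with mo_dots), whole-word names expand to MVEL
# literals:
# >>> setValues("build.date >= cutoff", wrap([{"name": "cutoff", "value": 1388534400000}]))
# 'build.date >= 1388534400000'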
def unpack_terms(facet, selects):
# INTERPRET THE TERM-PACKED ES RESULTS AND RETURN DATA CUBE
# ASSUME THE .term IS JSON OBJECT WITH ARRAY OF RESULT OBJECTS
mod = len(selects)
output = []
for t in facet.terms:
if t.term == "":
continue # NO DATA
value = []
for i, v in enumerate(t.term.split("|")):
value.append(convert.pipe2value(v))
if ((i + 1) % mod) == 0:
value.append(t.count)
output.append(value)
value = []
return output
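# Hedged example (added): with two selects, a facet entry whose term is
# "sWindows|n42" and whose count is 7 decodes (via convert.pipe2value) to the
# row ['Windows', 42, 7].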
# PASS esFilter SIMPLIFIED ElasticSearch FILTER OBJECT
# RETURN MVEL EXPRESSION
def _where(esFilter, _translate):
if not esFilter or esFilter is True:
return "true"
keys = esFilter.keys()
if len(keys) != 1:
Log.error("Expecting only one filter aggregate")
op = keys[0]
if op == "and":
list = esFilter[op]
if not (list):
return "true"
if len(list) == 1:
return _where(list[0], _translate)
output = "(" + " && ".join(_where(l, _translate) for l in list) + ")"
return output
elif op == "or":
list = esFilter[op]
if not list:
return "false"
if len(list) == 1:
return _where(list[0], _translate)
output = "(" + " || ".join(_where(l, _translate) for l in list) + ")"
return output
elif op == "not":
return "!(" + _where(esFilter[op, _translate]) + ")"
elif op == "term":
pair = esFilter[op]
if len(pair.keys()) == 1:
return [_translate(k) + "==" + value2MVEL(v) for k, v in pair.items()][0]
else:
return "(" + " && ".join(_translate(k) + "==" + value2MVEL(v) for k, v in pair.items()) + ")"
elif op == "terms":
output = []
for variableName, valueList in esFilter[op].items():
if not valueList:
Log.error("Expecting something in 'terms' array")
if len(valueList) == 1:
output.append(_translate(variableName) + "==" + value2MVEL(valueList[0]))
else:
output.append("(" + " || ".join(_translate(variableName) + "==" + value2MVEL(v) for v in valueList) + ")")
return " && ".join(output)
elif op == "exists":
# "exists":{"field":"myField"}
pair = esFilter[op]
variableName = pair.field
return "(" + _translate(variableName) + "!=null)"
elif op == "missing":
fieldName = _translate(esFilter[op].field)
testExistence = coalesce(esFilter[op].existence, True)
testNull = coalesce(esFilter[op].null_value, True)
output = []
if testExistence and not testNull:
output.append("(" + fieldName.replace(".?", ".") + " == empty)") # REMOVE THE .? SO WE REFER TO THE FIELD, NOT GET THE VALUE
if testNull:
output.append("(" + fieldName + "==null)")
return " || ".join(output)
elif op == "range":
pair = esFilter[op]
ranges = []
for variableName, r in pair.items():
if r.gte:
ranges.append(value2MVEL(r.gte) + "<=" + _translate(variableName))
elif r.gt:
ranges.append(value2MVEL(r.gt) + "<" + _translate(variableName))
elif r["from"]:
if r.include_lower == None or r.include_lower:
ranges.append(value2MVEL(r["from"]) + "<=" + _translate(variableName))
else:
ranges.append(value2MVEL(r["from"]) + "<" + _translate(variableName))
if r.lte:
ranges.append(value2MVEL(r.lte) + ">=" + _translate(variableName))
elif r.lt:
ranges.append(value2MVEL(r.lt) + ">" + _translate(variableName))
elif r["from"]:
if r.include_lower == None or r.include_lower:
ranges.append(value2MVEL(r["from"]) + ">=" + _translate(variableName))
else:
ranges.append(value2MVEL(r["from"]) + ">" + _translate(variableName))
return "("+" && ".join(ranges)+")"
elif op == "script":
script = esFilter[op].script
return _translate(script)
elif op == "prefix":
pair = esFilter[op]
variableName, value = pair.items()[0]
return _translate(variableName) + ".startsWith(" + quote(value) + ")"
elif op == "match_all":
return "true"
else:
Log.error("'" + op + "' is an unknown aggregate")
return ""
VAR_CHAR = "abcdefghijklmnopqurstvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_.\""
keyword_pattern = re.compile(r"\.*\w*(?:\.\w+)*$")
def isKeyword(value):
"""
RETURN TRUE IF THE value IS JUST A NAME OF A FIELD, A LIST OF FIELDS, (OR A VALUE)
"""
if not value or not isinstance(value, basestring):
Log.error("Expecting a string")
if keyword_pattern.match(value):
return True
return False
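# Illustrative examples (added), relying on the pattern being anchored with $:
# >>> isKeyword("build.date")
# True
# >>> isKeyword("a + b")
# False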
def value2MVEL(value):
"""
FROM PYTHON VALUE TO MVEL EQUIVALENT
"""
if isinstance(value, datetime):
        return str(convert.datetime2milli(value)) + " /*" + value.strftime("%y%b%d %H%M%S") + "*/"  # TIME
if isinstance(value, Duration):
return str(convert.timedelta2milli(value)) + " /*" + str(value) + "*/" # DURATION
if Math.is_number(value):
return str(value)
return quote(value)
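# Illustrative examples (added), assuming mo_logs.strings.quote returns a
# double-quoted string:
# >>> value2MVEL(42)
# '42'
# >>> value2MVEL("risk")
# '"risk"'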
# FROM PYTHON VALUE TO ES QUERY EQUIVALENT
def value2query(value):
if isinstance(value, datetime):
return convert.datetime2milli(value)
if isinstance(value, Duration):
return value.milli
if Math.is_number(value):
return value
return quote(value)
def value2value(value):
"""
CONVERT FROM PYTHON VALUE TO ES EQUIVALENT
"""
if isinstance(value, datetime):
return convert.datetime2milli(value)
if isinstance(value, Duration):
return value.milli # DURATION
return value
def addFunctions(mvel):
"""
PREPEND THE REQUIRED MVEL FUNCTIONS TO THE CODE
"""
isAdded = Data() # SOME FUNCTIONS DEPEND ON OTHERS
head=[]
body=mvel
keepAdding = True
while keepAdding:
keepAdding = False
for func_name, func_code in FUNCTIONS.items():
if isAdded[func_name]:
continue
if mvel.find(func_name) == -1:
continue
keepAdding = True
isAdded[func_name] = func_code
head.append(func_code)
mvel = func_code + mvel
return Data(
head="".join(head),
body=body
)
FUNCTIONS = {
"String2Quote":
"var String2Quote = function(str){\n" +
"if (!(str is String)){ str; }else{\n" + # LAST VALUE IS RETURNED. "return" STOPS EXECUTION COMPLETELY!
"" + value2MVEL("\"") + "+" +
"str.replace(" + value2MVEL("\\") + "," + value2MVEL("\\\\") +
").replace(" + value2MVEL("\"") + "," + value2MVEL("\\\"") +
").replace(" + value2MVEL("\'") + "," + value2MVEL("\\\'") + ")+" +
value2MVEL("\"") + ";\n" +
"}};\n",
"Value2Pipe":
'var Value2Pipe = function(value){\n' + # SPACES ARE IMPORTANT BETWEEN "="
"if (value==null){ \"0\" }else " +
"if (value is ArrayList || value is org.elasticsearch.common.mvel2.util.FastList){" +
"var out = \"\";\n" +
"foreach (v : value) out = (out==\"\") ? v : out + \"|\" + Value2Pipe(v);\n" +
"'a'+Value2Pipe(out);\n" +
"}else \n" +
"if (value is Long || value is Integer || value is Double){ 'n'+value; }else \n" +
"if (!(value is String)){ 's'+value.getClass().getName(); }else \n" +
'"s"+value.replace("\\\\", "\\\\\\\\").replace("|", "\\\\p");' + # CAN NOT value TO MAKE NUMBER A STRING (OR EVEN TO PREPEND A STRING!)
"};\n",
# "replaceAll":
# "var replaceAll = function(output, find, replace){\n" +
# "if (output.length()==0) return output;\n"+
# "s = output.indexOf(find, 0);\n" +
# "while(s>=0){\n" +
# "output=output.replace(find, replace);\n" +
# "s=s-find.length()+replace.length();\n" +
# "s = output.indexOf(find, s);\n" +
# "}\n"+
# "output;\n"+
# '};\n',
"floorDay":
"var floorDay = function(value){ Math.floor(value/86400000))*86400000;};\n",
"floorInterval":
"var floorInterval = function(value, interval){ Math.floor((double)value/(double)interval)*interval;};\n",
"maximum": # JUST BECAUSE MVEL'S MAX ONLY USES MAX(int, int). G*DDA*NIT!
"var maximum = function(a, b){if (a==null) b; else if (b==null) a; else if (a>b) a; else b;\n};\n",
"minimum": # JUST BECAUSE MVEL'S MAX ONLY USES MAX(int, int). G*DDA*NIT!
"var minimum = function(a, b){if (a==null) b; else if (b==null) a; else if (a<b) a; else b;\n};\n",
"coalesce": # PICK FIRST NOT-NULL VALUE
"var coalesce = function(a, b){if (a==null) b; else a; \n};\n",
"zero2null": # ES MAKES IT DIFFICULT TO DETECT NULL/MISSING VALUES, BUT WHEN DEALING WITH NUMBERS, ES DEFAULTS TO RETURNING ZERO FOR missing VALUES!!
"var zero2null = function(a){if (a==0) null; else a; \n};\n",
"get": # MY OWN PERSONAL *FU* TO THE TWISTED MVEL PROPERTY ACCESS
"var get = function(hash, key){\n" +
"if (hash==null) null; else hash[key];\n" +
"};\n",
"isNumeric":
"var isNumeric = function(value){\n" +
"value = value + \"\";\n" +
# "try{ value-0; }catch(e){ 0; }"+
"var isNum = value.length()>0;\n" +
"for (v : value.toCharArray()){\n" +
"if (\"0123456789\".indexOf(v)==-1) isNum = false;\n" +
"};\n" +
"isNum;\n" +
"};\n",
"alpha2zero":
"var alpha2zero = function(value){\n" +
"var output = 0;\n" +
"if (isNumeric(value)) output = value-0;\n" +
"return output;" +
"};\n",
# KANBAN SOFTWARE
# CAN SEE QUEUE BLOCKAGES AND SEE SINGLE BLOCKERS
"concat":
"var concat = function(array){\n" +
"if (array==null) \"\"; else {\n" +
"var output = \"\";\n" +
"for (v : array){ output = output+\"|\"+v+\"|\"; };\n" +
"output;\n" +
"}};\n",
# "contains":
# "var contains = function(array, value){\n"+
# "if (array==null) false; else {\n"+
# "var good = false;\n"+
# "for (v : array){ if (v==value) good=true; };\n"+
# "good;\n"+
# "}};\n",
"getFlagValue": # SPECIFICALLY FOR cf_* FLAGS: CONCATENATE THE ATTRIBUTE NAME WITH ATTRIBUTE VALUE, IF EXISTS
"var getFlagValue = function(name){\n" +
"if (_source[name]!=null)" +
"\" \"+name+_source[name];\n" +
"else \n" +
"\"\";\n" +
"};\n",
"getDocValue":
"var getDocValue = function(name){\n" +
"var out = [];\n" +
"var v = doc[name];\n" +
# "if (v is org.elasticsearch.common.mvel2.ast.Function) v = v();=n" +
"if (v==null || v.value==null) { null; } else\n" +
"if (v.values.size()<=1){ v.value; } else\n" + # ES MAKES NO DISTINCTION BETWEEN v or [v], SO NEITHER DO I
"{for(k : v.values) out.add(k); out;}" +
"};\n",
"getSourceValue":
"var getSourceValue = function(name){\n" +
"var out = [];\n" +
"var v = _source[name];\n" +
# "if (v is org.elasticsearch.common.mvel2.ast.Function) v = v();=n" +
"if (v==null) { null; } else\n" +
"if (v[\"values\"]==null || v.values.size()<=1){ v.value; } else {\n" + # ES MAKES NO DISTINCTION BETWEEN v or [v], SO NEITHER DO I
"for(k : v) out.add(k); out;\n" + # .size() MUST BE USED INSTEAD OF .length, THE LATTER WILL CRASH IF JITTED (https://github.com/elasticsearch/elasticsearch/issues/3094)
"}};\n",
"getDocArray":
"var getDocArray = function(name){\n" +
"var out = [];\n" +
"var v = doc[name];\n" +
"if (v!=null && v.value!=null) for(k : v.values) out.add(k);" +
"out;" +
"};\n",
"milli2Month":
"var milli2Month = function(value, milliOffset){\n" +
"g=new java.util.GregorianCalendar(new java.util.SimpleTimeZone(0, \"GMT\"));\n" +
"g.setTimeInMillis(value);\n" +
"g.add(java.util.GregorianCalendar.MILLISECOND, -milliOffset);\n" +
"m = g.get(java.util.GregorianCalendar.MONTH);\n" +
"output = \"\"+g.get(java.util.GregorianCalendar.YEAR)+(m>9?\"\":\"0\")+m;\n" +
"output;\n" +
"};\n",
"between":
"var between = function(value, prefix, suffix){\n" +
"if (value==null){ null; }else{\n" +
"var start = value.indexOf(prefix, 0);\n" +
"if (start==-1){ null; }else{\n" +
"var end = value.indexOf(suffix, start+prefix.length());\n" +
"if (end==-1){ null; }else{\n" +
"value.substring(start+prefix.length(), end);\n" +
"}}}\n" +
"};\n"
}
def replacePrefix(value, prefix, new_prefix):
try:
if value.startswith(prefix):
return new_prefix+value[len(prefix)::]
return value
except Exception as e:
Log.error("can not replace prefix", e)
| klahnakoski/esReplicate | pyLibrary/queries/es09/expressions.py | Python | mpl-2.0 | 26,115 | 0.003178 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
from djangocms_carousel import __version__
INSTALL_REQUIRES = [
]
CLASSIFIERS = [
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Communications',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content :: Message Boards',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
]
setup(
name='djangocms-carousel',
version=__version__,
description='Slider Plugin for django CMS',
author='Andrew Mirsky',
author_email='andrew@mirsky.net',
url='https://git.mirsky.net/mirskyconsulting/djangocms-carousel',
packages=['djangocms_carousel', 'djangocms_carousel.migrations'],
install_requires=INSTALL_REQUIRES,
license='LICENSE.txt',
platforms=['OS Independent'],
classifiers=CLASSIFIERS,
long_description=open('README.md').read(),
include_package_data=True,
zip_safe=False
)
| mirskytech/djangocms-carousel | setup.py | Python | bsd-3-clause | 1,241 | 0 |
{
"name" : "GII",
"version" : "1.0",
"depends" : ['sale','product'],
"author" : "Novasoft Consultancy Services Pvt. Ltd.",
'category' : 'Generic Modules/Others',
"description": """ GII - Management Module
""",
'website': 'http://www.novasoftindia.com',
'data': ['giisa.xml',
],
'demo': [],
'installable': True,
'auto_install': False,
'application': True,
} | Novasoft-India/OperERP-AM-Motors | openerp/addons/giicourse/__openerp__.py | Python | agpl-3.0 | 444 | 0.018018 |
import base64
import io
import os
import os.path
import json
import shlex
import sys
import tarfile
import tempfile
import warnings
from distutils.version import StrictVersion
from datetime import datetime
from fnmatch import fnmatch
import requests
import six
from .. import constants
from .. import errors
from .. import tls
from ..types import Ulimit, LogConfig, Healthcheck
if six.PY2:
from urllib import splitnport
else:
from urllib.parse import splitnport
DEFAULT_HTTP_HOST = "127.0.0.1"
DEFAULT_UNIX_SOCKET = "http+unix://var/run/docker.sock"
DEFAULT_NPIPE = 'npipe:////./pipe/docker_engine'
BYTE_UNITS = {
'b': 1,
'k': 1024,
'm': 1024 * 1024,
'g': 1024 * 1024 * 1024
}
def create_ipam_pool(subnet=None, iprange=None, gateway=None,
aux_addresses=None):
"""
Create an IPAM pool config dictionary to be added to the
``pool_configs`` parameter of
:py:meth:`~docker.utils.create_ipam_config`.
Args:
subnet (str): Custom subnet for this IPAM pool using the CIDR
notation. Defaults to ``None``.
iprange (str): Custom IP range for endpoints in this IPAM pool using
the CIDR notation. Defaults to ``None``.
gateway (str): Custom IP address for the pool's gateway.
aux_addresses (dict): A dictionary of ``key -> ip_address``
relationships specifying auxiliary addresses that need to be
allocated by the IPAM driver.
Returns:
(dict) An IPAM pool config
Example:
>>> ipam_pool = docker.utils.create_ipam_pool(
subnet='124.42.0.0/16',
iprange='124.42.0.0/24',
gateway='124.42.0.254',
aux_addresses={
'reserved1': '124.42.1.1'
}
)
>>> ipam_config = docker.utils.create_ipam_config(
pool_configs=[ipam_pool])
"""
return {
'Subnet': subnet,
'IPRange': iprange,
'Gateway': gateway,
'AuxiliaryAddresses': aux_addresses
}
def create_ipam_config(driver='default', pool_configs=None):
"""
Create an IPAM (IP Address Management) config dictionary to be used with
:py:meth:`~docker.api.network.NetworkApiMixin.create_network`.
Args:
driver (str): The IPAM driver to use. Defaults to ``default``.
pool_configs (list): A list of pool configuration dictionaries as
created by :py:meth:`~docker.utils.create_ipam_pool`. Defaults to
empty list.
Returns:
(dict) An IPAM config.
Example:
>>> ipam_config = docker.utils.create_ipam_config(driver='default')
>>> network = client.create_network('network1', ipam=ipam_config)
"""
return {
'Driver': driver,
'Config': pool_configs or []
}
def mkbuildcontext(dockerfile):
f = tempfile.NamedTemporaryFile()
t = tarfile.open(mode='w', fileobj=f)
if isinstance(dockerfile, io.StringIO):
dfinfo = tarfile.TarInfo('Dockerfile')
if six.PY3:
raise TypeError('Please use io.BytesIO to create in-memory '
'Dockerfiles with Python 3')
else:
dfinfo.size = len(dockerfile.getvalue())
dockerfile.seek(0)
elif isinstance(dockerfile, io.BytesIO):
dfinfo = tarfile.TarInfo('Dockerfile')
dfinfo.size = len(dockerfile.getvalue())
dockerfile.seek(0)
else:
dfinfo = t.gettarinfo(fileobj=dockerfile, arcname='Dockerfile')
t.addfile(dfinfo, dockerfile)
t.close()
f.seek(0)
return f
def decode_json_header(header):
data = base64.b64decode(header)
if six.PY3:
data = data.decode('utf-8')
return json.loads(data)
def tar(path, exclude=None, dockerfile=None, fileobj=None, gzip=False):
if not fileobj:
fileobj = tempfile.NamedTemporaryFile()
t = tarfile.open(mode='w:gz' if gzip else 'w', fileobj=fileobj)
root = os.path.abspath(path)
exclude = exclude or []
for path in sorted(exclude_paths(root, exclude, dockerfile=dockerfile)):
i = t.gettarinfo(os.path.join(root, path), arcname=path)
if sys.platform == 'win32':
# Windows doesn't keep track of the execute bit, so we make files
# and directories executable by default.
i.mode = i.mode & 0o755 | 0o111
try:
# We open the file object in binary mode for Windows support.
f = open(os.path.join(root, path), 'rb')
except IOError:
# When we encounter a directory the file object is set to None.
f = None
t.addfile(i, f)
t.close()
fileobj.seek(0)
return fileobj
def exclude_paths(root, patterns, dockerfile=None):
"""
Given a root directory path and a list of .dockerignore patterns, return
an iterator of all paths (both regular files and directories) in the root
directory that do *not* match any of the patterns.
All paths returned are relative to the root.
"""
if dockerfile is None:
dockerfile = 'Dockerfile'
exceptions = [p for p in patterns if p.startswith('!')]
include_patterns = [p[1:] for p in exceptions]
include_patterns += [dockerfile, '.dockerignore']
exclude_patterns = list(set(patterns) - set(exceptions))
paths = get_paths(root, exclude_patterns, include_patterns,
has_exceptions=len(exceptions) > 0)
return set(paths).union(
# If the Dockerfile is in a subdirectory that is excluded, get_paths
# will not descend into it and the file will be skipped. This ensures
# it doesn't happen.
set([dockerfile])
if os.path.exists(os.path.join(root, dockerfile)) else set()
)
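# Hedged example (added): assume a build context /ctx holding Dockerfile,
# foo/bar.py and foo/baz.py. The '!' exception re-includes a file inside an
# otherwise-excluded directory:
# >>> sorted(exclude_paths('/ctx', ['foo', '!foo/bar.py']))
# ['Dockerfile', 'foo/bar.py']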
def should_include(path, exclude_patterns, include_patterns):
"""
Given a path, a list of exclude patterns, and a list of inclusion patterns:
1. Returns True if the path doesn't match any exclusion pattern
2. Returns False if the path matches an exclusion pattern and doesn't match
an inclusion pattern
3. Returns true if the path matches an exclusion pattern and matches an
inclusion pattern
"""
for pattern in exclude_patterns:
if match_path(path, pattern):
for pattern in include_patterns:
if match_path(path, pattern):
return True
return False
return True
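# Hedged examples (added) of the three rules above:
# >>> should_include('foo/bar.py', ['foo'], ['foo/bar.py'])
# True
# >>> should_include('foo/baz.py', ['foo'], ['foo/bar.py'])
# False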
def get_paths(root, exclude_patterns, include_patterns, has_exceptions=False):
paths = []
for parent, dirs, files in os.walk(root, topdown=True, followlinks=False):
parent = os.path.relpath(parent, root)
if parent == '.':
parent = ''
# If exception rules exist, we can't skip recursing into ignored
# directories, as we need to look for exceptions in them.
#
# It may be possible to optimize this further for exception patterns
# that *couldn't* match within ignored directores.
#
# This matches the current docker logic (as of 2015-11-24):
# https://github.com/docker/docker/blob/37ba67bf636b34dc5c0c0265d62a089d0492088f/pkg/archive/archive.go#L555-L557
if not has_exceptions:
# Remove excluded patterns from the list of directories to traverse
# by mutating the dirs we're iterating over.
# This looks strange, but is considered the correct way to skip
# traversal. See https://docs.python.org/2/library/os.html#os.walk
dirs[:] = [d for d in dirs if
should_include(os.path.join(parent, d),
exclude_patterns, include_patterns)]
for path in dirs:
if should_include(os.path.join(parent, path),
exclude_patterns, include_patterns):
paths.append(os.path.join(parent, path))
for path in files:
if should_include(os.path.join(parent, path),
exclude_patterns, include_patterns):
paths.append(os.path.join(parent, path))
return paths
def match_path(path, pattern):
pattern = pattern.rstrip('/')
if pattern:
pattern = os.path.relpath(pattern)
pattern_components = pattern.split(os.path.sep)
path_components = path.split(os.path.sep)[:len(pattern_components)]
return fnmatch('/'.join(path_components), pattern)
def compare_version(v1, v2):
"""Compare docker versions
>>> v1 = '1.9'
>>> v2 = '1.10'
>>> compare_version(v1, v2)
1
>>> compare_version(v2, v1)
-1
>>> compare_version(v2, v2)
0
"""
s1 = StrictVersion(v1)
s2 = StrictVersion(v2)
if s1 == s2:
return 0
elif s1 > s2:
return -1
else:
return 1
def version_lt(v1, v2):
return compare_version(v1, v2) > 0
def version_gte(v1, v2):
return not version_lt(v1, v2)
def ping_registry(url):
warnings.warn(
'The `ping_registry` method is deprecated and will be removed.',
DeprecationWarning
)
return ping(url + '/v2/', [401]) or ping(url + '/v1/_ping')
def ping(url, valid_4xx_statuses=None):
try:
res = requests.get(url, timeout=3)
except Exception:
return False
else:
        # We don't send auth headers yet,
        # and a v2 registry will respond with status 401
return (
res.status_code < 400 or
(valid_4xx_statuses and res.status_code in valid_4xx_statuses)
)
def _convert_port_binding(binding):
result = {'HostIp': '', 'HostPort': ''}
if isinstance(binding, tuple):
if len(binding) == 2:
result['HostPort'] = binding[1]
result['HostIp'] = binding[0]
elif isinstance(binding[0], six.string_types):
result['HostIp'] = binding[0]
else:
result['HostPort'] = binding[0]
elif isinstance(binding, dict):
if 'HostPort' in binding:
result['HostPort'] = binding['HostPort']
if 'HostIp' in binding:
result['HostIp'] = binding['HostIp']
else:
raise ValueError(binding)
else:
result['HostPort'] = binding
if result['HostPort'] is None:
result['HostPort'] = ''
else:
result['HostPort'] = str(result['HostPort'])
return result
def convert_port_bindings(port_bindings):
result = {}
for k, v in six.iteritems(port_bindings):
key = str(k)
if '/' not in key:
key += '/tcp'
if isinstance(v, list):
result[key] = [_convert_port_binding(binding) for binding in v]
else:
result[key] = [_convert_port_binding(v)]
return result
def convert_volume_binds(binds):
if isinstance(binds, list):
return binds
result = []
for k, v in binds.items():
if isinstance(k, six.binary_type):
k = k.decode('utf-8')
if isinstance(v, dict):
if 'ro' in v and 'mode' in v:
raise ValueError(
'Binding cannot contain both "ro" and "mode": {}'
.format(repr(v))
)
bind = v['bind']
if isinstance(bind, six.binary_type):
bind = bind.decode('utf-8')
if 'ro' in v:
mode = 'ro' if v['ro'] else 'rw'
elif 'mode' in v:
mode = v['mode']
else:
mode = 'rw'
result.append(
six.text_type('{0}:{1}:{2}').format(k, bind, mode)
)
else:
if isinstance(v, six.binary_type):
v = v.decode('utf-8')
result.append(
six.text_type('{0}:{1}:rw').format(k, v)
)
return result
def convert_tmpfs_mounts(tmpfs):
if isinstance(tmpfs, dict):
return tmpfs
if not isinstance(tmpfs, list):
raise ValueError(
'Expected tmpfs value to be either a list or a dict, found: {}'
.format(type(tmpfs).__name__)
)
result = {}
for mount in tmpfs:
if isinstance(mount, six.string_types):
if ":" in mount:
name, options = mount.split(":", 1)
else:
name = mount
options = ""
else:
raise ValueError(
"Expected item in tmpfs list to be a string, found: {}"
.format(type(mount).__name__)
)
result[name] = options
return result
def convert_service_networks(networks):
if not networks:
return networks
if not isinstance(networks, list):
raise TypeError('networks parameter must be a list.')
result = []
for n in networks:
if isinstance(n, six.string_types):
n = {'Target': n}
result.append(n)
return result
def parse_repository_tag(repo_name):
parts = repo_name.rsplit('@', 1)
if len(parts) == 2:
return tuple(parts)
parts = repo_name.rsplit(':', 1)
if len(parts) == 2 and '/' not in parts[1]:
return tuple(parts)
return repo_name, None
# Based on utils.go:ParseHost http://tinyurl.com/nkahcfh
# fd:// protocol unsupported (for obvious reasons)
# Added support for http and https
# Protocol translation: tcp -> http, unix -> http+unix
def parse_host(addr, is_win32=False, tls=False):
proto = "http+unix"
port = None
path = ''
if not addr and is_win32:
addr = DEFAULT_NPIPE
if not addr or addr.strip() == 'unix://':
return DEFAULT_UNIX_SOCKET
addr = addr.strip()
if addr.startswith('http://'):
addr = addr.replace('http://', 'tcp://')
if addr.startswith('http+unix://'):
addr = addr.replace('http+unix://', 'unix://')
if addr == 'tcp://':
raise errors.DockerException(
"Invalid bind address format: {0}".format(addr)
)
elif addr.startswith('unix://'):
addr = addr[7:]
elif addr.startswith('tcp://'):
proto = 'http{0}'.format('s' if tls else '')
addr = addr[6:]
elif addr.startswith('https://'):
proto = "https"
addr = addr[8:]
elif addr.startswith('npipe://'):
proto = 'npipe'
addr = addr[8:]
elif addr.startswith('fd://'):
raise errors.DockerException("fd protocol is not implemented")
else:
if "://" in addr:
raise errors.DockerException(
"Invalid bind address protocol: {0}".format(addr)
)
proto = "https" if tls else "http"
if proto in ("http", "https"):
address_parts = addr.split('/', 1)
host = address_parts[0]
if len(address_parts) == 2:
path = '/' + address_parts[1]
host, port = splitnport(host)
if port is None:
raise errors.DockerException(
"Invalid port: {0}".format(addr)
)
if not host:
host = DEFAULT_HTTP_HOST
else:
host = addr
if proto in ("http", "https") and port == -1:
raise errors.DockerException(
"Bind address needs a port: {0}".format(addr))
if proto == "http+unix" or proto == 'npipe':
return "{0}://{1}".format(proto, host).rstrip('/')
return "{0}://{1}:{2}{3}".format(proto, host, port, path).rstrip('/')
def parse_devices(devices):
device_list = []
for device in devices:
if isinstance(device, dict):
device_list.append(device)
continue
if not isinstance(device, six.string_types):
raise errors.DockerException(
'Invalid device type {0}'.format(type(device))
)
device_mapping = device.split(':')
if device_mapping:
path_on_host = device_mapping[0]
if len(device_mapping) > 1:
path_in_container = device_mapping[1]
else:
path_in_container = path_on_host
if len(device_mapping) > 2:
permissions = device_mapping[2]
else:
permissions = 'rwm'
device_list.append({
'PathOnHost': path_on_host,
'PathInContainer': path_in_container,
'CgroupPermissions': permissions
})
return device_list
def kwargs_from_env(ssl_version=None, assert_hostname=None, environment=None):
if not environment:
environment = os.environ
host = environment.get('DOCKER_HOST')
# empty string for cert path is the same as unset.
cert_path = environment.get('DOCKER_CERT_PATH') or None
# empty string for tls verify counts as "false".
# Any value or 'unset' counts as true.
tls_verify = environment.get('DOCKER_TLS_VERIFY')
if tls_verify == '':
tls_verify = False
else:
tls_verify = tls_verify is not None
enable_tls = cert_path or tls_verify
params = {}
if host:
params['base_url'] = (
host.replace('tcp://', 'https://') if enable_tls else host
)
if not enable_tls:
return params
if not cert_path:
cert_path = os.path.join(os.path.expanduser('~'), '.docker')
if not tls_verify and assert_hostname is None:
# assert_hostname is a subset of TLS verification,
# so if it's not set already then set it to false.
assert_hostname = False
params['tls'] = tls.TLSConfig(
client_cert=(os.path.join(cert_path, 'cert.pem'),
os.path.join(cert_path, 'key.pem')),
ca_cert=os.path.join(cert_path, 'ca.pem'),
verify=tls_verify,
ssl_version=ssl_version,
assert_hostname=assert_hostname,
)
return params
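# Hedged usage sketch (added): build a client straight from the standard
# DOCKER_HOST / DOCKER_TLS_VERIFY / DOCKER_CERT_PATH variables, using this
# package's top-level Client class:
# >>> from docker import Client
# >>> client = Client(**kwargs_from_env())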
def convert_filters(filters):
result = {}
for k, v in six.iteritems(filters):
if isinstance(v, bool):
v = 'true' if v else 'false'
if not isinstance(v, list):
v = [v, ]
result[k] = v
return json.dumps(result)
def datetime_to_timestamp(dt):
"""Convert a UTC datetime to a Unix timestamp"""
delta = dt - datetime.utcfromtimestamp(0)
return delta.seconds + delta.days * 24 * 3600
def parse_bytes(s):
if isinstance(s, six.integer_types + (float,)):
return s
if len(s) == 0:
return 0
if s[-2:-1].isalpha() and s[-1].isalpha():
if s[-1] == "b" or s[-1] == "B":
s = s[:-1]
units = BYTE_UNITS
suffix = s[-1].lower()
# Check if the variable is a string representation of an int
# without a units part. Assuming that the units are bytes.
if suffix.isdigit():
digits_part = s
suffix = 'b'
else:
digits_part = s[:-1]
if suffix in units.keys() or suffix.isdigit():
try:
digits = int(digits_part)
except ValueError:
raise errors.DockerException(
'Failed converting the string value for memory ({0}) to'
' an integer.'.format(digits_part)
)
# Reconvert to long for the final result
s = int(digits * units[suffix])
else:
raise errors.DockerException(
'The specified value for memory ({0}) should specify the'
' units. The postfix should be one of the `b` `k` `m` `g`'
' characters'.format(s)
)
return s
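# Illustrative examples (added):
# >>> parse_bytes('128m')
# 134217728
# >>> parse_bytes('1024')  # unitless strings are treated as bytes
# 1024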
def host_config_type_error(param, param_value, expected):
error_msg = 'Invalid type for {0} param: expected {1} but found {2}'
return TypeError(error_msg.format(param, expected, type(param_value)))
def host_config_version_error(param, version, less_than=True):
operator = '<' if less_than else '>'
error_msg = '{0} param is not supported in API versions {1} {2}'
return errors.InvalidVersion(error_msg.format(param, operator, version))
def host_config_value_error(param, param_value):
error_msg = 'Invalid value for {0} param: {1}'
return ValueError(error_msg.format(param, param_value))
def create_host_config(binds=None, port_bindings=None, lxc_conf=None,
publish_all_ports=False, links=None, privileged=False,
dns=None, dns_search=None, volumes_from=None,
network_mode=None, restart_policy=None, cap_add=None,
cap_drop=None, devices=None, extra_hosts=None,
read_only=None, pid_mode=None, ipc_mode=None,
security_opt=None, ulimits=None, log_config=None,
mem_limit=None, memswap_limit=None,
mem_reservation=None, kernel_memory=None,
mem_swappiness=None, cgroup_parent=None,
group_add=None, cpu_quota=None,
cpu_period=None, blkio_weight=None,
blkio_weight_device=None, device_read_bps=None,
device_write_bps=None, device_read_iops=None,
device_write_iops=None, oom_kill_disable=False,
shm_size=None, sysctls=None, version=None, tmpfs=None,
oom_score_adj=None, dns_opt=None, cpu_shares=None,
cpuset_cpus=None, userns_mode=None, pids_limit=None,
isolation=None):
host_config = {}
if not version:
warnings.warn(
'docker.utils.create_host_config() is deprecated. Please use '
'Client.create_host_config() instead.'
)
version = constants.DEFAULT_DOCKER_API_VERSION
if mem_limit is not None:
host_config['Memory'] = parse_bytes(mem_limit)
if memswap_limit is not None:
host_config['MemorySwap'] = parse_bytes(memswap_limit)
if mem_reservation:
if version_lt(version, '1.21'):
raise host_config_version_error('mem_reservation', '1.21')
host_config['MemoryReservation'] = parse_bytes(mem_reservation)
if kernel_memory:
if version_lt(version, '1.21'):
raise host_config_version_error('kernel_memory', '1.21')
host_config['KernelMemory'] = parse_bytes(kernel_memory)
if mem_swappiness is not None:
if version_lt(version, '1.20'):
raise host_config_version_error('mem_swappiness', '1.20')
if not isinstance(mem_swappiness, int):
raise host_config_type_error(
'mem_swappiness', mem_swappiness, 'int'
)
host_config['MemorySwappiness'] = mem_swappiness
if shm_size is not None:
if isinstance(shm_size, six.string_types):
shm_size = parse_bytes(shm_size)
host_config['ShmSize'] = shm_size
if pid_mode not in (None, 'host'):
raise host_config_value_error('pid_mode', pid_mode)
elif pid_mode:
host_config['PidMode'] = pid_mode
if ipc_mode:
host_config['IpcMode'] = ipc_mode
if privileged:
host_config['Privileged'] = privileged
if oom_kill_disable:
if version_lt(version, '1.20'):
            raise host_config_version_error('oom_kill_disable', '1.20')
host_config['OomKillDisable'] = oom_kill_disable
if oom_score_adj:
if version_lt(version, '1.22'):
raise host_config_version_error('oom_score_adj', '1.22')
if not isinstance(oom_score_adj, int):
raise host_config_type_error(
'oom_score_adj', oom_score_adj, 'int'
)
host_config['OomScoreAdj'] = oom_score_adj
if publish_all_ports:
host_config['PublishAllPorts'] = publish_all_ports
if read_only is not None:
host_config['ReadonlyRootfs'] = read_only
if dns_search:
host_config['DnsSearch'] = dns_search
if network_mode:
host_config['NetworkMode'] = network_mode
elif network_mode is None and compare_version('1.19', version) > 0:
host_config['NetworkMode'] = 'default'
if restart_policy:
if not isinstance(restart_policy, dict):
raise host_config_type_error(
'restart_policy', restart_policy, 'dict'
)
host_config['RestartPolicy'] = restart_policy
if cap_add:
host_config['CapAdd'] = cap_add
if cap_drop:
host_config['CapDrop'] = cap_drop
if devices:
host_config['Devices'] = parse_devices(devices)
if group_add:
if version_lt(version, '1.20'):
raise host_config_version_error('group_add', '1.20')
host_config['GroupAdd'] = [six.text_type(grp) for grp in group_add]
if dns is not None:
host_config['Dns'] = dns
if dns_opt is not None:
if version_lt(version, '1.21'):
raise host_config_version_error('dns_opt', '1.21')
host_config['DnsOptions'] = dns_opt
if security_opt is not None:
if not isinstance(security_opt, list):
raise host_config_type_error('security_opt', security_opt, 'list')
host_config['SecurityOpt'] = security_opt
if sysctls:
if not isinstance(sysctls, dict):
raise host_config_type_error('sysctls', sysctls, 'dict')
host_config['Sysctls'] = {}
for k, v in six.iteritems(sysctls):
host_config['Sysctls'][k] = six.text_type(v)
if volumes_from is not None:
if isinstance(volumes_from, six.string_types):
volumes_from = volumes_from.split(',')
host_config['VolumesFrom'] = volumes_from
if binds is not None:
host_config['Binds'] = convert_volume_binds(binds)
if port_bindings is not None:
host_config['PortBindings'] = convert_port_bindings(port_bindings)
if extra_hosts is not None:
if isinstance(extra_hosts, dict):
extra_hosts = [
'{0}:{1}'.format(k, v)
for k, v in sorted(six.iteritems(extra_hosts))
]
host_config['ExtraHosts'] = extra_hosts
if links is not None:
host_config['Links'] = normalize_links(links)
if isinstance(lxc_conf, dict):
formatted = []
for k, v in six.iteritems(lxc_conf):
formatted.append({'Key': k, 'Value': str(v)})
lxc_conf = formatted
if lxc_conf is not None:
host_config['LxcConf'] = lxc_conf
if cgroup_parent is not None:
host_config['CgroupParent'] = cgroup_parent
if ulimits is not None:
if not isinstance(ulimits, list):
raise host_config_type_error('ulimits', ulimits, 'list')
host_config['Ulimits'] = []
for l in ulimits:
if not isinstance(l, Ulimit):
l = Ulimit(**l)
host_config['Ulimits'].append(l)
if log_config is not None:
if not isinstance(log_config, LogConfig):
if not isinstance(log_config, dict):
raise host_config_type_error(
'log_config', log_config, 'LogConfig'
)
log_config = LogConfig(**log_config)
host_config['LogConfig'] = log_config
if cpu_quota:
if not isinstance(cpu_quota, int):
raise host_config_type_error('cpu_quota', cpu_quota, 'int')
if version_lt(version, '1.19'):
raise host_config_version_error('cpu_quota', '1.19')
host_config['CpuQuota'] = cpu_quota
if cpu_period:
if not isinstance(cpu_period, int):
raise host_config_type_error('cpu_period', cpu_period, 'int')
if version_lt(version, '1.19'):
raise host_config_version_error('cpu_period', '1.19')
host_config['CpuPeriod'] = cpu_period
if cpu_shares:
if version_lt(version, '1.18'):
raise host_config_version_error('cpu_shares', '1.18')
if not isinstance(cpu_shares, int):
raise host_config_type_error('cpu_shares', cpu_shares, 'int')
host_config['CpuShares'] = cpu_shares
if cpuset_cpus:
if version_lt(version, '1.18'):
raise host_config_version_error('cpuset_cpus', '1.18')
host_config['CpuSetCpus'] = cpuset_cpus
if blkio_weight:
if not isinstance(blkio_weight, int):
raise host_config_type_error('blkio_weight', blkio_weight, 'int')
if version_lt(version, '1.22'):
raise host_config_version_error('blkio_weight', '1.22')
host_config["BlkioWeight"] = blkio_weight
if blkio_weight_device:
if not isinstance(blkio_weight_device, list):
raise host_config_type_error(
'blkio_weight_device', blkio_weight_device, 'list'
)
if version_lt(version, '1.22'):
raise host_config_version_error('blkio_weight_device', '1.22')
host_config["BlkioWeightDevice"] = blkio_weight_device
if device_read_bps:
if not isinstance(device_read_bps, list):
raise host_config_type_error(
'device_read_bps', device_read_bps, 'list'
)
if version_lt(version, '1.22'):
raise host_config_version_error('device_read_bps', '1.22')
host_config["BlkioDeviceReadBps"] = device_read_bps
if device_write_bps:
if not isinstance(device_write_bps, list):
raise host_config_type_error(
'device_write_bps', device_write_bps, 'list'
)
if version_lt(version, '1.22'):
raise host_config_version_error('device_write_bps', '1.22')
host_config["BlkioDeviceWriteBps"] = device_write_bps
if device_read_iops:
if not isinstance(device_read_iops, list):
raise host_config_type_error(
'device_read_iops', device_read_iops, 'list'
)
if version_lt(version, '1.22'):
raise host_config_version_error('device_read_iops', '1.22')
host_config["BlkioDeviceReadIOps"] = device_read_iops
if device_write_iops:
if not isinstance(device_write_iops, list):
raise host_config_type_error(
'device_write_iops', device_write_iops, 'list'
)
if version_lt(version, '1.22'):
raise host_config_version_error('device_write_iops', '1.22')
host_config["BlkioDeviceWriteIOps"] = device_write_iops
if tmpfs:
if version_lt(version, '1.22'):
raise host_config_version_error('tmpfs', '1.22')
host_config["Tmpfs"] = convert_tmpfs_mounts(tmpfs)
if userns_mode:
if version_lt(version, '1.23'):
raise host_config_version_error('userns_mode', '1.23')
if userns_mode != "host":
raise host_config_value_error("userns_mode", userns_mode)
host_config['UsernsMode'] = userns_mode
if pids_limit:
if not isinstance(pids_limit, int):
raise host_config_type_error('pids_limit', pids_limit, 'int')
if version_lt(version, '1.23'):
raise host_config_version_error('pids_limit', '1.23')
host_config["PidsLimit"] = pids_limit
if isolation:
if not isinstance(isolation, six.string_types):
raise host_config_type_error('isolation', isolation, 'string')
if version_lt(version, '1.24'):
raise host_config_version_error('isolation', '1.24')
host_config['Isolation'] = isolation
return host_config
def normalize_links(links):
if isinstance(links, dict):
links = six.iteritems(links)
return ['{0}:{1}'.format(k, v) for k, v in sorted(links)]
def create_networking_config(endpoints_config=None):
networking_config = {}
if endpoints_config:
networking_config["EndpointsConfig"] = endpoints_config
return networking_config
def create_endpoint_config(version, aliases=None, links=None,
ipv4_address=None, ipv6_address=None,
link_local_ips=None):
if version_lt(version, '1.22'):
raise errors.InvalidVersion(
'Endpoint config is not supported for API version < 1.22'
)
endpoint_config = {}
if aliases:
endpoint_config["Aliases"] = aliases
if links:
endpoint_config["Links"] = normalize_links(links)
ipam_config = {}
if ipv4_address:
ipam_config['IPv4Address'] = ipv4_address
if ipv6_address:
ipam_config['IPv6Address'] = ipv6_address
if link_local_ips is not None:
if version_lt(version, '1.24'):
raise errors.InvalidVersion(
'link_local_ips is not supported for API version < 1.24'
)
ipam_config['LinkLocalIPs'] = link_local_ips
if ipam_config:
endpoint_config['IPAMConfig'] = ipam_config
return endpoint_config
def parse_env_file(env_file):
"""
Reads a line-separated environment file.
The format of each line should be "key=value".
"""
environment = {}
with open(env_file, 'r') as f:
        for line in f:
            line = line.strip()
            # skip blank lines and comments
            if not line or line.startswith('#'):
                continue
            parse_line = line.split('=', 1)
if len(parse_line) == 2:
k, v = parse_line
environment[k] = v
else:
raise errors.DockerException(
'Invalid line in environment file {0}:\n{1}'.format(
env_file, line))
return environment
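# Hedged example (added): an env file containing
#     # database settings
#     DB_HOST=localhost
#     DB_NAME=
# parses to {'DB_HOST': 'localhost', 'DB_NAME': ''}.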
def split_command(command):
if six.PY2 and not isinstance(command, six.binary_type):
command = command.encode('utf-8')
return shlex.split(command)
def format_environment(environment):
def format_env(key, value):
if value is None:
return key
if isinstance(value, six.binary_type):
value = value.decode('utf-8')
return u'{key}={value}'.format(key=key, value=value)
return [format_env(*var) for var in six.iteritems(environment)]
def create_container_config(
version, image, command, hostname=None, user=None, detach=False,
stdin_open=False, tty=False, mem_limit=None, ports=None, environment=None,
dns=None, volumes=None, volumes_from=None, network_disabled=False,
entrypoint=None, cpu_shares=None, working_dir=None, domainname=None,
memswap_limit=None, cpuset=None, host_config=None, mac_address=None,
labels=None, volume_driver=None, stop_signal=None, networking_config=None,
healthcheck=None,
):
if isinstance(command, six.string_types):
command = split_command(command)
if isinstance(entrypoint, six.string_types):
entrypoint = split_command(entrypoint)
if isinstance(environment, dict):
environment = format_environment(environment)
if labels is not None and compare_version('1.18', version) < 0:
raise errors.InvalidVersion(
'labels were only introduced in API version 1.18'
)
if cpuset is not None or cpu_shares is not None:
if version_gte(version, '1.18'):
warnings.warn(
'The cpuset_cpus and cpu_shares options have been moved to '
'host_config in API version 1.18, and will be removed',
DeprecationWarning
)
if stop_signal is not None and compare_version('1.21', version) < 0:
raise errors.InvalidVersion(
'stop_signal was only introduced in API version 1.21'
)
if healthcheck is not None and version_lt(version, '1.24'):
raise errors.InvalidVersion(
'Health options were only introduced in API version 1.24'
)
if compare_version('1.19', version) < 0:
if volume_driver is not None:
raise errors.InvalidVersion(
'Volume drivers were only introduced in API version 1.19'
)
mem_limit = mem_limit if mem_limit is not None else 0
memswap_limit = memswap_limit if memswap_limit is not None else 0
else:
if mem_limit is not None:
raise errors.InvalidVersion(
'mem_limit has been moved to host_config in API version 1.19'
)
if memswap_limit is not None:
raise errors.InvalidVersion(
'memswap_limit has been moved to host_config in API '
'version 1.19'
)
if isinstance(labels, list):
labels = dict((lbl, six.text_type('')) for lbl in labels)
if mem_limit is not None:
mem_limit = parse_bytes(mem_limit)
if memswap_limit is not None:
memswap_limit = parse_bytes(memswap_limit)
if isinstance(ports, list):
exposed_ports = {}
for port_definition in ports:
port = port_definition
proto = 'tcp'
if isinstance(port_definition, tuple):
if len(port_definition) == 2:
proto = port_definition[1]
port = port_definition[0]
exposed_ports['{0}/{1}'.format(port, proto)] = {}
ports = exposed_ports
if isinstance(volumes, six.string_types):
volumes = [volumes, ]
if isinstance(volumes, list):
volumes_dict = {}
for vol in volumes:
volumes_dict[vol] = {}
volumes = volumes_dict
if volumes_from:
if not isinstance(volumes_from, six.string_types):
volumes_from = ','.join(volumes_from)
else:
# Force None, an empty list or dict causes client.start to fail
volumes_from = None
if healthcheck and isinstance(healthcheck, dict):
healthcheck = Healthcheck(**healthcheck)
attach_stdin = False
attach_stdout = False
attach_stderr = False
stdin_once = False
if not detach:
attach_stdout = True
attach_stderr = True
if stdin_open:
attach_stdin = True
stdin_once = True
if compare_version('1.10', version) >= 0:
message = ('{0!r} parameter has no effect on create_container().'
' It has been moved to host_config')
if dns is not None:
raise errors.InvalidVersion(message.format('dns'))
if volumes_from is not None:
raise errors.InvalidVersion(message.format('volumes_from'))
return {
'Hostname': hostname,
'Domainname': domainname,
'ExposedPorts': ports,
'User': six.text_type(user) if user else None,
'Tty': tty,
'OpenStdin': stdin_open,
'StdinOnce': stdin_once,
'Memory': mem_limit,
'AttachStdin': attach_stdin,
'AttachStdout': attach_stdout,
'AttachStderr': attach_stderr,
'Env': environment,
'Cmd': command,
'Dns': dns,
'Image': image,
'Volumes': volumes,
'VolumesFrom': volumes_from,
'NetworkDisabled': network_disabled,
'Entrypoint': entrypoint,
'CpuShares': cpu_shares,
'Cpuset': cpuset,
'CpusetCpus': cpuset,
'WorkingDir': working_dir,
'MemorySwap': memswap_limit,
'HostConfig': host_config,
'NetworkingConfig': networking_config,
'MacAddress': mac_address,
'Labels': labels,
'VolumeDriver': volume_driver,
'StopSignal': stop_signal,
'Healthcheck': healthcheck,
}
| bfirsh/docker-py | docker/utils/utils.py | Python | apache-2.0 | 39,231 | 0.000051 |
from pupa.scrape import Person, Scraper
from openstates.utils import LXMLMixin
class VIPersonScraper(Scraper, LXMLMixin):
def scrape(self, chamber, term):
        # Nothing is scraped yet (contact info is locked up in PDFs);
        # `return` before `yield` keeps this an empty generator.
        return
        yield
# home_url = 'http://www.legvi.org/'
# doc = lxml.html.fromstring(self.get(url=home_url).text)
# USVI offers name, island, and biography, but contact info is locked up in a PDF
# //*[@id="sp-main-menu"]/ul/li[2]/div/div/div/div/ul/li/div/div/ul/li/a/span/span
| openstates/openstates | openstates/vi/legislators.py | Python | gpl-3.0 | 490 | 0.002041 |
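# What each key appears to control (inferred from the names; a sketch, not
# authoritative pykronos documentation):
#   host: address of the Kronos server that events are shipped to
#   stream: Kronos stream that middleware events are written into
#   blocking: if True, each event put blocks until acknowledged
#   log_exception_stack_trace: include stack traces when logging exceptions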
KRONOS_MIDDLEWARE = {
'host': 'http://localhost:9191/',
'stream': 'django_middleware',
'blocking': True,
'log_exception_stack_trace': True
}
| Locu/chronology | pykronos/tests/conf/django_settings.py | Python | mit | 149 | 0 |
# Query Github public timeline using Bigquery and display top new repositories
# Modified from sources
## https://developers.google.com/bigquery/bigquery-api-quickstart#completecode
## https://gist.github.com/igrigorik/f8742314320e0a4b1a89
import httplib2
import pprint
import sys
import time
import json
import logging
from apiclient.discovery import build
from apiclient.errors import HttpError
from apiclient import errors
from pprint import pprint
from oauth2client.client import SignedJwtAssertionCredentials
from oauth2client.client import AccessTokenRefreshError
from oauth2client.client import OAuth2WebServerFlow
from oauth2client.client import flow_from_clientsecrets
from oauth2client.file import Storage
from oauth2client.tools import run
#Debug
# https://developers.google.com/api-client-library/python/guide/logging
#httplib2.debuglevel = 4
#logger = logging.getLogger()
#logger.setLevel(logging.INFO)
def main():
PROJECT_NUMBER = 'xxx' #TODO: Add project number
SERVICE_ACCOUNT_EMAIL = 'xxx@developer.gserviceaccount.com' #TODO: Add service account
    f = open('xxx-key.p12', 'rb') #TODO: Add key
key = f.read()
f.close()
credentials = SignedJwtAssertionCredentials(
SERVICE_ACCOUNT_EMAIL,
key,
scope='https://www.googleapis.com/auth/bigquery.readonly')
http = httplib2.Http()
http = credentials.authorize(http)
bigquery_service = build('bigquery', 'v2', http=http)
#https://developers.google.com/bigquery/docs/reference/v2/jobs/query
#https://code.google.com/p/python-sqlparse/
#http://sqlformat.org/
#TODO: Change timestamp
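    # Note: the query below is BigQuery *legacy* SQL, where a trailing comma
    # before FROM in a SELECT list is legal.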
try:
query_request = bigquery_service.jobs()
query_data = {
"kind": "bigquery#job",
'query': 'SELECT repository_url, repository_language, COUNT(repository_name) AS cnt, \
FROM githubarchive:github.timeline \
WHERE TYPE="WatchEvent" \
AND PARSE_UTC_USEC(created_at) >= PARSE_UTC_USEC("2014-08-15 00:00:00") \
AND repository_url IN \
(SELECT repository_url \
FROM githubarchive:github.timeline \
WHERE TYPE="CreateEvent" \
AND PARSE_UTC_USEC(repository_created_at) >= PARSE_UTC_USEC("2014-08-15 00:00:00") \
AND repository_fork = "false" \
AND payload_ref_type = "repository" \
GROUP BY repository_url) \
GROUP BY repository_name, \
repository_language, \
repository_description, \
repository_url HAVING cnt >= 5 \
                        ORDER BY cnt DESC LIMIT 5;',
            "useQueryCache": False  # True or False; the API expects a boolean
}
#Trigger on-demand query
#Quota & Policy info https://developers.google.com/bigquery/quota-policy
query_response = query_request.query(projectId=PROJECT_NUMBER,body=query_data).execute()
#Did the bigquery get processed?
if ((query_response['jobComplete']) and (int(query_response['totalRows']) >1) and (int(query_response['totalBytesProcessed']) > 0 )):
#Store result for further analysis
with open( 'toprepositories.json', 'w' ) as outfile:
json.dump( query_response,outfile)
#Print results
print "Top Repositories in Github"
for row in query_response['rows']:
result_row = []
for field in row['f']:
result_row.append(field['v'])
print('\t'.join(map(str,result_row)))
else:
print "Ignore: jobComplete=%s \t totalRows=%s \t totalBytesProcessed=%s" % (query_response['jobComplete'],query_response['totalRows'], query_response['totalBytesProcessed'])
except HttpError as err:
print "Error:", pprint(err.content)
except AccessTokenRefreshError:
print "Token Error: Credentials have been revoked or expired"
if __name__ == '__main__':
main()
| harishvc/tools | bigquery-github/TopGithubRepos.py | Python | mit | 4,367 | 0.0158 |
# -*- coding: utf-8 -*-
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of Unix-like rm command for cloud storage providers."""
from __future__ import absolute_import
import time
from gslib.cloud_api import BucketNotFoundException
from gslib.cloud_api import NotEmptyException
from gslib.cloud_api import NotFoundException
from gslib.cloud_api import ServiceException
from gslib.command import Command
from gslib.command import DecrementFailureCount
from gslib.command_argument import CommandArgument
from gslib.cs_api_map import ApiSelector
from gslib.exception import CommandException
from gslib.exception import NO_URLS_MATCHED_GENERIC
from gslib.exception import NO_URLS_MATCHED_TARGET
from gslib.name_expansion import NameExpansionIterator
from gslib.name_expansion import SeekAheadNameExpansionIterator
from gslib.parallelism_framework_util import PutToQueueWithTimeout
from gslib.storage_url import StorageUrlFromString
from gslib.thread_message import MetadataMessage
from gslib.translation_helper import PreconditionsFromHeaders
from gslib.util import GetCloudApiInstance
from gslib.util import NO_MAX
from gslib.util import Retry
from gslib.util import StdinIterator
_SYNOPSIS = """
gsutil rm [-f] [-r] url...
gsutil rm [-f] [-r] -I
"""
_DETAILED_HELP_TEXT = ("""
<B>SYNOPSIS</B>
""" + _SYNOPSIS + """
<B>DESCRIPTION</B>
The gsutil rm command removes objects.
For example, the command:
gsutil rm gs://bucket/subdir/*
will remove all objects in gs://bucket/subdir, but not in any of its
sub-directories. In contrast:
gsutil rm gs://bucket/subdir/**
will remove all objects under gs://bucket/subdir or any of its
subdirectories.
You can also use the -r option to specify recursive object deletion. Thus, for
example, either of the following two commands will remove gs://bucket/subdir
and all objects and subdirectories under it:
gsutil rm gs://bucket/subdir**
gsutil rm -r gs://bucket/subdir
The -r option will also delete all object versions in the subdirectory for
versioning-enabled buckets, whereas the ** command will only delete the live
version of each object in the subdirectory.
Running gsutil rm -r on a bucket will delete all versions of all objects in
the bucket, and then delete the bucket:
gsutil rm -r gs://bucket
If you want to delete all objects in the bucket, but not the bucket itself,
this command will work:
gsutil rm gs://bucket/**
If you have a large number of objects to remove you might want to use the
gsutil -m option, to perform parallel (multi-threaded/multi-processing)
removes:
gsutil -m rm -r gs://my_bucket/subdir
You can pass a list of URLs (one per line) to remove on stdin instead of as
command line arguments by using the -I option. This allows you to use gsutil
in a pipeline to remove objects identified by a program, such as:
some_program | gsutil -m rm -I
The contents of stdin can name cloud URLs and wildcards of cloud URLs.
Note that gsutil rm will refuse to remove files from the local
file system. For example this will fail:
gsutil rm *.txt
WARNING: Object removal cannot be undone. Google Cloud Storage is designed
to give developers a high amount of flexibility and control over their data,
and Google maintains strict controls over the processing and purging of
deleted data. To protect yourself from mistakes, you can configure object
versioning on your bucket(s). See 'gsutil help versions' for details.
<B>DATA RESTORATION FROM ACCIDENTAL DELETION OR OVERWRITES</B>
Google Cloud Storage does not provide support for restoring data lost
or overwritten due to customer errors. If you have concerns that your
application software (or your users) may at some point erroneously delete or
overwrite data, you can protect yourself from that risk by enabling Object
Versioning (see "gsutil help versioning"). Doing so increases storage costs,
which can be partially mitigated by configuring Lifecycle Management to delete
older object versions (see "gsutil help lifecycle").
<B>OPTIONS</B>
-f Continues silently (without printing error messages) despite
errors when removing multiple objects. If some of the objects
could not be removed, gsutil's exit status will be non-zero even
if this flag is set. Execution will still halt if an inaccessible
bucket is encountered. This option is implicitly set when running
"gsutil -m rm ...".
-I Causes gsutil to read the list of objects to remove from stdin.
This allows you to run a program that generates the list of
objects to remove.
-R, -r The -R and -r options are synonymous. Causes bucket or bucket
subdirectory contents (all objects and subdirectories that it
contains) to be removed recursively. If used with a bucket-only
URL (like gs://bucket), after deleting objects and subdirectories
gsutil will delete the bucket. This option implies the -a option
and will delete all object versions.
-a Delete all versions of an object.
""")
def _RemoveExceptionHandler(cls, e):
"""Simple exception handler to allow post-completion status."""
if not cls.continue_on_error:
cls.logger.error(str(e))
# TODO: Use shared state to track missing bucket names when we get a
# BucketNotFoundException. Then improve bucket removal logic and exception
# messages.
if isinstance(e, BucketNotFoundException):
cls.bucket_not_found_count += 1
cls.logger.error(str(e))
else:
if _ExceptionMatchesBucketToDelete(cls.bucket_strings_to_delete, e):
DecrementFailureCount()
else:
cls.op_failure_count += 1
# pylint: disable=unused-argument
def _RemoveFoldersExceptionHandler(cls, e):
"""When removing folders, we don't mind if none exist."""
if ((isinstance(e, CommandException) and
NO_URLS_MATCHED_GENERIC in e.reason)
or isinstance(e, NotFoundException)):
DecrementFailureCount()
else:
raise e
def _RemoveFuncWrapper(cls, name_expansion_result, thread_state=None):
cls.RemoveFunc(name_expansion_result, thread_state=thread_state)
def _ExceptionMatchesBucketToDelete(bucket_strings_to_delete, e):
"""Returns True if the exception matches a bucket slated for deletion.
A recursive delete call on an empty bucket will raise an exception when
listing its objects, but if we plan to delete the bucket that shouldn't
result in a user-visible error.
Args:
bucket_strings_to_delete: Buckets slated for recursive deletion.
e: Exception to check.
Returns:
True if the exception was a no-URLs-matched exception and it matched
one of bucket_strings_to_delete, None otherwise.
"""
if bucket_strings_to_delete:
msg = NO_URLS_MATCHED_TARGET % ''
if msg in str(e):
parts = str(e).split(msg)
return len(parts) == 2 and parts[1] in bucket_strings_to_delete
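# Worked illustration (hypothetical values): with bucket_strings_to_delete
# equal to ['gs://some_bucket'] and str(e) equal to msg + 'gs://some_bucket',
# the split yields ['', 'gs://some_bucket'], parts[1] matches, and the
# empty-bucket "failure" is treated as benign.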
class RmCommand(Command):
"""Implementation of gsutil rm command."""
# Command specification. See base class for documentation.
command_spec = Command.CreateCommandSpec(
'rm',
command_name_aliases=['del', 'delete', 'remove'],
usage_synopsis=_SYNOPSIS,
min_args=0,
max_args=NO_MAX,
supported_sub_args='afIrR',
file_url_ok=False,
provider_url_ok=False,
urls_start_arg=0,
gs_api_support=[ApiSelector.XML, ApiSelector.JSON],
gs_default_api=ApiSelector.JSON,
argparse_arguments=[
CommandArgument.MakeZeroOrMoreCloudURLsArgument()
]
)
# Help specification. See help_provider.py for documentation.
help_spec = Command.HelpSpec(
help_name='rm',
help_name_aliases=['del', 'delete', 'remove'],
help_type='command_help',
help_one_line_summary='Remove objects',
help_text=_DETAILED_HELP_TEXT,
subcommand_help_text={},
)
def RunCommand(self):
"""Command entry point for the rm command."""
# self.recursion_requested is initialized in command.py (so it can be
# checked in parent class for all commands).
self.continue_on_error = self.parallel_operations
self.read_args_from_stdin = False
self.all_versions = False
if self.sub_opts:
for o, unused_a in self.sub_opts:
if o == '-a':
self.all_versions = True
elif o == '-f':
self.continue_on_error = True
elif o == '-I':
self.read_args_from_stdin = True
elif o == '-r' or o == '-R':
self.recursion_requested = True
self.all_versions = True
if self.read_args_from_stdin:
if self.args:
raise CommandException('No arguments allowed with the -I flag.')
url_strs = StdinIterator()
else:
if not self.args:
raise CommandException('The rm command (without -I) expects at '
'least one URL.')
url_strs = self.args
# Tracks number of object deletes that failed.
self.op_failure_count = 0
# Tracks if any buckets were missing.
self.bucket_not_found_count = 0
# Tracks buckets that are slated for recursive deletion.
bucket_urls_to_delete = []
self.bucket_strings_to_delete = []
if self.recursion_requested:
bucket_fields = ['id']
for url_str in url_strs:
url = StorageUrlFromString(url_str)
if url.IsBucket() or url.IsProvider():
for blr in self.WildcardIterator(url_str).IterBuckets(
bucket_fields=bucket_fields):
bucket_urls_to_delete.append(blr.storage_url)
self.bucket_strings_to_delete.append(url_str)
self.preconditions = PreconditionsFromHeaders(self.headers or {})
try:
# Expand wildcards, dirs, buckets, and bucket subdirs in URLs.
name_expansion_iterator = NameExpansionIterator(
self.command_name, self.debug, self.logger, self.gsutil_api,
url_strs, self.recursion_requested, project_id=self.project_id,
all_versions=self.all_versions,
continue_on_error=self.continue_on_error or self.parallel_operations)
seek_ahead_iterator = None
# Cannot seek ahead with stdin args, since we can only iterate them
# once without buffering in memory.
if not self.read_args_from_stdin:
seek_ahead_iterator = SeekAheadNameExpansionIterator(
self.command_name, self.debug, self.GetSeekAheadGsutilApi(),
url_strs, self.recursion_requested,
all_versions=self.all_versions, project_id=self.project_id)
# Perform remove requests in parallel (-m) mode, if requested, using
# configured number of parallel processes and threads. Otherwise,
# perform requests with sequential function calls in current process.
self.Apply(_RemoveFuncWrapper, name_expansion_iterator,
_RemoveExceptionHandler,
fail_on_error=(not self.continue_on_error),
shared_attrs=['op_failure_count', 'bucket_not_found_count'],
seek_ahead_iterator=seek_ahead_iterator)
      # Assuming the bucket has versioning enabled, URLs that don't map to
# objects should throw an error even with all_versions, since the prior
# round of deletes only sends objects to a history table.
# This assumption that rm -a is only called for versioned buckets should be
# corrected, but the fix is non-trivial.
except CommandException as e:
# Don't raise if there are buckets to delete -- it's valid to say:
# gsutil rm -r gs://some_bucket
# if the bucket is empty.
if _ExceptionMatchesBucketToDelete(self.bucket_strings_to_delete, e):
DecrementFailureCount()
else:
raise
    except ServiceException as e:
if not self.continue_on_error:
raise
if self.bucket_not_found_count:
raise CommandException('Encountered non-existent bucket during listing')
if self.op_failure_count and not self.continue_on_error:
raise CommandException('Some files could not be removed.')
# If this was a gsutil rm -r command covering any bucket subdirs,
# remove any dir_$folder$ objects (which are created by various web UI
# tools to simulate folders).
if self.recursion_requested:
folder_object_wildcards = []
for url_str in url_strs:
url = StorageUrlFromString(url_str)
if url.IsObject():
folder_object_wildcards.append('%s**_$folder$' % url_str)
if folder_object_wildcards:
self.continue_on_error = True
try:
name_expansion_iterator = NameExpansionIterator(
self.command_name, self.debug,
self.logger, self.gsutil_api,
folder_object_wildcards, self.recursion_requested,
project_id=self.project_id,
all_versions=self.all_versions)
# When we're removing folder objects, always continue on error
self.Apply(_RemoveFuncWrapper, name_expansion_iterator,
_RemoveFoldersExceptionHandler,
fail_on_error=False)
except CommandException as e:
# Ignore exception from name expansion due to an absent folder file.
if not e.reason.startswith(NO_URLS_MATCHED_GENERIC):
raise
# Now that all data has been deleted, delete any bucket URLs.
for url in bucket_urls_to_delete:
self.logger.info('Removing %s...', url)
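      # Right after the per-object deletes, a bucket delete can transiently
      # fail with NotEmptyException (listing consistency lag), hence the
      # short retry below.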
@Retry(NotEmptyException, tries=3, timeout_secs=1)
def BucketDeleteWithRetry():
self.gsutil_api.DeleteBucket(url.bucket_name, provider=url.scheme)
BucketDeleteWithRetry()
if self.op_failure_count:
      plural_str = 's' if self.op_failure_count > 1 else ''
raise CommandException('%d file%s/object%s could not be removed.' % (
self.op_failure_count, plural_str, plural_str))
return 0
def RemoveFunc(self, name_expansion_result, thread_state=None):
gsutil_api = GetCloudApiInstance(self, thread_state=thread_state)
exp_src_url = name_expansion_result.expanded_storage_url
self.logger.info('Removing %s...', exp_src_url)
gsutil_api.DeleteObject(
exp_src_url.bucket_name, exp_src_url.object_name,
preconditions=self.preconditions, generation=exp_src_url.generation,
provider=exp_src_url.scheme)
PutToQueueWithTimeout(gsutil_api.status_queue,
MetadataMessage(message_time=time.time()))
| fishjord/gsutil | gslib/commands/rm.py | Python | apache-2.0 | 15,054 | 0.004849 |
from django.conf import settings
from django.core.management.base import BaseCommand
from proso_common.models import CustomConfig
import os.path
import yaml
from django.db import transaction
from optparse import make_option
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option(
'-f',
'--file',
dest='filename',
default=os.path.join(settings.BASE_DIR, 'proso_custom_config.yaml')
),
)
def handle(self, *args, **options):
with transaction.atomic():
CustomConfig.objects.filter(user_id=None).delete()
with open(options['filename'], 'r', encoding='utf8') as f:
for app_name, keys in yaml.load(f).items():
for key, records in keys.items():
for record in records:
CustomConfig.objects.try_create(
app_name,
key,
record['value'],
user_id=None,
condition_key=record['condition_key'],
condition_value=record['condition_value']
)
| adaptive-learning/proso-apps | proso_common/management/commands/load_global_custom_config.py | Python | mit | 1,269 | 0 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# vim:ts=4:sw=4:et:
# Simple debugging module
import os
import inspect
from .config import OPTIONS
__all__ = ["__DEBUG__", "__LINE__", "__FILE__"]
# --------------------- END OF GLOBAL FLAGS ---------------------
def __DEBUG__(msg, level=1):
if level > OPTIONS.debug_level:
return
line = inspect.getouterframes(inspect.currentframe())[1][2]
fname = os.path.basename(inspect.getouterframes(inspect.currentframe())[1][1])
OPTIONS.stderr.write("debug: %s:%i %s\n" % (fname, line, msg))
def __LINE__():
"""Returns current file interpreter line"""
return inspect.getouterframes(inspect.currentframe())[1][2]
def __FILE__():
"""Returns current file interpreter line"""
return inspect.currentframe().f_code.co_filename
| boriel/zxbasic | src/api/debug.py | Python | gpl-3.0 | 810 | 0.001235 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
def port_models(apps, schema_editor):
Proposal = apps.get_model('core', 'Proposal')
Notice = apps.get_model('core', 'Notice')
n = Notice()
n.title = "Edital"
n.description = "Edital info"
n.save()
for p in Proposal.objects.all():
p.notice = n
p.save()
def reverse_port_models(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [
('core', '0003_proposaldate'),
]
operations = [
migrations.CreateModel(
name='Notice',
fields=[
('id', models.AutoField(serialize=False, primary_key=True)),
('title', models.CharField(max_length=60)),
('description', models.CharField(max_length=500)),
('is_available', models.BooleanField(default=False)),
],
),
migrations.AddField(
model_name='proposal',
name='notice',
field=models.ForeignKey(related_name='proposals', to='core.Notice', null=True),
),
migrations.RunPython(port_models, reverse_port_models),
]
| hackultura/procult | procult/core/migrations/0004_auto_20160905_0938.py | Python | gpl-2.0 | 1,233 | 0.001622 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# kate: space-indent on; indent-width 4; mixedindent off; indent-mode python;
"""This module provides a general interface to EFI variables using platform-
specific methods. Current Windows and Linux (with sysfs and efivars) are
supported.
Under Windows the pywin32 extensions are required.
"""
import os.path
import platform
import re
if platform.system() == 'Windows':
import ctypes
import win32api, win32process, win32security
class EfiVariables(object):
"""Abstract EFI variable access class.
Use get_instance to create an instance for the current operating system."""
def read(self, name, guid):
raise NotImplementedError
def write(self, name, guid, value):
raise NotImplementedError
@classmethod
def get_instance(cls):
if platform.system() == 'Windows':
return WinApiEfiVariables()
elif platform.system() == 'Linux':
return SysfsEfiVariables()
else:
raise Exception("Unknown or unsupported operating system.")
class SysfsEfiVariables(EfiVariables):
"""EFI variable access for all platforms supporting /sys/firmware/efi/vars, e.g. Linux via efi_vars"""
sysfs_efi_vars_dir = '/sys/firmware/efi/vars'
@staticmethod
def read_efi_value(fname):
ret = None
try:
with open(fname, 'rb') as f:
ret = f.read()
except (IOError, OSError):
pass
return ret
def read(self, name, guid):
assert re.match(r"^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$", guid)
filename = self.sysfs_efi_vars_dir + "/%s-%s/data" % (name, guid)
if not os.path.exists(filename):
# variable not found
return None
        with open(filename, 'rb') as f:
            return f.read()
def available(self):
return os.path.isdir(self.sysfs_efi_vars_dir)
def __iter__(self):
if os.path.isdir(self.sysfs_efi_vars_dir):
for filename in os.listdir(self.sysfs_efi_vars_dir):
if filename == '.' or filename == '..':
continue
else:
if os.path.isdir(self.sysfs_efi_vars_dir + '/' + filename):
yield filename
else:
            return  # PEP 479: raising StopIteration inside a generator is an error on Py3.7+
def __getitem__(self, key):
filename = self.sysfs_efi_vars_dir + "/%s/data" % key
if not os.path.exists(filename):
# variable not found
return None
return SysfsEfiVariables.read_efi_value(filename)
class WinApiEfiVariables(EfiVariables):
"""EFI variable access for Windows platforms"""
def __init__(self):
# enable required SeSystemEnvironmentPrivilege privilege
privilege = win32security.LookupPrivilegeValue(None, 'SeSystemEnvironmentPrivilege')
token = win32security.OpenProcessToken(win32process.GetCurrentProcess(), win32security.TOKEN_READ|win32security.TOKEN_ADJUST_PRIVILEGES)
win32security.AdjustTokenPrivileges(token, False, [(privilege, win32security.SE_PRIVILEGE_ENABLED)])
win32api.CloseHandle(token)
# import firmware variable API
self.GetFirmwareEnvironmentVariable = ctypes.windll.kernel32.GetFirmwareEnvironmentVariableW
self.GetFirmwareEnvironmentVariable.restype = ctypes.c_int
self.GetFirmwareEnvironmentVariable.argtypes = [ctypes.c_wchar_p, ctypes.c_wchar_p, ctypes.c_void_p, ctypes.c_int]
def read(self, name, guid):
buffer = ctypes.create_string_buffer(32768)
length = self.GetFirmwareEnvironmentVariable(name, "{%s}" % guid, buffer, 32768)
if length == 0:
# FIXME: don't always raise WinError
raise ctypes.WinError()
return buffer[:length]
def available(self):
return True if self.GetFirmwareEnvironmentVariable is not None else False
def __iter__(self):
return None
def __getitem__(self, key):
return None
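# Usage sketch (commented out; assumes a Linux host with efivars mounted —
# the GUID is the standard EFI global-variable namespace):
# efi = EfiVariables.get_instance()
# if efi.available():
#     boot_current = efi.read('BootCurrent',
#                             '8be4df61-93ca-11d2-aa0d-00e098032b8c')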
| aroth-arsoft/arsoft-python | python3/arsoft/efi/__init__.py | Python | gpl-3.0 | 4,063 | 0.006153 |
#coding=utf-8
# SSSD
#
# upgrade_config.py
#
# Copyright (C) Jakub Hrozek <jhrozek@redhat.com> 2009
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function
import os
import sys
import shutil
import traceback
from optparse import OptionParser
from .ipachangeconf import openLocked
from .ipachangeconf import SSSDChangeConf
class SSSDConfigFile(SSSDChangeConf):
def __init__(self, filename):
SSSDChangeConf.__init__(self)
self.filename = filename
f = openLocked(self.filename, 0o600, False)
self.opts = self.parse(f)
f.close()
def _backup_file(self, file_name):
" Copy the file we operate on to a backup location "
shutil.copy(file_name, file_name + self.backup_suffix)
# make sure we don't leak data, force permissions on the backup
os.chmod(file_name + self.backup_suffix, 0o600)
def get_version(self):
ver = self.get_option_index('sssd', 'config_file_version')[1]
if not ver:
return 1
try:
return int(ver['value'])
except ValueError:
raise SyntaxError('config_file_version not an integer')
def rename_opts(self, parent_name, rename_kw, type='option'):
for new_name, old_name in rename_kw.items():
index, item = self.get_option_index(parent_name, old_name, type)
if item:
item['name'] = new_name
def _add_dns_domain_name(self, domain):
id_provider = self.findOpts(domain['value'], 'option', 'id_provider')[1]
dns_domain_name = { 'type' : 'option',
'name' : 'dns_discovery_domain',
                            # [len('domain/'):] strips the prefix (lstrip strips *characters*)
                            'value' : domain['name'][len('domain/'):] }
if id_provider['value'] == 'ldap':
server = self.findOpts(domain['value'], 'option', 'ldap_uri')[1]
if not server or "__srv__" in server['value']:
domain['value'].insert(0, dns_domain_name)
return
elif id_provider['value'] == 'ipa':
server = self.findOpts(domain['value'], 'option', 'ipa_server')[1]
if not server or "__srv__" in server['value']:
domain['value'].insert(0, dns_domain_name)
return
auth_provider = self.findOpts(domain['value'], 'option', 'auth_provider')[1]
if auth_provider and auth_provider['value'] == 'krb5':
server = self.findOpts(domain['value'], 'option', 'krb5_server')[1]
if not server or "__srv__" in server['value']:
domain['value'].insert(0, dns_domain_name)
def _do_v2_changes(self):
# remove Data Provider
srvlist = self.get_option_index('sssd', 'services')[1]
if srvlist:
services = [ srv.strip() for srv in srvlist['value'].split(',') ]
if 'dp' in services:
services.remove('dp')
srvlist['value'] = ", ".join([srv for srv in services])
self.delete_option('section', 'dp')
for domain in [ s for s in self.sections() if s['name'].startswith("domain/") ]:
# remove magic_private_groups from all domains
self.delete_option_subtree(domain['value'], 'option', 'magic_private_groups')
# check if we need to add dns_domain
self._add_dns_domain_name(domain)
def _update_option(self, to_section_name, from_section_name, opts):
to_section = [ s for s in self.sections() if s['name'].strip() == to_section_name ]
from_section = [ s for s in self.sections() if s['name'].strip() == from_section_name ]
if len(to_section) > 0 and len(from_section) > 0:
vals = to_section[0]['value']
for o in [one_opt for one_opt in from_section[0]['value'] if one_opt['name'] in opts]:
updated = False
for v in vals:
if v['type'] == 'empty':
continue
# if already in list, just update
if o['name'] == v['name']:
o['value'] = v['value']
updated = True
# not in list, add there
if not updated:
vals.insert(0, { 'name' : o['name'], 'type' : o['type'], 'value' : o['value'] })
def _migrate_enumerate(self, domain):
" Enumerate was special as it turned into bool from (0,1,2,3) enum "
enum = self.findOpts(domain, 'option', 'enumerate')[1]
if enum:
if enum['value'].upper() not in ['TRUE', 'FALSE']:
try:
enum['value'] = int(enum['value'])
except ValueError:
raise ValueError('Cannot convert value %s in domain %s' % (enum['value'], domain['name']))
if enum['value'] == 0:
enum['value'] = 'FALSE'
elif enum['value'] > 0:
enum['value'] = 'TRUE'
else:
raise ValueError('Cannot convert value %s in domain %s' % (enum['value'], domain['name']))
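    # Sketch of the conversion: a v1 line "enumerate = 3" becomes
    # "enumerate = TRUE", while "enumerate = 0" becomes "enumerate = FALSE";
    # TRUE/FALSE values are passed through unchanged.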
def _migrate_domain(self, domain):
# rename the section
domain['name'] = domain['name'].strip().replace('domains', 'domain')
# Generic options - new:old
generic_kw = { 'min_id' : 'minId',
'max_id': 'maxId',
'timeout': 'timeout',
'magic_private_groups' : 'magicPrivateGroups',
'cache_credentials' : 'cache-credentials',
'id_provider' : 'provider',
'auth_provider' : 'auth-module',
'access_provider' : 'access-module',
'chpass_provider' : 'chpass-module',
'use_fully_qualified_names' : 'useFullyQualifiedNames',
'store_legacy_passwords' : 'store-legacy-passwords',
}
# Proxy options
proxy_kw = { 'proxy_pam_target' : 'pam-target',
'proxy_lib_name' : 'libName',
}
# LDAP options - new:old
ldap_kw = { 'ldap_uri' : 'ldapUri',
'ldap_schema' : 'ldapSchema',
'ldap_default_bind_dn' : 'defaultBindDn',
'ldap_default_authtok_type' : 'defaultAuthtokType',
'ldap_default_authtok' : 'defaultAuthtok',
'ldap_user_search_base' : 'userSearchBase',
'ldap_user_search_scope' : 'userSearchScope',
'ldap_user_search_filter' : 'userSearchFilter',
'ldap_user_object_class' : 'userObjectClass',
'ldap_user_name' : 'userName',
'ldap_user_pwd' : 'userPassword',
'ldap_user_uid_number' : 'userUidNumber',
'ldap_user_gid_number' : 'userGidNumber',
'ldap_user_gecos' : 'userGecos',
'ldap_user_home_directory' : 'userHomeDirectory',
'ldap_user_shell' : 'userShell',
'ldap_user_uuid' : 'userUUID',
'ldap_user_principal' : 'userPrincipal',
'ldap_force_upper_case_realm' : 'force_upper_case_realm',
'ldap_user_fullname' : 'userFullname',
'ldap_user_member_of' : 'userMemberOf',
'ldap_user_modify_timestamp' : 'modifyTimestamp',
'ldap_group_search_base' : 'groupSearchBase',
'ldap_group_search_scope' : 'groupSearchScope',
'ldap_group_search_filter' : 'groupSearchFilter',
'ldap_group_object_class' : 'groupObjectClass',
'ldap_group_name' : 'groupName',
'ldap_group_pwd' : 'userPassword',
'ldap_group_gid_number' : 'groupGidNumber',
'ldap_group_member' : 'groupMember',
'ldap_group_uuid' : 'groupUUID',
'ldap_group_modify_timestamp' : 'modifyTimestamp',
'ldap_network_timeout' : 'network_timeout',
'ldap_offline_timeout' : 'offline_timeout',
'ldap_enumeration_refresh_timeout' : 'enumeration_refresh_timeout',
'ldap_stale_time' : 'stale_time',
'ldap_opt_timeout' : 'opt_timeout',
'ldap_tls_reqcert' : 'tls_reqcert',
'ldap_netgroup_search_base' : 'netgroupSearchBase',
'ldap_netgroup_object_class' : 'netgroupObjectClass',
'ldap_netgroup_name' : 'netgroupName',
'ldap_netgroup_member' : 'netgroupMember',
'ldap_netgroup_triple' : 'netgroupTriple',
'ldap_netgroup_modify_timestamp' : 'netgroupModifyTimestamp',
}
krb5_kw = { 'krb5_server' : 'krb5KDCIP',
'krb5_realm' : 'krb5REALM',
'krb5_try_simple_upn' : 'krb5try_simple_upn',
'krb5_changepw_principal' : 'krb5changepw_principle',
'krb5_ccachedir' : 'krb5ccache_dir',
'krb5_auth_timeout' : 'krb5auth_timeout',
'krb5_ccname_template' : 'krb5ccname_template',
}
user_defaults_kw = { 'default_shell' : 'defaultShell',
'base_directory' : 'baseDirectory',
}
self._migrate_enumerate(domain['value'])
self.rename_opts(domain['name'], generic_kw)
self.rename_opts(domain['name'], proxy_kw)
self.rename_opts(domain['name'], ldap_kw)
self.rename_opts(domain['name'], krb5_kw)
# remove obsolete libPath option
self.delete_option_subtree(domain['value'], 'option', 'libPath')
# configuration files before 0.5.0 did not enforce provider= in local domains
# it did special-case by domain name (LOCAL)
prvindex, prv = self.findOpts(domain['value'], 'option', 'id_provider')
if not prv and domain['name'] == 'domain/LOCAL':
prv = { 'type' : 'option',
'name' : 'id_provider',
'value' : 'local',
}
domain['value'].insert(0, prv)
# if domain was local, update with parameters from [user_defaults]
if prv['value'] == 'local':
self._update_option(domain['name'], 'user_defaults', user_defaults_kw.values())
self.delete_option('section', 'user_defaults')
self.rename_opts(domain['name'], user_defaults_kw)
# if domain had provider = files, unroll that into provider=proxy, proxy_lib_name=files
if prv['value'] == 'files':
prv['value'] = 'proxy'
libkw = { 'type' : 'option',
'name' : 'proxy_lib_name',
'value' : 'files',
}
domain['value'].insert(prvindex+1, libkw)
def _migrate_domains(self):
for domain in [ s for s in self.sections() if s['name'].startswith("domains/") ]:
self._migrate_domain(domain)
def _update_if_exists(self, opt, to_name, from_section, from_name):
index, item = self.get_option_index(from_section, from_name)
if item:
item['name'] = to_name
opt.append(item)
def _migrate_services(self):
# [service] - options common to all services, no section as in v1
service_kw = { 'reconnection_retries' : 'reconnection_retries',
'debug_level' : 'debug-level',
'debug_timestamps' : 'debug-timestamps',
'command' : 'command',
'timeout' : 'timeout',
}
# rename services sections
names_kw = { 'nss' : 'services/nss',
'pam' : 'services/pam',
'dp' : 'services/dp',
}
self.rename_opts(None, names_kw, 'section')
# [sssd] - monitor service
sssd_kw = [
{ 'type' : 'option',
'name' : 'config_file_version',
'value' : '2',
'action': 'set',
}
]
self._update_if_exists(sssd_kw, 'domains',
'domains', 'domains')
self._update_if_exists(sssd_kw, 'services',
'services', 'activeServices')
self._update_if_exists(sssd_kw, 'sbus_timeout',
'services/monitor', 'sbusTimeout')
self._update_if_exists(sssd_kw, 're_expression',
'names', 're-expression')
self._update_if_exists(sssd_kw, 're_expression',
'names', 'full-name-format')
self.add_section('sssd', sssd_kw)
# update from general services section and monitor
self._update_option('sssd', 'services', service_kw.values())
self._update_option('sssd', 'services/monitor', service_kw.values())
# [nss] - Name service
nss_kw = { 'enum_cache_timeout' : 'EnumCacheTimeout',
'entry_cache_timeout' : 'EntryCacheTimeout',
'entry_cache_nowait_timeout' : 'EntryCacheNoWaitRefreshTimeout',
'entry_negative_timeout ' : 'EntryNegativeTimeout',
'filter_users' : 'filterUsers',
'filter_groups' : 'filterGroups',
'filter_users_in_groups' : 'filterUsersInGroups',
}
nss_kw.update(service_kw)
self._update_option('nss', 'services', service_kw.values())
self.rename_opts('nss', nss_kw)
# [pam] - Authentication service
pam_kw = {}
pam_kw.update(service_kw)
self._update_option('pam', 'services', service_kw.values())
self.rename_opts('pam', pam_kw)
# remove obsolete sections
self.delete_option('section', 'services')
self.delete_option('section', 'names')
self.delete_option('section', 'domains')
self.delete_option('section', 'services/monitor')
def v2_changes(self, out_file_name, backup=True):
# read in the old file, make backup if needed
if backup:
self._backup_file(self.filename)
self._do_v2_changes()
# all done, write the file
of = open(out_file_name, "wb")
output = self.dump(self.opts)
of.write(output)
of.close()
# make sure it has the right permissions too
os.chmod(out_file_name, 0o600)
def upgrade_v2(self, out_file_name, backup=True):
# read in the old file, make backup if needed
if backup:
self._backup_file(self.filename)
# do the migration to v2 format
# do the upgrade
self._migrate_services()
self._migrate_domains()
# also include any changes in the v2 format
self._do_v2_changes()
# all done, write the file
of = open(out_file_name, "wb")
output = self.dump(self.opts)
of.write(output)
of.close()
# make sure it has the right permissions too
os.chmod(out_file_name, 0o600)
def parse_options():
parser = OptionParser()
parser.add_option("-f", "--file",
dest="filename", default="/etc/sssd/sssd.conf",
help="Set input file to FILE", metavar="FILE")
parser.add_option("-o", "--outfile",
dest="outfile", default=None,
help="Set output file to OUTFILE", metavar="OUTFILE")
parser.add_option("", "--no-backup", action="store_false",
dest="backup", default=True,
help="""Do not provide backup file after conversion.
The script copies the original file with the suffix .bak
by default""")
parser.add_option("-v", "--verbose", action="store_true",
dest="verbose", default=False,
help="Be verbose")
(options, args) = parser.parse_args()
if len(args) > 0:
        print("Stray arguments: %s" % ' '.join(args), file=sys.stderr)
return None
# do the conversion in place by default
if not options.outfile:
options.outfile = options.filename
return options
def verbose(msg, verbose):
if verbose:
print(msg)
def main():
options = parse_options()
if not options:
        print("Cannot parse options", file=sys.stderr)
return 1
try:
config = SSSDConfigFile(options.filename)
except SyntaxError:
verbose(traceback.format_exc(), options.verbose)
        print("Cannot parse config file %s" % options.filename, file=sys.stderr)
return 1
except Exception as e:
print("ERROR: %s" % e)
verbose(traceback.format_exc(), options.verbose)
return 1
# make sure we keep strict settings when creating new files
os.umask(0o077)
version = config.get_version()
if version == 2:
verbose("Looks like v2, only checking changes", options.verbose)
try:
config.v2_changes(options.outfile, options.backup)
except Exception as e:
print("ERROR: %s" % e)
verbose(traceback.format_exc(), options.verbose)
return 1
elif version == 1:
verbose("Looks like v1, performing full upgrade", options.verbose)
try:
config.upgrade_v2(options.outfile, options.backup)
except Exception as e:
print("ERROR: %s" % e)
verbose(traceback.format_exc(), options.verbose)
return 1
else:
print("Can only upgrade from v1 to v2, file %s looks like version %d" % (options.filename, config.get_version()), file=sys.stderr)
return 1
return 0
if __name__ == "__main__":
ret = main()
sys.exit(ret)
| spbnick/sssd | src/config/SSSDConfig/sssd_upgrade_config.py | Python | gpl-3.0 | 18,663 | 0.008359 |
# Copyright (c) 2001-2014, Canal TP and/or its affiliates. All rights reserved.
#
# This file is part of Navitia,
# the software to build cool stuff with public transport.
#
# Hope you'll enjoy and contribute to this project,
# powered by Canal TP (www.canaltp.fr).
# Help us simplify mobility and open public transport:
# a non ending quest to the responsive locomotion way of traveling!
#
# LICENCE: This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Stay tuned using
# twitter @navitia
# IRC #navitia on freenode
# https://groups.google.com/d/forum/navitia
# www.navitia.io
from __future__ import absolute_import, print_function, unicode_literals, division
import logging
from navitiacommon import models
from .tests_mechanism import AbstractTestFixture, dataset
from .check_utils import *
from contextlib import contextmanager
from flask import appcontext_pushed, g
from jormungandr import app
import json
from nose.tools import eq_
authorizations = {
'bob': {
"main_routing_test": {'ALL': True},
"departure_board_test": {'ALL': False},
"empty_routing_test": {'ALL': False}
},
'bobette': {
#bobette cannot access anything
"main_routing_test": {'ALL': False},
"departure_board_test": {'ALL': False},
"empty_routing_test": {'ALL': False}
},
'bobitto': {
#bobitto can access all since empty_routing_test is free
"main_routing_test": {'ALL': True},
"departure_board_test": {'ALL': True},
"empty_routing_test": {'ALL': False}
},
'tgv': {
#tgv can only access main_routing_test
"main_routing_test": {'ALL': True},
"departure_board_test": {'ALL': False},
"empty_routing_test": {'ALL': False}
},
'test_user_blocked': {
"main_routing_test": {'ALL': True},
"departure_board_test": {'ALL': True},
"empty_routing_test": {'ALL': True}
},
'test_user_not_blocked': {
"main_routing_test": {'ALL': True},
"departure_board_test": {'ALL': True},
"empty_routing_test": {'ALL': True}
},
}
class FakeUser:
"""
We create a user independent from a database
"""
def __init__(self, name, id, have_access_to_free_instances=True, is_super_user=False, is_blocked=False):
"""
We just need a fake user, we don't really care about its identity
"""
self.id = id
self.login = name
self.have_access_to_free_instances = have_access_to_free_instances
self.is_super_user = is_super_user
self.end_point_id = None
self._is_blocked = is_blocked
@classmethod
def get_from_token(cls, token):
"""
Create an empty user
"""
return user_in_db[token]
def has_access(self, instance_name, api_name):
"""
This is made to avoid using of database
"""
return authorizations[self.login][instance_name][api_name]
def is_blocked(self, datetime_utc):
"""
Return True if user is blocked else False
"""
return self._is_blocked
class FakeInstance(models.Instance):
def __init__(self, name, is_free):
self.name = name
self.is_free = is_free
self.id = name
@classmethod
def get_by_name(cls, name):
return mock_instances.get(name)
user_in_db = {
'bob': FakeUser('bob', 1),
'bobette': FakeUser('bobette', 2),
'bobitto': FakeUser('bobitto', 3),
'tgv': FakeUser('tgv', 4, have_access_to_free_instances=False),
'test_user_blocked': FakeUser('test_user_blocked', 5, True, False, True),
'test_user_not_blocked': FakeUser('test_user_not_blocked', 6, True, False, False)
}
mock_instances = {
'main_routing_test': FakeInstance('main_routing_test', False),
'departure_board_test': FakeInstance('departure_board_test', False),
'empty_routing_test': FakeInstance('empty_routing_test', True),
}
@contextmanager
def user_set(app, user_name):
"""
add user
"""
def handler(sender, **kwargs):
g.user = FakeUser.get_from_token(user_name)
with appcontext_pushed.connected_to(handler, app):
yield
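# Usage sketch: the tests below wrap requests in "with user_set(app, 'bob'):"
# so flask.g.user is populated from the fake users above instead of a real
# database lookup.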
class AbstractTestAuthentication(AbstractTestFixture):
def setUp(self):
self.old_public_val = app.config['PUBLIC']
app.config['PUBLIC'] = False
self.app = app.test_client()
self.old_instance_getter = models.Instance.get_by_name
models.Instance.get_by_name = FakeInstance.get_by_name
def tearDown(self):
app.config['PUBLIC'] = self.old_public_val
models.Instance.get_by_name = self.old_instance_getter
@dataset({"main_routing_test": {}, "departure_board_test": {}})
class TestBasicAuthentication(AbstractTestAuthentication):
def test_coverage(self):
"""
User only has access to the first region
"""
with user_set(app, 'bob'):
response_obj = self.app.get('/v1/coverage')
response = json.loads(response_obj.data)
assert('regions' in response)
assert(len(response['regions']) == 1)
assert(response['regions'][0]['id'] == "main_routing_test")
def test_auth_required(self):
"""
        if no token is given we are asked to log in (code 401) and a challenge is sent (header WWW-Authenticate)
"""
response_obj = self.app.get('/v1/coverage')
assert response_obj.status_code == 401
assert 'WWW-Authenticate' in response_obj.headers
def test_status_code(self):
"""
        We query the api with user 1, who has access to the main routing test and not to the departure board
"""
requests_status_codes = [
('/v1/coverage/main_routing_test', 200),
('/v1/coverage/departure_board_test', 403),
            # stopA and stopB are in main routing test, all is ok
('/v1/journeys?from=stopA&to=stopB&datetime=20120614T080000', 200),
            # stop2 is in departure board -> KO
('/v1/journeys?from=stopA&to=stop2&datetime=20120614T080000', 403),
# stop1 and stop2 are in departure board -> KO
('/v1/journeys?from=stop1&to=stop2&datetime=20120614T080000', 403)
]
with user_set(app, 'bob'):
for request, status_code in requests_status_codes:
assert(self.app.get(request).status_code == status_code)
def test_unkown_region(self):
"""
        the authentication process must not get in the way when the region is not found
"""
with user_set(app, 'bob'):
r, status = self.query_no_assert('/v1/coverage/the_marvelous_unknown_region/stop_areas')
assert status == 404
assert 'error' in r
assert get_not_null(r, 'error')['message'] \
== "The region the_marvelous_unknown_region doesn't exists"
@dataset({"main_routing_test": {}})
class TestIfUserIsBlocked(AbstractTestAuthentication):
def test_status_code(self):
"""
We query the api with user 5 who must be blocked
"""
requests_status_codes = [
('/v1/coverage/main_routing_test', 429),
('/v1/coverage/departure_board_test', 429)
]
with user_set(app, 'test_user_blocked'):
for request, status_code in requests_status_codes:
assert(self.app.get(request).status_code == status_code)
@dataset({"main_routing_test": {}})
class TestIfUserIsNotBlocked(AbstractTestAuthentication):
def test_status_code(self):
"""
We query the api with user 6 who must not be blocked
"""
requests_status_codes = [('/v1/coverage/main_routing_test', 200)]
with user_set(app, 'test_user_not_blocked'):
for request, status_code in requests_status_codes:
assert(self.app.get(request).status_code == status_code)
@dataset({"main_routing_test": {}, "departure_board_test": {}, "empty_routing_test": {}})
class TestOverlappingAuthentication(AbstractTestAuthentication):
def test_coverage(self):
with user_set(app, 'bobitto'):
response = self.query('v1/coverage')
r = get_not_null(response, 'regions')
region_ids = {region['id']: region for region in r}
assert len(region_ids) == 3
assert 'main_routing_test' in region_ids
assert 'departure_board_test' in region_ids
            # bobitto does not have access to this region, but the region is free, so it is listed anyway
assert 'empty_routing_test' in region_ids
with user_set(app, 'bobette'):
response = self.query('v1/coverage')
r = get_not_null(response, 'regions')
region_ids = {region['id']: region for region in r}
assert len(region_ids) == 1
# bobette does not have access to anything, so we only have the free region here
assert 'empty_routing_test' in region_ids
with user_set(app, 'tgv'):
response = self.query('v1/coverage')
r = get_not_null(response, 'regions')
region_ids = {region['id']: region for region in r}
assert len(region_ids) == 1
# tgv must not see free regions
assert 'empty_routing_test' not in region_ids
def test_pt_ref_for_bobitto(self):
with user_set(app, 'bobitto'):
response = self.query('v1/coverage/main_routing_test/stop_points')
assert 'error' not in response
response = self.query('v1/coverage/departure_board_test/stop_points')
assert 'error' not in response
            # the empty region has no stop points, but we check that there are no authentication errors
response, status = self.query_no_assert('v1/coverage/empty_routing_test/stop_points')
assert status == 404
assert 'error' in response
assert 'unknown_object' in response['error']['id']
def test_pt_ref_for_bobette(self):
with user_set(app, 'bobette'):
_, status = self.query_no_assert('v1/coverage/main_routing_test/stop_points')
assert status == 403
_, status = self.query_no_assert('v1/coverage/departure_board_test/stop_points')
assert status == 403
_, status = self.query_no_assert('v1/coverage/empty_routing_test/stop_points')
            assert status == 404  # same as for bobitto: we have access to the region, but no stops
def test_stop_schedules_for_bobette(self):
with user_set(app, 'bobette'):
_, status = self.query_no_assert('v1/coverage/main_routing_test/stop_areas/stopA/stop_schedules')
assert status == 403
_, status = self.query_no_assert('v1/coverage/departure_board_test/stop_areas/stop1/stop_schedules')
assert status == 403
#we get a 404 (because 'stopbidon' cannot be found) and not a 403
_, status = self.query_no_assert('v1/coverage/empty_routing_test/stop_areas/'
'stopbidon/stop_schedules')
assert status == 404
def test_stop_schedules_for_tgv(self):
with user_set(app, 'tgv'):
response = self.query('v1/coverage/main_routing_test/stop_areas/stopA/stop_schedules?from_datetime=20120614T080000')
assert 'error' not in response
_, status = self.query_no_assert('v1/coverage/departure_board_test/stop_areas/stop1/stop_schedules')
eq_(status, 403)
_, status = self.query_no_assert('v1/coverage/empty_routing_test/stop_areas/'
'stopbidon/stop_schedules')
eq_(status, 403)
def test_stop_schedules_for_bobitto(self):
with user_set(app, 'bobitto'):
response = self.query('v1/coverage/main_routing_test/stop_areas/'
'stopA/stop_schedules?from_datetime=20120614T080000')
assert 'error' not in response
response = self.query('v1/coverage/departure_board_test/stop_areas/'
'stop1/stop_schedules?from_datetime=20120614T080000')
assert 'error' not in response
_, status = self.query_no_assert('v1/coverage/empty_routing_test/stop_areas/'
'stopbidon/stop_schedules?from_datetime=20120614T080000')
assert status == 404
def test_journeys_for_bobitto(self):
with user_set(app, 'bobitto'):
response = self.query('/v1/journeys?from=stopA&to=stopB&datetime=20120614T080000')
assert 'error' not in response
response = self.query('/v1/journeys?from=stop1&to=stop2&datetime=20120614T080000')
assert 'error' not in response
def test_journeys_for_tgv(self):
with user_set(app, 'tgv'):
response = self.query('/v1/journeys?from=stopA&to=stopB&datetime=20120614T080000')
assert 'error' not in response
_, status = self.query_no_assert('/v1/journeys?from=stop1&to=stop2&datetime=20120614T080000')
eq_(status, 403)
_, status = self.query_no_assert('/v1/coverage/empty_routing_test/journeys?from=stop1&to=stop2&datetime=20120614T080000')
eq_(status, 403)
def test_wrong_journeys_for_bobitto(self):
"""
we query with one stop for main_routing and one from departure_board,
we have access to both, but we get an error
"""
with user_set(app, 'bobitto'):
response, status = self.query_no_assert('/v1/journeys?from=stopA&to=stop2&datetime=20120614T080000')
assert status == 404
assert 'error' in response and response['error']['id'] == "unknown_object"
def test_journeys_for_bobette(self):
"""
        bobette only has access to the empty region, which overlaps the main routing test,
        so she cannot reach any region by stops,
        but by coordinates she can query the empty_routing_test region
"""
with user_set(app, 'bobette'):
response, status = self.query_no_assert('/v1/journeys?from=stopA&to=stopB&datetime=20120614T080000')
assert status == 403
response, status = self.query_no_assert('/v1/journeys?from=stop1&to=stop2&datetime=20120614T080000')
assert status == 403
response, status = self.query_no_assert('/v1/journeys?from={from_coord}&to={to_coord}&datetime={d}'
.format(from_coord=s_coord, to_coord=r_coord, d='20120614T08'))
assert 'error' in response and response['error']['id'] == "no_origin_nor_destination"
assert status == 404
def test_places_for_bobitto(self):
with user_set(app, "bobitto"):
response = self.query('v1/coverage/main_routing_test/places?q=toto')
assert 'error' not in response
response = self.query('v1/coverage/departure_board_test/places?q=toto')
assert 'error' not in response
response = self.query('v1/coverage/empty_routing_test/places?q=toto')
assert 'error' not in response
            # this test supposes no elasticsearch is launched at localhost
_, status = self.query_no_assert('v1/places?q=toto')
assert status == 500
def test_places_for_bobette(self):
with user_set(app, "bobette"):
_, status = self.query_no_assert('v1/coverage/main_routing_test/places?q=toto')
assert status == 403
_, status = self.query_no_assert('v1/coverage/departure_board_test/places?q=toto')
assert status == 403
response = self.query('v1/coverage/empty_routing_test/places?q=toto')
assert 'error' not in response
            # this test supposes no elasticsearch is launched at localhost
_, status = self.query_no_assert('v1/places?q=toto')
assert status == 500
def test_places_for_tgv(self):
with user_set(app, "tgv"):
response = self.query('v1/coverage/main_routing_test/places?q=toto')
assert 'error' not in response
_, status = self.query_no_assert('v1/coverage/departure_board_test/places?q=toto')
assert status == 403
_, status = self.query_no_assert('v1/coverage/empty_routing_test/places?q=toto')
assert status == 403
            # this test supposes no elasticsearch is launched at localhost
_, status = self.query_no_assert('v1/places?q=toto')
assert status == 500
def test_sort_coverage(self):
with user_set(app, 'bobitto'):
response = self.query('v1/coverage')
regions = get_not_null(response, 'regions')
assert len(regions) == 3
assert regions[0]["name"] == 'departure board'
assert regions[1]["name"] == 'empty routing'
assert regions[2]["name"] == 'routing api data'
#TODO add more tests on:
# * coords
# * disruptions
# * get by external code ?
| lrocheWB/navitia | source/jormungandr/tests/authentication_tests.py | Python | agpl-3.0 | 17,792 | 0.003147 |
from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
from indra.databases import hgnc_client
from indra.util import unicode_strs
from nose.plugins.attrib import attr
def test_get_uniprot_id():
hgnc_id = '6840'
uniprot_id = hgnc_client.get_uniprot_id(hgnc_id)
assert uniprot_id == 'Q02750'
assert unicode_strs(uniprot_id)
def test_get_uniprot_id_none():
# This HGNC entry doesn't have a UniProt ID
hgnc_id = '37187'
uniprot_id = hgnc_client.get_uniprot_id(hgnc_id)
assert uniprot_id is None, uniprot_id
def test_get_hgnc_name():
hgnc_id = '3236'
hgnc_name = hgnc_client.get_hgnc_name(hgnc_id)
assert hgnc_name == 'EGFR'
assert unicode_strs(hgnc_name)
@attr('webservice')
def test_get_hgnc_name_nonexistent():
hgnc_id = '123456'
hgnc_name = hgnc_client.get_hgnc_name(hgnc_id)
assert hgnc_name is None
assert unicode_strs(hgnc_name)
def test_entrez_hgnc():
entrez_id = '653509'
hgnc_id = hgnc_client.get_hgnc_from_entrez(entrez_id)
assert hgnc_id == '10798'
def test_entrez_hgnc_none():
entrez_id = 'xxx'
hgnc_id = hgnc_client.get_hgnc_from_entrez(entrez_id)
assert hgnc_id is None
def test_ensembl_hgnc():
ensembl_id = 'ENSG00000006071'
hgnc_id = hgnc_client.get_hgnc_from_ensembl(ensembl_id)
assert hgnc_id == '59', hgnc_id
assert hgnc_client.get_ensembl_id(hgnc_id) == ensembl_id
def test_mouse_map():
hgnc_id1 = hgnc_client.get_hgnc_from_mouse('109599')
hgnc_id2 = hgnc_client.get_hgnc_from_mouse('MGI:109599')
assert hgnc_id1 == '4820'
assert hgnc_id2 == '4820'
hgnc_id = hgnc_client.get_hgnc_from_mouse('xxx')
assert hgnc_id is None
def test_rat_map():
hgnc_id1 = hgnc_client.get_hgnc_from_rat('6496784')
hgnc_id2 = hgnc_client.get_hgnc_from_rat('RGD:6496784')
assert hgnc_id1 == '44155'
assert hgnc_id2 == '44155'
hgnc_id = hgnc_client.get_hgnc_from_rat('xxx')
assert hgnc_id is None
def test_is_category():
assert hgnc_client.is_kinase('MAPK1')
assert not hgnc_client.is_kinase('EGF')
assert hgnc_client.is_phosphatase('PTEN')
assert not hgnc_client.is_phosphatase('KRAS')
assert hgnc_client.is_transcription_factor('FOXO3')
assert not hgnc_client.is_transcription_factor('AKT1')
def test_get_current_id():
# Current symbol
assert hgnc_client.get_current_hgnc_id('BRAF') == '1097'
# Outdated symbol, one ID
assert hgnc_client.get_current_hgnc_id('SEPT7') == '1717'
# Outdated symbol, multiple IDs
ids = hgnc_client.get_current_hgnc_id('HOX1')
assert len(ids) == 10
assert '5101' in ids
def test_gene_type():
assert hgnc_client.get_gene_type('1097') == 'gene with protein product'
assert hgnc_client.get_gene_type('31547') == 'RNA, micro'
| sorgerlab/belpy | indra/tests/test_hgnc_client.py | Python | mit | 2,834 | 0 |
"""
File: chromatic_tonal_reflection_function.py
Purpose: Class defining a function that tonally reflects over a given tone.
"""
from tonalmodel.diatonic_foundation import DiatonicFoundation
from tonalmodel.tonality import Tonality
from transformation.functions.pitchfunctions.diatonic_pitch_reflection_function import FlipType
from transformation.functions.tonalfunctions.tonal_function import TonalFunction
from tonalmodel.interval import Interval
class ChromaticTonalReflectionFunction(TonalFunction):
def __init__(self, domain_tonality, cue_tone, reflect_type=FlipType.CenterTone):
"""
Constructor
:param domain_tonality: Scalar tonality being reflected
:param cue_tone: Cue tone for reflection (must be in domain tonality).
:param reflect_type: See FlipType for types of reflection.
"""
self.__domain_tonality = domain_tonality
self.__cue_tone = cue_tone
self.__reflect_type = reflect_type
if cue_tone not in domain_tonality.annotation:
raise Exception('Cue tone {0} is not in tonality {1}.'.format(cue_tone.diatonic_symbol, domain_tonality))
self.__primary_map, tonality_list = self._build_primary_map()
if len(tonality_list) == 0:
            raise Exception('Tonal reflection on {0} cue {1} could not resolve range tonality.'.format(
                self.domain_tonality, self.cue_tone))
        # We likely should do some kind of matching of domain to range, e.g. minor-type --> minor-type.
# TODO: Explore how to improve this setting when tonality_list has more than 1 element.
self.__range_tonality = tonality_list[0]
TonalFunction.__init__(self, self.domain_tonality, self.range_tonality, self.tonal_map,
self._build_extension_map())
@property
def cue_tone(self):
return self.__cue_tone
@property
def reflect_type(self):
return self.__reflect_type
@property
def tonal_map(self):
return self.__primary_map
@property
def domain_tonality(self):
return self.__domain_tonality
@property
def range_tonality(self):
return self.__range_tonality
def _build_primary_map(self):
domain_scale = self.domain_tonality.annotation[:-1]
tonal_map = dict()
if self.reflect_type == FlipType.CenterTone:
for tone in domain_scale:
interval = Interval.calculate_tone_interval(tone, self.cue_tone)
end_tone = interval.get_end_tone(self.cue_tone)
tonal_map[tone] = end_tone
else:
if self.reflect_type == FlipType.LowerNeighborOfPair:
lower_index = domain_scale.index(self.cue_tone)
upper_index = (lower_index + 1) % len(domain_scale)
else:
upper_index = domain_scale.index(self.cue_tone)
lower_index = (upper_index - 1) % len(domain_scale)
tonal_map[domain_scale[upper_index]] = domain_scale[lower_index]
tonal_map[domain_scale[lower_index]] = domain_scale[upper_index]
last_lower = domain_scale[lower_index]
last_upper = domain_scale[upper_index]
for i in list(reversed(range(0, lower_index))):
new_lower = domain_scale[i]
interval = Interval.calculate_tone_interval(new_lower, last_lower)
new_upper = interval.get_end_tone(last_upper)
tonal_map[new_lower] = new_upper
last_lower = new_lower
last_upper = new_upper
last_lower = domain_scale[lower_index]
last_upper = domain_scale[upper_index]
for i in list(range((upper_index + 1), len(domain_scale))):
new_upper = domain_scale[i]
interval = Interval.calculate_tone_interval(last_upper, new_upper)
new_lower = interval.negation().get_end_tone(last_lower)
tonal_map[new_upper] = new_lower
last_lower = new_lower
last_upper = new_upper
range_tones = list(reversed([tonal_map[tone] for tone in domain_scale]))
first_tone = range_tones[-1]
range_tones = [first_tone] + range_tones[:-1]
        # Determine candidate tonalities for the range (find_tonality returns a list).
        range_tonalities = Tonality.find_tonality(range_tones)
        return tonal_map, range_tonalities
def _build_extension_map(self):
ltrs = 'CDEFGAB'
extension = dict()
domain_scale = self.domain_tonality.annotation[:-1]
domain_start_index = ltrs.index(domain_scale[0].diatonic_letter)
domain_index_list = list(ltrs[domain_start_index:] + ltrs[:domain_start_index])
# One time calculations based on lower upper
if self.reflect_type != FlipType.CenterTone:
if self.reflect_type == FlipType.LowerNeighborOfPair:
lower_domain_index = domain_scale.index(self.cue_tone)
upper_domain_index = (lower_domain_index + 1) % len(domain_scale)
else:
upper_domain_index = domain_scale.index(self.cue_tone)
lower_domain_index = (upper_domain_index - 1) % len(domain_scale)
lower_tone = domain_scale[lower_domain_index]
upper_tone = domain_scale[upper_domain_index]
lower_ltr_index = domain_index_list.index(lower_tone.diatonic_letter)
lower_augmentation = lower_tone.augmentation_offset
upper_ltr_index = domain_index_list.index(upper_tone.diatonic_letter)
upper_augmentation = upper_tone.augmentation_offset
else:
lower_tone = None
upper_tone = None
lower_ltr_index = None
lower_augmentation = None
upper_ltr_index = None
upper_augmentation = None
for l in 'CDEFGAB':
for aug in ['bb', 'b', '', '#', "##"]:
tone = DiatonicFoundation.get_tone(l + aug)
if tone not in self.tonal_map.keys():
if self.reflect_type == FlipType.CenterTone:
interval = Interval.calculate_tone_interval(tone, self.cue_tone)
if interval: # Some intervals are illegal, eg Cbb --> C, for now ignore
end_tone = interval.get_end_tone(self.cue_tone)
extension[tone] = end_tone
else:
tone_ltr_index = domain_index_list.index(tone.diatonic_letter)
tone_augmentation = tone.augmentation_offset
if tone_ltr_index >= 0 and (tone_ltr_index < lower_ltr_index or
(tone_ltr_index == lower_ltr_index and
tone_augmentation <= lower_augmentation)):
interval = Interval.calculate_tone_interval(tone, lower_tone)
if interval:
upper = interval.get_end_tone(upper_tone)
extension[tone] = upper
elif tone_ltr_index < len(domain_index_list) and (tone_ltr_index > upper_ltr_index or
(tone_ltr_index == upper_ltr_index and
tone_augmentation >= upper_augmentation)):
interval = Interval.calculate_tone_interval(tone, upper_tone)
if interval:
new_lower = interval.get_end_tone(lower_tone)
extension[tone] = new_lower
else: # Between the two limits
upper_interval = Interval.calculate_tone_interval(tone, upper_tone)
lower_interval = Interval.calculate_tone_interval(lower_tone, tone)
if upper_interval is None and lower_interval is None:
continue
elif upper_interval is None:
extension[tone] = upper_tone
elif lower_interval is None:
extension[tone] = lower_tone
else:
if abs(lower_interval.chromatic_distance) <= abs(upper_interval.chromatic_distance):
extension[tone] = lower_interval.negation().get_end_tone(upper_tone)
else:
extension[tone] = upper_interval.negation().get_end_tone(lower_tone)
return extension
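# Minimal usage sketch; tonality construction is omitted because the exact
# factory calls live elsewhere in the tonalmodel package (assumed here):
#
#   cue = DiatonicFoundation.get_tone('E')
#   f = ChromaticTonalReflectionFunction(c_major_tonality, cue, FlipType.CenterTone)
#   # f maps each scale tone of the domain tonality to its reflection
#   # about the cue tone (see _build_primary_map above).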
| dpazel/music_rep | transformation/functions/tonalfunctions/chromatic_tonal_reflection_function.py | Python | mit | 8,810 | 0.003973 |
from kivy.uix.label import Label
from kivy.uix.behaviors import ButtonBehavior
from widgets.layoutint import GridLayoutInt
from kivy.uix.image import Image
from kivy.properties import StringProperty, ListProperty, ObjectProperty, NumericProperty, BooleanProperty
from kivy.compat import string_types
from kivy.factory import Factory
class BarMiddleLabel(Label):
pass
class BarMiddleImage(Image):
pass
class BarMiddleButton(ButtonBehavior, GridLayoutInt):
title = StringProperty()
class Bar(GridLayoutInt):
__events__ = ('on_left_click', 'on_right_click')
screen = ObjectProperty()
color = ListProperty([1, 1, 1, 1])
left_icon = StringProperty('')
right_icon = StringProperty('')
hide_right_icon = BooleanProperty(False)
middle_cls = ObjectProperty(None, allownone=True)
middle = ObjectProperty()
shadow_height = NumericProperty(0)
def __init__(self, **kwargs):
super(Bar, self).__init__(**kwargs)
self._resolve_middle_cls()
self.bind(middle_cls=self._resolve_middle_cls)
def _resolve_middle_cls(self, *args):
if not self.middle_cls:
return
if self.middle:
self.remove_widget(self.middle)
middle_cls = self.middle_cls
if isinstance(middle_cls, string_types):
middle_cls = Factory.get(middle_cls)
self.middle = middle_cls(screen=self.screen)
self.add_widget(self.middle, 1)
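    # Note: middle_cls accepts either a widget class or a Factory-registered
    # class name, e.g. bar.middle_cls = 'BarMiddleLabel' resolves via the
    # Factory.get call above.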
def on_left_click(self, button_box):
pass
def on_right_click(self, button_box):
pass
| insiderr/insiderr-app | app/widgets/bar.py | Python | gpl-3.0 | 1,558 | 0.000642 |
# dirtool.py - diff tool for directories
# Copyright (C) 2018 Ingo Ruhnke <grumbel@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from PyQt5.QtCore import QObject
from abc import abstractmethod
class ExtractorResult:
SUCCESS = 0
FAILURE = 1
WORKING = 2
@staticmethod
def success(message: str = "") -> 'ExtractorResult':
return ExtractorResult(ExtractorResult.SUCCESS, message)
@staticmethod
def failure(message: str) -> 'ExtractorResult':
return ExtractorResult(ExtractorResult.FAILURE, message)
def __init__(self, status: int, message: str = "") -> None:
self.status = status
self.message = message
def __str__(self) -> str:
return "ExtractorResult({}, \"{}\")".format(self.status, self.message)
class Extractor(QObject):
def __init__(self) -> None:
super().__init__()
@property
@abstractmethod
def sig_entry_extracted(self):
pass
@property
@abstractmethod
def sig_finished(self):
pass
@abstractmethod
def extract(self) -> ExtractorResult:
pass
def interrupt(self) -> None:
pass
def make_extractor(filename: str, outdir: str) -> Extractor:
from dirtools.rar_extractor import RarExtractor
from dirtools.sevenzip_extractor import SevenZipExtractor
from dirtools.libarchive_extractor import LibArchiveExtractor
# FIXME: Use mime-type to decide proper extractor
if filename.lower().endswith(".rar"):
extractor = RarExtractor(filename, outdir)
elif True: # pylint: disable=using-constant-test
extractor = SevenZipExtractor(filename, outdir)
else:
extractor = LibArchiveExtractor(filename, outdir)
return extractor
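# Usage sketch (paths are illustrative):
#
#   extractor = make_extractor("archive.rar", "/tmp/out")
#   result = extractor.extract()
#   if result.status == ExtractorResult.FAILURE:
#       print(result.message)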
# EOF #
| Grumbel/dirtool | dirtools/extractor.py | Python | gpl-3.0 | 2,353 | 0.000425 |
from raw import points
from raw import vertices
from raw import edges
from raw import polygons
__all__ = ['points', 'vertices', 'edges', 'polygons']
| selaux/numpy2vtk | numpy2vtk/data/raw/__init__.py | Python | lgpl-3.0 | 150 | 0 |
# This file is part of Codeface. Codeface is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# Copyright 2013, Siemens AG, Mitchell Joblin <mitchell.joblin.ext@siemens.com>
# All Rights Reserved.
'''This class is a container to represent a commit's relationship to the
other commits present in a particular file at the time of the commit.
The analysis is performed on a file-by-file basis. A commit can touch multiple
files; however, this class considers a given commit only in the context of a
single file.'''
import commit
import bisect
class FileDict:
"""
A generic dictionary for saving per-line information.
We assume that this information is available on any line,
and that the information only changes on some lines.
So we only save the information on lines that change that info
and use bisect to retrieve that information (for any line).
"""
    def __init__(self, line_list=None, line_dict=None):
        """
        :rtype : FileDict
        """
        # Note: the original code defined two __init__ methods, so the
        # no-argument variant silently shadowed the other; optional
        # arguments preserve both call patterns.
        if line_list is None:
            line_list = []
        if line_dict is None:
            line_dict = {}
        self.line_list = line_list
        self.line_dict = line_dict
        self.lastItem = line_list[-1] if line_list else -1
def __iter__(self):
return self.line_dict.__iter__()
def get_line_info_raw(self, line_nr):
"""
Returns the info for the given line
(if the line was never set, the info for the last set line
is returned)
:param line_nr: the line to retrieve the information for.
:return: the information for the given line.
"""
i = bisect.bisect_right(self.line_list, line_nr)
info_line = self.line_list[i-1]
return self.line_dict[info_line]
def get_line_info(self, line_nr):
return set(self.get_line_info_raw(line_nr))
def add_line(self, line_nr, info):
"""
Add the given information to the current dictionary.
Note: while filling the dictionary the line_nr argument has to
be incremented (this is only to make sure the caller
gets the intended behavior)!
:param line_nr: the line number of the information
:param info: the information for the current line
"""
if line_nr < self.lastItem:
raise ValueError("can only incrementally add items")
self.line_list.append(line_nr)
# to ensure reliability for the 'bisect_right' call in the function
# 'get_line_info_raw', make sure the lines in the line_list are sorted
self.line_list.sort()
self.line_dict[line_nr] = info
def values(self):
return self.line_dict.values()
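    # Usage sketch: info is stored only on lines where it changes, and bisect
    # resolves queries for any line at or after the first stored one.
    #
    #   fd = FileDict()
    #   fd.add_line(1, ['feature_a'])
    #   fd.add_line(10, ['feature_b'])
    #   fd.get_line_info(5)   # -> {'feature_a'} (last stored line at or before 5)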
class FileCommit:
def __init__(self):
#filename under investigation
self.filename = None
#dictionary of dictionaries key is commit, value is a
#dictionary with keys=lineNumbers value=commitHash, stores
#the line number and corresponding commit hash for every
#line of the file,
self.fileSnapShots = {}
#stores the commit hash of all contributions to the file for a
#particular revision
self.revCmts = []
# dictionary with key = line number, value = function name
file_level = -1
self.functionIds = {file_level:'File_Level'}
# list of function line numbers in sorted order, this is for
# optimizing the process of finding a function Id given a line number
self.functionLineNums = [file_level]
# Function Implementation
self.functionImpl = {}
# True if start/end boundaries of artefacts are available (otherwise,
# only the start of an artefact is known
self.artefact_line_range = False
# source code element list
# stores all source code elements of interest and
# meta data
# NOTE: This does never ever seem to be used. Discuss with
# Mitchell what this was all about
self._src_elem_list = []
# dictionaries with key = line number, value = feature list|feature expression
self.feature_info = FileDict()
self.feature_expression_info = FileDict()
#Getter/Setters
def getFileSnapShots(self):
return self.fileSnapShots
def getFileSnapShot(self):
return self.fileSnapShots.values()[0]
def getFilename(self):
return self.filename
def setCommitList(self, cmtList):
self.revCmts = cmtList
def getrevCmts(self):
return self.revCmts
    def getFuncImpl(self, id):
if id in self.functionImpl:
return self.functionImpl[id]
else:
return []
def setFunctionLines(self, functionIds):
self.functionIds.update(functionIds)
for id in self.functionIds.values():
self.functionImpl.update({id:[]})
self.functionLineNums.extend(sorted(self.functionIds.iterkeys()))
def setSrcElems(self, src_elem_list):
self._src_elem_list.extend(src_elem_list)
def set_feature_infos(self, feature_line_infos):
self.feature_info = feature_line_infos[0]
self.feature_expression_info = feature_line_infos[1]
#Methods
def addFileSnapShot(self, key, dict):
self.fileSnapShots[key] = dict
def findFuncId(self, line_num):
# returns the identifier of a function given a line number
func_id = 'File_Level'
line_num = int(line_num)
        if self.artefact_line_range:
if line_num in self.functionIds:
func_id = self.functionIds[line_num]
else:
i = bisect.bisect_right(self.functionLineNums, line_num)
func_line = self.functionLineNums[i-1]
func_id = self.functionIds[func_line]
return func_id
def getLineCmtId(self, line_num):
## Retrieve the first file snap
line_num = str(line_num)
file_snapshot = self.getFileSnapShot()
return file_snapshot[line_num]
def getLength(self):
return len(self.getFileSnapShot())
def getIndx(self):
return self.getFileSnapShot().keys()
def addFuncImplLine(self, lineNum, srcLine):
id = self.findFuncId(lineNum)
self.functionImpl[id].append(srcLine)
def findFeatureList(self, line_index):
return self.feature_info.get_line_info(int(line_index) + 1)
def findFeatureExpression(self, line_index):
return self.feature_expression_info.get_line_info(int(line_index) + 1)
| bockthom/codeface | codeface/fileCommit.py | Python | gpl-2.0 | 7,096 | 0.002114 |
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe, json
from frappe import _
from frappe.model.mapper import get_mapped_doc
from frappe.utils import get_url, cint
from frappe.utils.user import get_user_fullname
from frappe.utils.print_format import download_pdf
from frappe.desk.form.load import get_attachments
from frappe.core.doctype.communication.email import make
from erpnext.accounts.party import get_party_account_currency, get_party_details
from erpnext.stock.doctype.material_request.material_request import set_missing_values
from erpnext.controllers.buying_controller import BuyingController
STANDARD_USERS = ("Guest", "Administrator")
class RequestforQuotation(BuyingController):
def validate(self):
self.validate_duplicate_supplier()
self.validate_common()
self.update_email_id()
def validate_duplicate_supplier(self):
supplier_list = [d.supplier for d in self.suppliers]
if len(supplier_list) != len(set(supplier_list)):
frappe.throw(_("Same supplier has been entered multiple times"))
def validate_common(self):
pc = frappe.get_doc('Purchase Common')
pc.validate_for_items(self)
def update_email_id(self):
for rfq_supplier in self.suppliers:
if not rfq_supplier.email_id:
rfq_supplier.email_id = frappe.db.get_value("Contact", rfq_supplier.contact, "email_id")
def validate_email_id(self, args):
if not args.email_id:
			frappe.throw(_("Row {0}: For supplier {1} Email Address is required to send email").format(args.idx, args.supplier))
def on_submit(self):
frappe.db.set(self, 'status', 'Submitted')
def on_cancel(self):
frappe.db.set(self, 'status', 'Cancelled')
def send_to_supplier(self):
for rfq_supplier in self.suppliers:
if rfq_supplier.send_email:
self.validate_email_id(rfq_supplier)
# make new user if required
update_password_link = self.update_supplier_contact(rfq_supplier, self.get_link())
self.update_supplier_part_no(rfq_supplier)
self.supplier_rfq_mail(rfq_supplier, update_password_link, self.get_link())
def get_link(self):
# RFQ link for supplier portal
return get_url("/rfq/" + self.name)
def update_supplier_part_no(self, args):
self.vendor = args.supplier
for item in self.items:
item.supplier_part_no = frappe.db.get_value('Item Supplier',
{'parent': item.item_code, 'supplier': args.supplier}, 'supplier_part_no')
def update_supplier_contact(self, rfq_supplier, link):
'''Create a new user for the supplier if not set in contact'''
update_password_link = ''
if frappe.db.exists("User", rfq_supplier.email_id):
user = frappe.get_doc("User", rfq_supplier.email_id)
else:
user, update_password_link = self.create_user(rfq_supplier, link)
self.update_contact_of_supplier(rfq_supplier, user)
return update_password_link
def update_contact_of_supplier(self, rfq_supplier, user):
if rfq_supplier.contact:
contact = frappe.get_doc("Contact", rfq_supplier.contact)
else:
contact = frappe.new_doc("Contact")
contact.first_name = rfq_supplier.supplier_name or rfq_supplier.supplier
contact.supplier = rfq_supplier.supplier
if not contact.email_id and not contact.user:
contact.email_id = user.name
contact.user = user.name
contact.save(ignore_permissions=True)
def create_user(self, rfq_supplier, link):
user = frappe.get_doc({
'doctype': 'User',
'send_welcome_email': 0,
'email': rfq_supplier.email_id,
'first_name': rfq_supplier.supplier_name or rfq_supplier.supplier,
'user_type': 'Website User',
'redirect_url': link
})
user.save(ignore_permissions=True)
update_password_link = user.reset_password()
return user, update_password_link
def supplier_rfq_mail(self, data, update_password_link, rfq_link):
full_name = get_user_fullname(frappe.session['user'])
if full_name == "Guest":
full_name = "Administrator"
args = {
'update_password_link': update_password_link,
'message': frappe.render_template(self.message_for_supplier, data.as_dict()),
'rfq_link': rfq_link,
'user_fullname': full_name
}
subject = _("Request for Quotation")
template = "templates/emails/request_for_quotation.html"
sender = frappe.session.user not in STANDARD_USERS and frappe.session.user or None
message = frappe.get_template(template).render(args)
attachments = self.get_attachments()
self.send_email(data, sender, subject, message, attachments)
def send_email(self, data, sender, subject, message, attachments):
make(subject = subject, content=message,recipients=data.email_id,
sender=sender,attachments = attachments, send_email=True,
doctype=self.doctype, name=self.name)["name"]
frappe.msgprint(_("Email sent to supplier {0}").format(data.supplier))
def get_attachments(self):
attachments = [d.name for d in get_attachments(self.doctype, self.name)]
attachments.append(frappe.attach_print(self.doctype, self.name, doc=self))
return attachments
@frappe.whitelist()
def send_supplier_emails(rfq_name):
check_portal_enabled('Request for Quotation')
rfq = frappe.get_doc("Request for Quotation", rfq_name)
if rfq.docstatus==1:
rfq.send_to_supplier()
def check_portal_enabled(reference_doctype):
if not frappe.db.get_value('Portal Menu Item',
{'reference_doctype': reference_doctype}, 'enabled'):
frappe.throw(_("Request for Quotation is disabled to access from portal, for more check portal settings."))
def get_list_context(context=None):
from erpnext.controllers.website_list_for_contact import get_list_context
list_context = get_list_context(context)
list_context["show_sidebar"] = True
return list_context
# This method is used to make a supplier quotation from the Material Request form.
@frappe.whitelist()
def make_supplier_quotation(source_name, for_supplier, target_doc=None):
def postprocess(source, target_doc):
target_doc.supplier = for_supplier
args = get_party_details(for_supplier, party_type="Supplier", ignore_permissions=True)
target_doc.currency = args.currency or get_party_account_currency('Supplier', for_supplier, source.company)
target_doc.buying_price_list = args.buying_price_list or frappe.db.get_value('Buying Settings', None, 'buying_price_list')
set_missing_values(source, target_doc)
doclist = get_mapped_doc("Request for Quotation", source_name, {
"Request for Quotation": {
"doctype": "Supplier Quotation",
"validation": {
"docstatus": ["=", 1]
}
},
"Request for Quotation Item": {
"doctype": "Supplier Quotation Item",
"field_map": {
"name": "request_for_quotation_item",
"parent": "request_for_quotation"
},
}
}, target_doc, postprocess)
return doclist
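# Server-side usage sketch (document names are illustrative):
#
#   sq = make_supplier_quotation("RFQ-00001", "Example Supplier")
#   sq.insert()  # standard frappe Document method; saves the mapped draft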
# This method is used to make a supplier quotation from the supplier's portal.
@frappe.whitelist()
def create_supplier_quotation(doc):
if isinstance(doc, basestring):
doc = json.loads(doc)
try:
sq_doc = frappe.get_doc({
"doctype": "Supplier Quotation",
"supplier": doc.get('supplier'),
"terms": doc.get("terms"),
"company": doc.get("company"),
"currency": doc.get('currency') or get_party_account_currency('Supplier', doc.get('supplier'), doc.get('company')),
"buying_price_list": doc.get('buying_price_list') or frappe.db.get_value('Buying Settings', None, 'buying_price_list')
})
add_items(sq_doc, doc.get('supplier'), doc.get('items'))
sq_doc.flags.ignore_permissions = True
sq_doc.run_method("set_missing_values")
sq_doc.save()
frappe.msgprint(_("Supplier Quotation {0} created").format(sq_doc.name))
return sq_doc.name
except Exception:
return None
def add_items(sq_doc, supplier, items):
for data in items:
if data.get("qty") > 0:
if isinstance(data, dict):
data = frappe._dict(data)
create_rfq_items(sq_doc, supplier, data)
def create_rfq_items(sq_doc, supplier, data):
sq_doc.append('items', {
"item_code": data.item_code,
"item_name": data.item_name,
"description": data.description,
"qty": data.qty,
"rate": data.rate,
"supplier_part_no": frappe.db.get_value("Item Supplier", {'parent': data.item_code, 'supplier': supplier}, "supplier_part_no"),
"warehouse": data.warehouse or '',
"request_for_quotation_item": data.name,
"request_for_quotation": data.parent
})
@frappe.whitelist()
def get_pdf(doctype, name, supplier_idx):
doc = get_rfq_doc(doctype, name, supplier_idx)
if doc:
download_pdf(doctype, name, doc=doc)
def get_rfq_doc(doctype, name, supplier_idx):
if cint(supplier_idx):
doc = frappe.get_doc(doctype, name)
args = doc.get('suppliers')[cint(supplier_idx) - 1]
doc.update_supplier_part_no(args)
return doc
@frappe.whitelist()
def get_item_from_material_requests_based_on_supplier(source_name, target_doc = None):
mr_items_list = frappe.db.sql("""
SELECT
mr.name, mr_item.item_code
FROM
`tabItem` as item,
`tabItem Supplier` as item_supp,
`tabMaterial Request Item` as mr_item,
`tabMaterial Request` as mr
WHERE item_supp.supplier = %(supplier)s
AND item.name = item_supp.parent
AND mr_item.parent = mr.name
AND mr_item.item_code = item.name
AND mr.status != "Stopped"
AND mr.material_request_type = "Purchase"
AND mr.docstatus = 1
AND mr.per_ordered < 99.99""", {"supplier": source_name}, as_dict=1)
material_requests = {}
for d in mr_items_list:
material_requests.setdefault(d.name, []).append(d.item_code)
for mr, items in material_requests.items():
target_doc = get_mapped_doc("Material Request", mr, {
"Material Request": {
"doctype": "Request for Quotation",
"validation": {
"docstatus": ["=", 1],
"material_request_type": ["=", "Purchase"],
}
},
"Material Request Item": {
"doctype": "Request for Quotation Item",
"condition": lambda row: row.item_code in items,
"field_map": [
["name", "material_request_item"],
["parent", "material_request"],
["uom", "uom"]
]
}
}, target_doc)
return target_doc
| kressi/erpnext | erpnext/buying/doctype/request_for_quotation/request_for_quotation.py | Python | gpl-3.0 | 10,014 | 0.027062 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('budgets', '0003_auto_20150717_0147'),
]
operations = [
migrations.RemoveField(
model_name='budgetelement',
name='subcategory',
),
]
| m1k3r/gvi-accounts | gvi/budgets/migrations/0004_remove_budgetelement_subcategory.py | Python | mit | 364 | 0 |
from vsvbp import container, solver
import argparse, sys, os, re
def parse(inputfile):
""" Parse a file using format from
Brandao et al. [Bin Packing and Related Problems: General Arc-flow Formulation with Graph Compression (2013)]
Format:
d (number of dimensions)
C_1 ... C_d capacities of the bins in each dimension
n number of different items
    w^1_1 ... w^d_1 d_1  (the d requirements of item 1, followed by its demand d_1)
    ...
    w^1_n ... w^d_n d_n
Return: a list of items and a typical bin
"""
inp = inputfile
#inp = open(filename, 'r')
dim = int(inp.readline())
#if dim > 50: return False, False
cap = map(int, inp.readline().split())
assert dim == len(cap)
nitems = int(inp.readline())
items = []
i = 0
for line in inp:
req = map(int, line.split())
dem = req.pop()
assert len(req) == dim
items.extend([container.Item(req) for j in xrange(dem)])
i += 1
assert i == nitems
inp.close()
return items, container.Bin(cap)
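# Example of the expected input format (illustrative values):
#
#   2          number of dimensions
#   10 10      bin capacity in each dimension
#   2          number of distinct items
#   3 4 2      item with requirements (3, 4) and demand 2
#   5 1 1      item with requirements (5, 1) and demand 1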
def natural_sort(l):
convert = lambda text: int(text) if text.isdigit() else text.lower()
alphanum_key = lambda key: [ convert(c) for c in re.split('([0-9]+)', key) ]
return sorted(l, key = alphanum_key)
def get_subdirectories(directory):
dirs = [os.path.join(directory,name) for name in os.listdir(directory)
if os.path.isdir(os.path.join(directory, name))]
return natural_sort(dirs)
def get_files(directory):
files = [os.path.join(directory,name) for name in os.listdir(directory)
if os.path.isfile(os.path.join(directory, name))]
files.sort()
return natural_sort(files)
def optim_dir(directory, level=0):
files = get_files(directory)
for f in files:
optimize(f, level)
def optim_rec(directory, level=0):
subdir = get_subdirectories(directory)
print " "*level+ "|"+"- "+directory.split('/').pop()
if not subdir:
return optim_dir(directory, level+1)
for d in subdir:
optim_rec(d, level+1)
def optimize(filename, level=0):
fl = open(filename)
items, tbin = parse(fl)
if not items:
fl.close()
return
opt = len(solver.optimize(items, tbin, optimize.dp, optimize.seed).bins)
template = "{0:50}{1:10}"
if level == 0:
st = filename.split('/').pop()
print template.format(st, str(opt))
else:
st = " "*level+"| "+filename.split('/').pop()
print template.format(st, str(opt))
fl.close()
sys.stdout.flush()
def run():
parser = argparse.ArgumentParser(description="Run VSVBP heuristics on given instances")
parser.add_argument('-f', type=argparse.FileType('r'),
help="The path to a file containing the bin packing problem to optimize")
parser.add_argument('-d', help="A directory containing (only) files modeling\
bin packing problems to optimize. Optimize all files in the directory.")
parser.add_argument('-r', action='store_true', help="Recursive. If a directory is provided,\
optimize all files in all final subdirectories.")
parser.add_argument('-u', action='store_true', help="If activated, use dot product heuristics")
parser.add_argument('-s', type=int, help="Set seed to specified value")
args = parser.parse_args()
if not (args.f or args.d):
parser.error('No action requested, add -f or -d')
if args.f and args.d:
parser.error('Too many actions requested, add only -f or -d')
if args.r and not args.d:
sys.stderr.write("Warning recursive argument was specified but")
sys.stderr.write(" no directory was provided. Argument ignored.\n")
if args.d and not os.path.isdir(args.d):
parser.error('Invalid directory')
optimize.dp = args.u
optimize.seed = args.s
if args.f:
items, tbin = parse(args.f)
opt = len(solver.optimize(items, tbin, args.u, args.s).bins)
template = "{0:50}{1:10}"
st = args.f.name.split('/').pop()
print template.format(st, str(opt))
elif not args.r:
optim_dir(args.d)
else:
optim_rec(args.d)
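# CLI sketch (flags as defined in run() above):
#
#   python vbp-optim.py -f instance.vbp
#   python vbp-optim.py -d instances/ -r -u -s 42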
if __name__ == "__main__":
run()
| mgabay/Variable-Size-Vector-Bin-Packing | scripts/vbp-optim.py | Python | gpl-3.0 | 4,279 | 0.00631 |
# Copyright 2012-2013 Eric Ptak - trouch.com
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from myDevices.utils.types import toint
from myDevices.devices.i2c import I2C
from myDevices.devices.digital import GPIOPort
class PCF8574(I2C, GPIOPort):
FUNCTIONS = [GPIOPort.IN for i in range(8)]
def __init__(self, slave=0x20):
slave = toint(slave)
if slave in range(0x20, 0x28):
self.name = "PCF8574"
elif slave in range(0x38, 0x40):
self.name = "PCF8574A"
else:
raise ValueError("Bad slave address for PCF8574(A) : 0x%02X not in range [0x20..0x27, 0x38..0x3F]" % slave)
I2C.__init__(self, slave)
GPIOPort.__init__(self, 8)
self.portWrite(0xFF)
self.portRead()
def __str__(self):
return "%s(slave=0x%02X)" % (self.name, self.slave)
def __getFunction__(self, channel):
return self.FUNCTIONS[channel]
def __setFunction__(self, channel, value):
if not value in [self.IN, self.OUT]:
raise ValueError("Requested function not supported")
self.FUNCTIONS[channel] = value
def __digitalRead__(self, channel):
mask = 1 << channel
d = self.readByte()
return (d & mask) == mask
def __portRead__(self):
return self.readByte()
def __digitalWrite__(self, channel, value):
mask = 1 << channel
b = self.readByte()
if value:
b |= mask
else:
b &= ~mask
self.writeByte(b)
def __portWrite__(self, value):
self.writeByte(value)
class PCF8574A(PCF8574):
def __init__(self, slave=0x38):
PCF8574.__init__(self, slave)
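# Usage sketch; the digitalWrite wrapper name is assumed from the GPIOPort
# base class (only the __...__ hooks are defined in this file, while portRead
# is already used in __init__ above):
#
#   port = PCF8574(slave=0x20)
#   port.digitalWrite(3, 1)   # drive P3 high
#   state = port.portRead()   # read all 8 lines as one byte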
| myDevicesIoT/Cayenne-Agent | myDevices/devices/digital/pcf8574.py | Python | mit | 2,274 | 0.006157 |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Detect and count number of ladders in a directory of SGF files
Afterwards it may be useful to inspect the files or thumbnails:
find . -iname '*.sgf' | xargs -P4 -I{} gogui-thumbnailer -size 256 {} {}.png
"""
import sys
sys.path.insert(0, '.')
import os
from collections import Counter, defaultdict
from shutil import copyfile
from absl import app
from tqdm import tqdm
from sgfmill import sgf, sgf_moves
import oneoff_utils
ADJACENT_FOR_LADDER = 10
def subtract(a, b):
return (b[0] - a[0], b[1] - a[1])
def manhattanDistance(a, b):
d = subtract(a, b)
return abs(d[0]) + abs(d[1])
def isLadderIsh(game_path):
with open(game_path) as sgf_file:
game_data = sgf_file.read().encode('utf-8')
g = sgf.Sgf_game.from_bytes(game_data)
_, moves = sgf_moves.get_setup_and_moves(g)
mostAdjacent = 0
mostStart = 0
# colorStart, moveStart
cS, mS = -1, (-2, -2)
adjacent = 0
# colorLast, moveLast
cL, mL = cS, mS
    for i, (c, m) in enumerate(moves, 1):
        if m is None:
            continue
        newColor = c != cL
        # dS: displacement from the first stone of the candidate ladder run;
        # dL: Manhattan distance from the immediately preceding move.
        dS = subtract(mS, m)
        dL = manhattanDistance(mL, m)
        diagonalDistance = abs(abs(dS[0]) - abs(dS[1]))
        # A move continues the ladder if it is the start color, stays within
        # 1 of the start's diagonal and lands 2 from the previous move, or is
        # the other color, stays within 2 of the diagonal and lands adjacent.
        isLadder = ((c == cS and diagonalDistance <= 1 and dL == 2) or
                    (c != cS and diagonalDistance <= 2 and dL == 1))
if newColor and isLadder:
adjacent += 1
if adjacent > mostAdjacent:
mostAdjacent = adjacent
mostStart = i - adjacent
else:
cS = c
mS = m
adjacent = 0
cL, mL = c, m
if mostAdjacent >= ADJACENT_FOR_LADDER:
return (mostAdjacent, mostStart)
return None
def main(unused_argv):
assert len(unused_argv) == 2, unused_argv
sgf_dir = unused_argv[1]
sgf_dir += '/' * (sgf_dir[-1] != '/')
sgf_files = oneoff_utils.find_and_filter_sgf_files(sgf_dir)
per_folder = defaultdict(lambda: [0,0])
lengths = Counter()
ladders = []
for name in tqdm(sorted(sgf_files)):
folder = os.path.dirname(name[len(sgf_dir):])
per_folder[folder][0] += 1
ladderAt = isLadderIsh(name)
if ladderAt:
ladders.append((name, ladderAt))
lengths[ladderAt[0]] += 1
per_folder[folder][1] += 1
print("Ladderish({}): {}, {}".format(len(ladders), ladderAt, name))
replace = '/ladder/' + ('yes' if ladderAt else 'no') + '/'
copyfile(name, name.replace('/ladder/', replace))
print()
stars_per = max(max(lengths.values()) / 50, 1)
for length, c in sorted(lengths.items()):
print("{:2d} ({:<4d}): {}".format(length, c, "*" * int(c / stars_per)))
print()
if len(per_folder) > 1:
for folder, counts in sorted(per_folder.items()):
if not folder.endswith('/'): folder += "/"
print("{}/{} ({:.1f}%) {}".format(
counts[1], counts[0], 100 * counts[1] / counts[0], folder))
count = len(ladders)
print("{:3d}/{:<4d} ({:.1f}%) overall".format(
count, len(sgf_files), 100 * count / len(sgf_files)))
if __name__ == "__main__":
app.run(main)
| lablup/sorna-repl | vendor/benchmark/minigo/oneoffs/ladder_detector.py | Python | lgpl-3.0 | 3,787 | 0.003169 |
import os
import sys
import subprocess
from contextlib import contextmanager
import argparse
import glob
ENV_ROOT = 'test_ambertools'
AMBER_VERSION = 'amber17'
def is_conda_package(package_dir):
basename = os.path.basename(package_dir)
return not (basename.startswith('osx') or basename.startswith('linux'))
def run_test(package_dir, amberhome, TEST_SCRIPT):
if is_conda_package(package_dir):
subprocess.check_call('bash {}'.format(TEST_SCRIPT), shell=True)
else:
subprocess.check_call(
"source {}/amber.sh && bash {}".format(amberhome, TEST_SCRIPT),
shell=True)
def install_ambertools(package_dir,
env_name,
tmp_dir='junk_folder',
pyver='2.7'):
if is_conda_package(package_dir):
# conda
subprocess.check_call(
'conda install {} -n {}'.format(package_dir, env_name), shell=True)
else:
amberhome = os.path.abspath(os.path.join(tmp_dir, AMBER_VERSION))
# non-conda
try:
os.mkdir(tmp_dir)
except OSError:
pass
os.chdir(tmp_dir)
if os.path.exists(AMBER_VERSION):
print("Existing {}. Skip untar".format(AMBER_VERSION))
else:
subprocess.check_call(['tar', '-xf', package_dir])
# os.environ['AMBERHOME'] = amberhome
# os.environ['PYTHONPATH'] = os.path.join(amberhome,
# 'lib/python{}/site-packages'.format(pyver))
# os.environ['PATH'] = os.path.join(amberhome, 'bin') + ':' + os.getenv("PATH")
def find_miniconda_root():
command = "conda info --base"
return subprocess.check_output(command, shell=True).decode().strip()
def create_env(env, python_version):
sys.stdout.write('creating {} env'.format(env))
cmlist = 'conda create -n {} python={} numpy nomkl --yes'.format(
env, python_version)
print(cmlist)
subprocess.check_call(cmlist.split())
@contextmanager
def run_env(env_name, python_version):
os.environ['PYTHONPATH'] = ''
ORIG_PATH = os.environ['PATH']
env_path = find_miniconda_root() + '/envs/' + env_name
env_bin_dir = env_path + '/bin/'
os.environ['CONDA_PREFIX'] = env_path
os.environ['PATH'] = env_bin_dir + ':' + ORIG_PATH
if not os.path.exists(find_miniconda_root() + '/envs/' + env_name):
create_env(env_name, python_version)
os.system('source activate {}'.format(env_name))
yield
os.system('conda env remove -n {} -y'.format(env_name))
os.environ['PATH'] = ORIG_PATH
def ensure_no_gfortran_local(amberhome):
errors = []
for fn in get_tested_files(amberhome):
cmd = ['otool', '-L', fn]
try:
output = subprocess.check_output(
cmd, stderr=subprocess.PIPE).decode()
except subprocess.CalledProcessError:
output = ''
if '/usr/local/gfortran' in output:
errors.append(fn)
return errors
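# otool -L lists the dylibs a Mach-O file links against; an offending line
# would look like (illustrative):
#   /usr/local/gfortran/lib/libgfortran.3.dylib (compatibility version ...)
# Any such match means the file still links against the local gfortran.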
def get_so_files(dest):
cmd = 'find {} -type f -name "*.so"'.format(dest)
print('cmd: {}'.format(cmd))
output = subprocess.check_output(cmd, shell=True)
output = output.decode()
files = [fn for fn in output.split('\n') if fn]
return files
def get_tested_files(dest):
so_files = get_so_files(dest)
# files_in_bin = [os.path.join(dest, 'bin', fn)
# for fn in ['cpptraj', 'sqm', 'mdgx']]
files_in_bin = glob.glob(os.path.join(dest, 'bin/*'))
return [
fn
for fn in so_files + files_in_bin + glob.glob(
os.path.join(dest, 'bin/to_be_dispatched/*')) + glob.glob(
os.path.join(dest, 'lib/*dylib'))
]
def main(args=None):
parser = argparse.ArgumentParser()
parser.add_argument("package_dir")
parser.add_argument("-py", dest='pyvers')
opt = parser.parse_args(args)
package_dir = opt.package_dir
tmp_dir = 'junk_folder' # only exists if non-conda package
conda_recipe = os.path.abspath(
os.path.join(os.path.dirname(__file__), '..', 'conda-ambertools-single-python'))
TEST_SCRIPT = '{}/run_test.sh'.format(conda_recipe)
print('conda_recipe', conda_recipe)
print('run_test', run_test)
pyvers = [
opt.pyvers,
] if opt.pyvers else ['2.7', '3.4', '3.5', '3.6', '3.7']
print('Python versions = {}'.format(pyvers))
print('conda package = {}'.format(is_conda_package(package_dir)))
errors = []
for py in pyvers:
env_name = ENV_ROOT + py
with run_env(env_name, py):
if is_conda_package(package_dir):
amberhome = find_miniconda_root() + '/envs/' + env_name
else:
# do not set CONDA_PREFIX to trigger
# unset PYTHONPATH in run_test.sh in this case.
os.environ['CONDA_PREFIX'] = ''
amberhome = os.path.join(
os.path.abspath(tmp_dir), AMBER_VERSION)
install_ambertools(package_dir, env_name, pyver=py)
if sys.platform.startswith('darwin'):
errors = ensure_no_gfortran_local(amberhome)
run_test(package_dir, amberhome, TEST_SCRIPT)
# check libgfortran
if errors:
print(
"ERROR: Files should not have /usr/local/gfortran in its content"
)
print(errors)
sys.exit(1)
else:
print("libgfortran fixed. Wonderful")
if __name__ == '__main__':
main()
| Amber-MD/ambertools-conda-build | conda_tools/validate_ambertools_build.py | Python | mit | 5,502 | 0.000545 |
"""Two dimensional checkerboard lattice with real hoppings"""
import pybinding as pb
import matplotlib.pyplot as plt
from math import pi
pb.pltutils.use_style()
def checkerboard(d=0.2, delta=1.1, t=0.6):
lat = pb.Lattice(a1=[d, 0], a2=[0, d])
lat.add_sublattices(
('A', [0, 0], -delta),
('B', [d/2, d/2], delta)
)
lat.add_hoppings(
([ 0, 0], 'A', 'B', t),
([ 0, -1], 'A', 'B', t),
([-1, 0], 'A', 'B', t),
([-1, -1], 'A', 'B', t)
)
return lat
lattice = checkerboard()
lattice.plot()
plt.show()
lattice.plot_brillouin_zone()
plt.show()
model = pb.Model(checkerboard(), pb.translational_symmetry())
solver = pb.solver.lapack(model)
bands = solver.calc_bands([0, 0], [0, 5*pi], [5*pi, 5*pi], [0, 0])
bands.plot()
plt.show()
| dean0x7d/pybinding | docs/examples/lattice/checkerboard.py | Python | bsd-2-clause | 801 | 0.003745 |
import json
from django.core.urlresolvers import reverse
from django.http import HttpResponseNotFound
from django.test import TestCase
from mock import Mock
from utils import use_GET_in
from api.views import msas, tables
class ConversionTest(TestCase):
def test_use_GET_in(self):
fn, request = Mock(), Mock()
request.GET.lists.return_value = [('param1', [0]), ('param2', [-1])]
# Dictionaries become JSON
fn.return_value = {'a': 1, 'b': 2}
response = use_GET_in(fn, request)
self.assertEqual(json.loads(response.content), {'a': 1, 'b': 2})
self.assertEqual(fn.call_args[0][0], {'param1': [0], 'param2': [-1]})
# Everything else is unaltered
fn.return_value = HttpResponseNotFound('Oh noes')
response = use_GET_in(fn, request)
self.assertEqual(response.status_code, 404)
self.assertEqual(response.content, 'Oh noes')
class ViewsTests(TestCase):
fixtures = ['agency.json', 'fake_msa.json', 'api_tracts.json', 'test_counties.json', 'fake_respondents.json']
def test_api_all_user_errors(self):
resp = self.client.get(reverse('all'), {'neLat':'42.048794',
'neLon':'-87.430698',
'swLat':'',
'swLon':'-88.225583',
'year':'2013',
'action_taken':'1,2,3,4,5',
'lender':'736-4045996'})
self.assertEqual(resp.status_code, 404)
resp = self.client.get(reverse('all'), {'neLat':'42.048794',
'neLon':'-87.430698',
'swLat':'41.597775',
'swLon':'',
'year':'2013',
'action_taken':'1,2,3,4,5',
'lender':'736-4045996'})
self.assertEqual(resp.status_code, 404)
def test_api_msas_user_errors(self):
resp = self.client.get(reverse('msas'))
self.assertEqual(resp.status_code, 404)
resp = self.client.get(reverse('msas'), {'neLat':'42.048794',
'neLon':'-87.430698',
'swLat':'',
'swLon':'-88.225583',
'year':'2013',
'action_taken':'1,2,3,4,5',
'lender':'736-4045996'})
self.assertEqual(resp.status_code, 404)
resp = self.client.get(reverse('msas'), {'neLat':'42.048794',
'neLon':'-87.430698',
'swLat':'41.597775',
'swLon':'',
'year':'2013',
'action_taken':'1,2,3,4,5',
'lender':'736-4045996'})
self.assertEqual(resp.status_code, 404)
def test_api_msas_endpoint(self):
"""should return a list of MSA ids in view"""
coords = {'neLat': '36.551569', 'neLon':'-78.961487', 'swLat':'35.824494', 'swLon':'-81.828918'}
url = reverse(msas)
resp = self.client.get(url, coords)
result_list = json.loads(resp.content)
self.assertTrue(isinstance(result_list, list))
self.assertContains(resp, '49180')
def test_api_tables_endpoint(self):
"""should return table_data json for a lender/MSA pair"""
params = {'lender': '90000451965', 'metro': '49180'}
url = reverse(tables)
resp = self.client.get(url, params)
result_dict = json.loads(resp.content)
self.assertTrue(isinstance(result_dict, dict))
keys = ['counties', 'msa']
lender_keys = ['hma_pct', 'lma_pct', 'mma_pct', 'lma', 'mma', 'hma', 'lar_total', 'peer_hma_pct', 'peer_lma_pct', 'peer_mma_pct', 'peer_lma', 'peer_mma', 'peer_hma', 'peer_lar_total', 'odds_lma', 'odds_mma', 'odds_hma']
for key in keys:
self.assertTrue(key in result_dict.keys())
for key in lender_keys:
self.assertTrue(key in result_dict['msa'].keys())
self.assertTrue(len(result_dict['msa']) > 0)
| mehtadev17/mapusaurus | mapusaurus/api/tests.py | Python | cc0-1.0 | 4,338 | 0.014292 |
# -*- coding: utf-8 -*-
import os
def onemode(values):
return max(set(values), key=values.count)
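# Example: onemode([1, 2, 2, 3]) returns 2, the most frequent value.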
os.system("pause")
| NicovincX2/Python-3.5 | Statistiques/Statistique descriptive/Mode (statistiques)/one_mode.py | Python | gpl-3.0 | 124 | 0.008065 |
"""Unit tests for psrfits_to_sdfits.py."""
import unittest
import sys
import scipy as sp
import numpy.random as rand
import psrfits_to_sdfits as p2s
import kiyopy.custom_exceptions as ce
class TestFormatData(unittest.TestCase) :
def setUp(self) :
self.ntime = 5
self.npol = 4
self.nfreq = 10
self.good_data = sp.empty((self.ntime, self.npol, self.nfreq),
dtype=int)
self.good_data[:,:,:] = sp.reshape(sp.arange(self.ntime*self.nfreq),
(self.ntime, 1, self.nfreq))
self.good_data[:,0,:] += 100
self.good_data[:,1:,:] -= self.ntime*self.nfreq//2
self.raw_data = sp.empty((self.ntime, self.npol, self.nfreq),
dtype=sp.uint8)
self.raw_data[:,0,:] = self.good_data[:,0,:]
self.raw_data.dtype = sp.int8
self.raw_data[:,1:,:] = self.good_data[:,1:,:]
self.raw_data.dtype = sp.uint8
self.raw_data = self.raw_data.flatten()
def test_runs(self) :
p2s.format_data(self.raw_data, self.ntime, self.npol, self.nfreq)
def test_requires_uint(self) :
self.assertRaises(TypeError, p2s.format_data, self.good_data,
self.ntime, self.npol, self.nfreq)
def test_right_answer(self):
reformated = p2s.format_data(self.raw_data, self.ntime, self.npol,
self.nfreq)
self.assertTrue(sp.allclose(reformated, self.good_data))
class TestFoldOnCal(unittest.TestCase) :
def setUp(self):
self.ntime = 2048
self.nfreq = 10
self.data = sp.zeros((self.ntime, 4, self.nfreq))
self.n_bins_cal = 64
# Set channel dependant gain.
self.level = 0.1*(self.nfreq + sp.arange(self.nfreq))
# Add noise.
self.data[:,:,:] += (0.1 * self.level
* rand.randn(self.ntime, 4, self.nfreq))
# Add DC level.
self.dc = 10 * self.level
self.data += self.dc
# First can transition.
self.first_trans = rand.randint(0, self.n_bins_cal // 2)
# The following randomly assigns self.neg to -1 or 1.
self.neg = 0
while not self.neg: self.neg = rand.randint(-1, 2)
# First upward edge:
if self.neg == 1:
self.offset = self.first_trans
else:
self.offset = self.first_trans + self.n_bins_cal // 2
self.data[:,0,:] += self.level
for ii in range(self.ntime//self.n_bins_cal) :
s = slice(self.first_trans + ii*self.n_bins_cal, self.first_trans +
(2*ii+1)*self.n_bins_cal//2)
self.data[s, 0, :] += self.neg * self.level
# Transition values and locations.
self.t_slice = slice(self.first_trans, sys.maxint, self.n_bins_cal//2)
self.t_vals = 0.5 + 0.1 * rand.randn(2*self.ntime//self.n_bins_cal,
self.nfreq)
self.t_vals *= - self.level
def test_runs(self) :
p2s.get_cal_mask(self.data, self.n_bins_cal)
def test_right_answer_basic(self) :
first_ind_on, n_blank = p2s.get_cal_mask(self.data, self.n_bins_cal)
self.assertEqual(first_ind_on, (self.offset + 1) % self.n_bins_cal)
self.assertEqual(n_blank, 2)
def test_right_answer_partial(self) :
self.data[self.t_slice, 0, :] += self.t_vals
first_ind_on, n_blank = p2s.get_cal_mask(self.data, self.n_bins_cal)
self.assertEqual(first_ind_on, (self.offset + 1) % self.n_bins_cal)
self.assertEqual(n_blank, 1)
def test_checks_cal_per(self) :
self.assertRaises(ValueError, p2s.get_cal_mask, self.data,
self.n_bins_cal + 1)
def test_fails_to_many_transitions(self) :
self.data[self.t_slice, 0, :] += self.t_vals
self.assertRaises(ce.DataError, p2s.get_cal_mask, self.data,
self.n_bins_cal*2)
self.assertRaises(ce.DataError, p2s.get_cal_mask, self.data,
self.n_bins_cal//2)
def test_fails_any_nan(self) :
self.data[self.t_slice,0,:] = float('nan')
self.assertRaises(ce.DataError, p2s.get_cal_mask, self.data,
self.n_bins_cal)
def test_fails_offs_in_ons(self) :
self.data[self.t_slice, 0, :] += self.t_vals
s = slice((self.offset + 7) % self.n_bins_cal, sys.maxint,
self.n_bins_cal)
self.data[s, :, :] = self.dc
self.assertRaises(ce.DataError, p2s.get_cal_mask, self.data,
self.n_bins_cal)
def test_fails_late_on(self) :
self.data[self.t_slice, 0, :] = self.dc
s = slice(self.offset+1, sys.maxint, self.n_bins_cal)
self.data[s, :, :] = self.dc
self.assertRaises(ce.DataError, p2s.get_cal_mask, self.data,
self.n_bins_cal)
def test_fails_to_many_semi_bins(self) :
self.data[self.t_slice, 0, :] += self.t_vals
s = slice((self.offset + 7) % self.n_bins_cal, sys.maxint,
self.n_bins_cal)
self.data[s, :, :] = self.dc + self.level * 0.7
self.assertRaises(ce.DataError, p2s.get_cal_mask, self.data,
self.n_bins_cal)
def test_fast_flagger(self):
for ii in range(self.ntime * self.nfreq * 4 // self.n_bins_cal // 10):
#for ii in range(3):
i_f = rand.randint(0, self.nfreq)
i_t = rand.randint(0, self.ntime)
i_p = rand.randint(0, 4)
self.data[i_t,i_p,i_f] += self.level[i_f] * 5
data, weights = p2s.separate_cal(self.data, self.n_bins_cal, flag=10)
right_answer = sp.empty((4, 2, self.nfreq))
right_answer[...] = self.dc
right_answer[0,0,:] += self.level
self.assertTrue(sp.allclose(data, right_answer, atol=self.level / 10))
self.assertTrue(sp.all(weights <= 1.))
kept_fraction = 1. - 4./self.n_bins_cal - (4./self.n_bins_cal/10)
self.assertTrue(sp.allclose(sp.mean(weights), kept_fraction, rtol=1e-3))
class TestSeparateCal(unittest.TestCase) :
    """Unlike the tests for get_cal_mask, these tests are tightly controlled
    with no noise, so we can detect deviations from the expected output."""
def setUp(self) :
self.ntime = 2048
self.nfreq = 10
self.data = sp.zeros((self.ntime, 4, self.nfreq))
self.n_bins_cal = 64
self.offset = 10
def post_setup(self) :
if self.offset > self.n_bins_cal//2 :
last_on_start = (self.offset + self.n_bins_cal//2)% self.n_bins_cal
self.data[:last_on_start, :, :] = 1
for ii in range(self.ntime//self.n_bins_cal) :
s = slice(self.offset + ii*self.n_bins_cal, self.offset +
(2*ii+1)*self.n_bins_cal//2)
self.data[s, :, :] = 1
self.t_slice_on = slice(self.offset, sys.maxint, self.n_bins_cal)
self.t_slice_off = slice((self.offset +
self.n_bins_cal//2)%self.n_bins_cal,
sys.maxint, self.n_bins_cal)
def check_answer(self) :
data = self.data.copy()
outdata, weights = p2s.separate_cal(data, self.n_bins_cal, flag=-1)
self.assertTrue(sp.allclose(outdata[:,:,0,:], 1.0))
self.assertTrue(sp.allclose(outdata[:,:,1,:], 0.0))
data = self.data.copy()
outdata, weights = p2s.separate_cal(data, self.n_bins_cal, flag=10)
self.assertTrue(sp.allclose(outdata[:,:,0,:], 1.0))
self.assertTrue(sp.allclose(outdata[:,:,1,:], 0.0))
def test_works_no_transition(self) :
self.post_setup()
self.check_answer()
def test_works_transition(self) :
self.post_setup()
self.data[self.t_slice_off, :, :] = 0.3
self.data[self.t_slice_on, :, :] = 0.5
self.check_answer()
# Move the offset to the the second half and make sure it works.
def test_works_no_transition_late(self) :
self.offset = 57
self.post_setup()
self.check_answer()
def test_works_transition_late(self) :
self.offset = 57
self.post_setup()
self.data[self.t_slice_off, :, :] = 0.3
self.data[self.t_slice_on, :, :] = 0.5
self.check_answer()
# Test offset = 63
def test_works_no_transition__1(self) :
self.offset = 63
self.post_setup()
self.check_answer()
def test_works_transition__1(self) :
self.offset = 63
self.post_setup()
self.data[self.t_slice_off, :, :] = 0.3
self.data[self.t_slice_on, :, :] = 0.5
self.check_answer()
# Test offset = 32
def test_works_no_transition_32(self) :
self.offset = 32
self.post_setup()
self.check_answer()
def test_works_transition_32(self) :
self.offset = 32
self.post_setup()
self.data[self.t_slice_off, :, :] = 0.3
self.data[self.t_slice_on, :, :] = 0.5
self.check_answer()
# Test offset = 0
def test_works_no_transition_0(self) :
self.offset = 0
self.post_setup()
self.check_answer()
def test_works_transition_0(self) :
self.offset = 0
self.post_setup()
self.data[self.t_slice_off, :, :] = 0.3
self.data[self.t_slice_on, :, :] = 0.5
self.check_answer()
if __name__ == '__main__' :
unittest.main()
| kiyo-masui/SDdata | sddata/tests/test_psrfits_to_sdfits.py | Python | gpl-2.0 | 9,556 | 0.009209 |
# -*- coding: utf-8 -*-
"""
requests.models
~~~~~~~~~~~~~~~
This module contains the primary objects that power Requests.
"""
import os
from datetime import datetime
from .hooks import dispatch_hook, HOOKS
from .structures import CaseInsensitiveDict
from .status_codes import codes
from .auth import HTTPBasicAuth, HTTPProxyAuth
from .packages.urllib3.response import HTTPResponse
from .packages.urllib3.exceptions import MaxRetryError
from .packages.urllib3.exceptions import SSLError as _SSLError
from .packages.urllib3.exceptions import HTTPError as _HTTPError
from .packages.urllib3 import connectionpool, poolmanager
from .packages.urllib3.filepost import encode_multipart_formdata
from .defaults import SCHEMAS
from .exceptions import (
ConnectionError, HTTPError, RequestException, Timeout, TooManyRedirects,
URLRequired, SSLError, MissingSchema, InvalidSchema)
from .utils import (
get_encoding_from_headers, stream_untransfer, guess_filename, requote_uri,
dict_from_string, stream_decode_response_unicode, get_netrc_auth)
from .compat import (
urlparse, urlunparse, urljoin, urlsplit, urlencode, str, bytes,
SimpleCookie, is_py2)
# Import chardet if it is available.
try:
import chardet
except ImportError:
pass
REDIRECT_STATI = (codes.moved, codes.found, codes.other, codes.temporary_moved)
class Request(object):
"""The :class:`Request <Request>` object. It carries out all functionality of
Requests. Recommended interface is with the Requests functions.
"""
def __init__(self,
url=None,
headers=dict(),
files=None,
method=None,
data=dict(),
params=dict(),
auth=None,
cookies=None,
timeout=None,
redirect=False,
allow_redirects=False,
proxies=None,
hooks=None,
config=None,
_poolmanager=None,
verify=None,
session=None,
cert=None):
#: Dictionary of configurations for this request.
self.config = dict(config or [])
#: Float describes the timeout of the request.
# (Use socket.setdefaulttimeout() as fallback)
self.timeout = timeout
#: Request URL.
self.url = url
#: Dictionary of HTTP Headers to attach to the :class:`Request <Request>`.
self.headers = dict(headers or [])
#: Dictionary of files to multipart upload (``{filename: content}``).
self.files = files
#: HTTP Method to use.
self.method = method
#: Dictionary or byte of request body data to attach to the
#: :class:`Request <Request>`.
self.data = None
#: Dictionary or byte of querystring data to attach to the
#: :class:`Request <Request>`.
self.params = None
#: True if :class:`Request <Request>` is part of a redirect chain (disables history
#: and HTTPError storage).
self.redirect = redirect
#: Set to True if full redirects are allowed (e.g. re-POST-ing of data at new ``Location``)
self.allow_redirects = allow_redirects
# Dictionary mapping protocol to the URL of the proxy (e.g. {'http': 'foo.bar:3128'})
self.proxies = dict(proxies or [])
# If no proxies are given, allow configuration by environment variables
# HTTP_PROXY and HTTPS_PROXY.
if not self.proxies and self.config.get('trust_env'):
if 'HTTP_PROXY' in os.environ:
self.proxies['http'] = os.environ['HTTP_PROXY']
if 'HTTPS_PROXY' in os.environ:
self.proxies['https'] = os.environ['HTTPS_PROXY']
self.data, self._enc_data = self._encode_params(data)
self.params, self._enc_params = self._encode_params(params)
#: :class:`Response <Response>` instance, containing
#: content and metadata of HTTP Response, once :attr:`sent <send>`.
self.response = Response()
#: Authentication tuple or object to attach to :class:`Request <Request>`.
self.auth = auth
#: CookieJar to attach to :class:`Request <Request>`.
self.cookies = dict(cookies or [])
#: True if Request has been sent.
self.sent = False
#: Event-handling hooks.
self.hooks = {}
for event in HOOKS:
self.hooks[event] = []
hooks = hooks or {}
for (k, v) in list(hooks.items()):
self.register_hook(event=k, hook=v)
#: Session.
self.session = session
#: SSL Verification.
self.verify = verify
#: SSL Certificate
self.cert = cert
if headers:
headers = CaseInsensitiveDict(self.headers)
else:
headers = CaseInsensitiveDict()
# Add configured base headers.
for (k, v) in list(self.config.get('base_headers', {}).items()):
if k not in headers:
headers[k] = v
self.headers = headers
self._poolmanager = _poolmanager
def __repr__(self):
return '<Request [%s]>' % (self.method)
def _build_response(self, resp):
"""Build internal :class:`Response <Response>` object
from given response.
"""
def build(resp):
response = Response()
# Pass settings over.
response.config = self.config
if resp:
# Fallback to None if there's no status_code, for whatever reason.
response.status_code = getattr(resp, 'status', None)
# Make headers case-insensitive.
response.headers = CaseInsensitiveDict(getattr(resp, 'headers', None))
# Set encoding.
response.encoding = get_encoding_from_headers(response.headers)
# Start off with our local cookies.
cookies = self.cookies or dict()
# Add new cookies from the server.
if 'set-cookie' in response.headers:
cookie_header = response.headers['set-cookie']
cookies = dict_from_string(cookie_header)
# Save cookies in Response.
response.cookies = cookies
# No exceptions were harmed in the making of this request.
response.error = getattr(resp, 'error', None)
# Save original response for later.
response.raw = resp
if isinstance(self.full_url, bytes):
response.url = self.full_url.decode('utf-8')
else:
response.url = self.full_url
return response
history = []
r = build(resp)
self.cookies.update(r.cookies)
if r.status_code in REDIRECT_STATI and not self.redirect:
while (('location' in r.headers) and
((r.status_code is codes.see_other) or (self.allow_redirects))):
r.content # Consume socket so it can be released
if not len(history) < self.config.get('max_redirects'):
raise TooManyRedirects()
# Release the connection back into the pool.
r.raw.release_conn()
history.append(r)
url = r.headers['location']
data = self.data
# Handle redirection without scheme (see: RFC 1808 Section 4)
if url.startswith('//'):
parsed_rurl = urlparse(r.url)
url = '%s:%s' % (parsed_rurl.scheme, url)
# Facilitate non-RFC2616-compliant 'location' headers
# (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource')
if not urlparse(url).netloc:
url = urljoin(r.url,
# Compliant with RFC3986, we percent
# encode the url.
requote_uri(url))
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.3.4
if r.status_code is codes.see_other:
method = 'GET'
data = None
else:
method = self.method
# Do what the browsers do if strict_mode is off...
if (not self.config.get('strict_mode')):
if r.status_code in (codes.moved, codes.found) and self.method == 'POST':
method = 'GET'
data = None
if (r.status_code == 303) and self.method != 'HEAD':
method = 'GET'
data = None
# Remove the cookie headers that were sent.
headers = self.headers
try:
del headers['Cookie']
except KeyError:
pass
request = Request(
url=url,
headers=headers,
files=self.files,
method=method,
params=self.session.params,
auth=self.auth,
cookies=self.cookies,
redirect=True,
data=data,
config=self.config,
timeout=self.timeout,
_poolmanager=self._poolmanager,
proxies=self.proxies,
verify=self.verify,
session=self.session,
cert=self.cert
)
request.send()
r = request.response
self.cookies.update(r.cookies)
r.history = history
self.response = r
self.response.request = self
self.response.cookies.update(self.cookies)
@staticmethod
def _encode_params(data):
"""Encode parameters in a piece of data.
If the data supplied is a dictionary, encodes each parameter in it, and
returns a list of tuples containing the encoded parameters, and a urlencoded
version of that.
Otherwise, assumes the data is already encoded appropriately, and
returns it twice.
"""
if isinstance(data, bytes):
return data, data
if hasattr(data, '__iter__') and not isinstance(data, str):
data = dict(data)
if hasattr(data, 'items'):
result = []
for k, vs in list(data.items()):
for v in isinstance(vs, list) and vs or [vs]:
result.append((k.encode('utf-8') if isinstance(k, str) else k,
v.encode('utf-8') if isinstance(v, str) else v))
return result, urlencode(result, doseq=True)
else:
return data, data
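# Hedged illustration (values hypothetical; with the .encode('utf-8') calls
# above, the keys/values in the tuple list come back as UTF-8 bytes on
# Python 3, so treat the exact types here as approximate):
# >>> Request._encode_params({'q': ['a', 'b']})
# ([('q', 'a'), ('q', 'b')], 'q=a&q=b')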
@property
def full_url(self):
"""Build the actual URL to use."""
if not self.url:
raise URLRequired()
url = self.url
# Support for unicode domain names and paths.
scheme, netloc, path, params, query, fragment = urlparse(url)
if not scheme:
raise MissingSchema("Invalid URL %r: No schema supplied" % url)
if scheme not in SCHEMAS:
raise InvalidSchema("Invalid scheme %r" % scheme)
netloc = netloc.encode('idna').decode('utf-8')
if not path:
path = '/'
if is_py2:
if isinstance(scheme, str):
scheme = scheme.encode('utf-8')
if isinstance(netloc, str):
netloc = netloc.encode('utf-8')
if isinstance(path, str):
path = path.encode('utf-8')
if isinstance(params, str):
params = params.encode('utf-8')
if isinstance(query, str):
query = query.encode('utf-8')
if isinstance(fragment, str):
fragment = fragment.encode('utf-8')
url = (urlunparse([scheme, netloc, path, params, query, fragment]))
if self._enc_params:
if urlparse(url).query:
url = '%s&%s' % (url, self._enc_params)
else:
url = '%s?%s' % (url, self._enc_params)
if self.config.get('encode_uri', True):
url = requote_uri(url)
return url
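# Hedged sketch of the composition above (hypothetical values): for
# url='http://example.com' with _enc_params 'q=1', full_url would yield
# 'http://example.com/?q=1' -- the path defaults to '/', the netloc is
# IDNA-encoded, and the encoded params are appended with '?' or '&'.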
@property
def path_url(self):
"""Build the path URL to use."""
url = []
p = urlsplit(self.full_url)
# Proxies use full URLs.
if p.scheme in self.proxies:
return self.full_url
path = p.path
if not path:
path = '/'
url.append(path)
query = p.query
if query:
url.append('?')
url.append(query)
return ''.join(url)
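# Sketch (hypothetical values): for a full_url of 'http://example.com/a?b=1'
# path_url returns '/a?b=1'; if 'http' were a key in self.proxies, the full
# URL would be returned instead so the proxy can route it.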
def register_hook(self, event, hook):
"""Properly register a hook."""
return self.hooks[event].append(hook)
def send(self, anyway=False, prefetch=False):
"""Sends the request. Returns True of successful, False if not.
If there was an HTTPError during transmission,
self.response.status_code will contain the HTTPError code.
Once a request is successfully sent, `sent` will equal True.
:param anyway: If True, request will be sent, even if it has
already been sent.
:param prefetch: If True, the response content is consumed immediately
after sending.
"""
# Build the URL
url = self.full_url
# Logging
if self.config.get('verbose'):
self.config.get('verbose').write('%s %s %s\n' % (
datetime.now().isoformat(), self.method, url
))
# Nottin' on you.
body = None
content_type = None
# Multi-part file uploads.
if self.files:
if not isinstance(self.data, str):
try:
fields = self.data.copy()
except AttributeError:
fields = dict(self.data)
for (k, v) in list(self.files.items()):
# support for explicit filename
if isinstance(v, (tuple, list)):
fn, fp = v
else:
fn = guess_filename(v) or k
fp = v
fields.update({k: (fn, fp.read())})
(body, content_type) = encode_multipart_formdata(fields)
else:
pass
# TODO: Conflict?
else:
if self.data:
body = self._enc_data
if isinstance(self.data, str):
content_type = None
else:
content_type = 'application/x-www-form-urlencoded'
# Add content-type if it wasn't explicitly provided.
if content_type and ('content-type' not in self.headers):
self.headers['Content-Type'] = content_type
# Use .netrc auth if none was provided.
if not self.auth and self.config.get('trust_env'):
self.auth = get_netrc_auth(url)
if self.auth:
if isinstance(self.auth, tuple) and len(self.auth) == 2:
# special-case basic HTTP auth
self.auth = HTTPBasicAuth(*self.auth)
# Allow auth to make its changes.
r = self.auth(self)
# Update self to reflect the auth changes.
self.__dict__.update(r.__dict__)
_p = urlparse(url)
proxy = self.proxies.get(_p.scheme)
if proxy:
conn = poolmanager.proxy_from_url(proxy)
_proxy = urlparse(proxy)
if '@' in _proxy.netloc:
auth, _ = _proxy.netloc.split('@', 1)
self.proxy_auth = HTTPProxyAuth(*auth.split(':', 1))
r = self.proxy_auth(self)
self.__dict__.update(r.__dict__)
else:
# Check to see if keep_alive is allowed.
if self.config.get('keep_alive'):
conn = self._poolmanager.connection_from_url(url)
else:
conn = connectionpool.connection_from_url(url)
if url.startswith('https') and self.verify:
cert_loc = None
# Allow self-specified cert location.
if self.verify is not True:
cert_loc = self.verify
# Look for configuration.
if not cert_loc and self.config.get('trust_env'):
cert_loc = os.environ.get('REQUESTS_CA_BUNDLE')
# Curl compatibility.
if not cert_loc and self.config.get('trust_env'):
cert_loc = os.environ.get('CURL_CA_BUNDLE')
# Use the awesome certifi list.
if not cert_loc:
cert_loc = __import__('certifi').where()
conn.cert_reqs = 'CERT_REQUIRED'
conn.ca_certs = cert_loc
else:
conn.cert_reqs = 'CERT_NONE'
conn.ca_certs = None
if self.cert and self.verify:
if len(self.cert) == 2:
conn.cert_file = self.cert[0]
conn.key_file = self.cert[1]
else:
conn.cert_file = self.cert
if not self.sent or anyway:
if self.cookies:
# Skip if 'cookie' header is explicitly set.
if 'cookie' not in self.headers:
# Simple cookie with our dict.
c = SimpleCookie()
for (k, v) in list(self.cookies.items()):
c[k] = v
# Turn it into a header.
cookie_header = c.output(header='', sep='; ').strip()
# Attach Cookie header to request.
self.headers['Cookie'] = cookie_header
# Pre-request hook.
r = dispatch_hook('pre_request', self.hooks, self)
self.__dict__.update(r.__dict__)
try:
# The inner try .. except re-raises certain exceptions as
# internal exception types; the outer suppresses exceptions
# when safe mode is set.
try:
# Send the request.
r = conn.urlopen(
method=self.method,
url=self.path_url,
body=body,
headers=self.headers,
redirect=False,
assert_same_host=False,
preload_content=False,
decode_content=False,
retries=self.config.get('max_retries', 0),
timeout=self.timeout,
)
self.sent = True
except MaxRetryError as e:
raise ConnectionError(e)
except (_SSLError, _HTTPError) as e:
if self.verify and isinstance(e, _SSLError):
raise SSLError(e)
raise Timeout('Request timed out.')
except RequestException as e:
if self.config.get('safe_mode', False):
# In safe mode, catch the exception and attach it to
# a blank urllib3.HTTPResponse object.
r = HTTPResponse()
r.error = e
else:
raise
self._build_response(r)
# Response manipulation hook.
self.response = dispatch_hook('response', self.hooks, self.response)
# Post-request hook.
r = dispatch_hook('post_request', self.hooks, self)
self.__dict__.update(r.__dict__)
# If prefetch is True, mark content as consumed.
if prefetch:
# Save the response.
self.response.content
if self.config.get('danger_mode'):
self.response.raise_for_status()
return self.sent
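# Hedged usage sketch for send() -- the constructor keywords are assumed
# from their use elsewhere in this module, so treat this as approximate:
# req = Request(url='http://example.com', method='GET',
# config={'max_retries': 0, 'encode_uri': True})
# req.send()
# print(req.response.status_code)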
class Response(object):
"""The core :class:`Response <Response>` object. All
:class:`Request <Request>` objects contain a
:class:`response <Response>` attribute, which is an instance
of this class.
"""
def __init__(self):
self._content = None
self._content_consumed = False
#: Integer Code of responded HTTP Status.
self.status_code = None
#: Case-insensitive Dictionary of Response Headers.
#: For example, ``headers['content-encoding']`` will return the
#: value of a ``'Content-Encoding'`` response header.
self.headers = CaseInsensitiveDict()
#: File-like object representation of response (for advanced usage).
self.raw = None
#: Final URL location of Response.
self.url = None
#: Resulting :class:`HTTPError` of request, if one occurred.
self.error = None
#: Encoding to decode with when accessing r.content.
self.encoding = None
#: A list of :class:`Response <Response>` objects from
#: the history of the Request. Any redirect responses will end
#: up here.
self.history = []
#: The :class:`Request <Request>` that created the Response.
self.request = None
#: A dictionary of Cookies the server sent back.
self.cookies = {}
#: Dictionary of configurations for this request.
self.config = {}
def __repr__(self):
return '<Response [%s]>' % (self.status_code)
def __bool__(self):
"""Returns true if :attr:`status_code` is 'OK'."""
return self.ok
def __nonzero__(self):
"""Returns true if :attr:`status_code` is 'OK'."""
return self.ok
@property
def ok(self):
try:
self.raise_for_status()
except RequestException:
return False
return True
def iter_content(self, chunk_size=10 * 1024, decode_unicode=False):
"""Iterates over the response data. This avoids reading the content
at once into memory for large responses. The chunk size is the number
of bytes it should read into memory. This is not necessarily the
length of each item returned as decoding can take place.
"""
if self._content_consumed:
raise RuntimeError(
'The content for this response was already consumed'
)
def generate():
while 1:
chunk = self.raw.read(chunk_size)
if not chunk:
break
yield chunk
self._content_consumed = True
gen = stream_untransfer(generate(), self)
if decode_unicode:
gen = stream_decode_response_unicode(gen, self)
return gen
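# Minimal streaming sketch, assuming `r` is the Response of a sent request
# (hypothetical names):
# with open('out.bin', 'wb') as fh:
# for chunk in r.iter_content(chunk_size=8192):
# fh.write(chunk)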
def iter_lines(self, chunk_size=10 * 1024, decode_unicode=None):
"""Iterates over the response data, one line at a time. This
avoids reading the content at once into memory for large
responses.
"""
pending = None
for chunk in self.iter_content(
chunk_size=chunk_size,
decode_unicode=decode_unicode):
if pending is not None:
chunk = pending + chunk
lines = chunk.splitlines()
if lines[-1][-1] == chunk[-1]:
pending = lines.pop()
else:
pending = None
for line in lines:
yield line
if pending is not None:
yield pending
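# Sketch (hypothetical): consuming a response line by line without
# buffering the whole body:
# for line in r.iter_lines():
# handle(line) # `handle` is a placeholder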
@property
def content(self):
"""Content of the response, in bytes."""
if self._content is None:
# Read the contents.
try:
if self._content_consumed:
raise RuntimeError(
'The content for this response was already consumed')
if self.status_code == 0:
self._content = None
else:
self._content = bytes().join(self.iter_content()) or bytes()
except AttributeError:
self._content = None
self._content_consumed = True
return self._content
def _detected_encoding(self):
try:
detected = chardet.detect(self.content) or {}
return detected.get('encoding')
# chardet may be unavailable, or something may have gone terribly wrong.
except Exception:
pass
@property
def text(self):
"""Content of the response, in unicode.
if Response.encoding is None and chardet module is available, encoding
will be guessed.
"""
# Try charset from content-type
content = None
encoding = self.encoding
# Fallback to auto-detected encoding if chardet is available.
if self.encoding is None:
encoding = self._detected_encoding()
# Decode unicode from given encoding.
try:
content = str(self.content, encoding, errors='replace')
except LookupError:
# A LookupError is raised if the encoding was not found which could
# indicate a misspelling or similar mistake.
#
# So we try blindly encoding.
content = str(self.content, errors='replace')
except (UnicodeError, TypeError):
pass
return content
def raise_for_status(self, allow_redirects=True):
"""Raises stored :class:`HTTPError` or :class:`URLError`, if one occurred."""
if self.error:
raise self.error
if (self.status_code >= 300) and (self.status_code < 400) and not allow_redirects:
http_error = HTTPError('%s Redirection' % self.status_code)
http_error.response = self
raise http_error
elif (self.status_code >= 400) and (self.status_code < 500):
http_error = HTTPError('%s Client Error' % self.status_code)
http_error.response = self
raise http_error
elif (self.status_code >= 500) and (self.status_code < 600):
http_error = HTTPError('%s Server Error' % self.status_code)
http_error.response = self
raise http_error
| samabhi/pstHealth | venv/lib/python2.7/site-packages/requests/models.py | Python | mit | 26,299 | 0.001711 |
# -*- coding: utf-8 -*-
"""
The MIT License (MIT)
Copyright (c) 2015-2016 Rapptz
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""
import copy
from . import utils
from .permissions import Permissions
from .enums import ChannelType
from collections import namedtuple
from .mixins import Hashable
from .role import Role
from .member import Member
Overwrites = namedtuple('Overwrites', 'id allow deny type')
PermissionOverwrite = namedtuple('PermissionOverwrite', 'allow deny')
class Channel(Hashable):
"""Represents a Discord server channel.
Supported Operations:
+-----------+---------------------------------------+
| Operation | Description |
+===========+=======================================+
| x == y | Checks if two channels are equal. |
+-----------+---------------------------------------+
| x != y | Checks if two channels are not equal. |
+-----------+---------------------------------------+
| hash(x) | Returns the channel's hash. |
+-----------+---------------------------------------+
| str(x) | Returns the channel's name. |
+-----------+---------------------------------------+
Attributes
-----------
name : str
The channel name.
server : :class:`Server`
The server the channel belongs to.
id : str
The channel ID.
topic : Optional[str]
The channel's topic. None if it doesn't exist.
is_private : bool
``True`` if the channel is a private channel (i.e. PM). ``False`` in this case.
position : int
The position in the channel list. This is a number that starts at 0. e.g. the
top channel is position 0. The position varies depending on being a voice channel
or a text channel, so a 0 position voice channel is on top of the voice channel
list.
type : :class:`ChannelType`
The channel type. There is a chance that the type will be ``str`` if
the channel type is not within the ones recognised by the enumerator.
bitrate : int
The channel's preferred audio bitrate in bits per second.
voice_members
A list of :class:`Members` that are currently inside this voice channel.
If :attr:`type` is not :attr:`ChannelType.voice` then this is always an empty array.
user_limit : int
The channel's limit for number of members that can be in a voice channel.
"""
__slots__ = [ 'voice_members', 'name', 'id', 'server', 'topic', 'position',
'is_private', 'type', 'bitrate', 'user_limit',
'_permission_overwrites' ]
def __init__(self, **kwargs):
self._update(**kwargs)
self.voice_members = []
def __str__(self):
return self.name
def _update(self, **kwargs):
self.name = kwargs.get('name')
self.server = kwargs.get('server')
self.id = kwargs.get('id')
self.topic = kwargs.get('topic')
self.is_private = False
self.position = kwargs.get('position')
self.bitrate = kwargs.get('bitrate')
self.type = kwargs.get('type')
self.user_limit = kwargs.get('user_limit')
try:
self.type = ChannelType(self.type)
except:
pass
self._permission_overwrites = []
everyone_index = 0
everyone_id = self.server.default_role.id
for index, overridden in enumerate(kwargs.get('permission_overwrites', [])):
overridden_id = overridden['id']
self._permission_overwrites.append(Overwrites(**overridden))
if overridden.get('type') == 'member':
continue
if overridden_id == everyone_id:
# the @everyone role is not guaranteed to be the first one
# in the list of permission overwrites, however the permission
# resolution code kind of requires that it is the first one in
# the list since it is special. So we need the index so we can
# swap it to be the first one.
everyone_index = index
# do the swap
tmp = self._permission_overwrites
if tmp:
tmp[everyone_index], tmp[0] = tmp[0], tmp[everyone_index]
@property
def changed_roles(self):
"""Returns a list of :class:`Roles` that have been overridden from
their default values in the :attr:`Server.roles` attribute."""
ret = []
for overwrite in filter(lambda o: o.type == 'role', self._permission_overwrites):
role = utils.get(self.server.roles, id=overwrite.id)
if role is None:
continue
role = copy.copy(role)
role.permissions.handle_overwrite(overwrite.allow, overwrite.deny)
ret.append(role)
return ret
@property
def is_default(self):
"""bool : Indicates if this is the default channel for the :class:`Server` it belongs to."""
return self.server.id == self.id
@property
def mention(self):
"""str : The string that allows you to mention the channel."""
return '<#{0.id}>'.format(self)
@property
def created_at(self):
"""Returns the channel's creation time in UTC."""
return utils.snowflake_time(self.id)
def overwrites_for(self, obj):
"""Returns a namedtuple that gives you the channel-specific overwrites
for a member or a role.
The named tuple is a tuple of (allow, deny) :class:`Permissions`
with the appropriately named entries.
Parameters
-----------
obj
The :class:`Role` or :class:`Member` or :class:`Object` denoting
whose overwrite to get.
"""
if isinstance(obj, Member):
predicate = lambda p: p.type == 'member'
elif isinstance(obj, Role):
predicate = lambda p: p.type == 'role'
else:
predicate = lambda p: True
for overwrite in filter(predicate, self._permission_overwrites):
if overwrite.id == obj.id:
return PermissionOverwrite(allow=Permissions(overwrite.allow),
deny=Permissions(overwrite.deny))
return PermissionOverwrite(allow=Permissions.none(), deny=Permissions.none())
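# Hedged sketch (channel/member objects hypothetical):
# ow = channel.overwrites_for(member)
# ow.allow and ow.deny are Permissions instances, so e.g.
# ow.allow.read_messages tells you if reading is explicitly allowed here.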
def permissions_for(self, member):
"""Handles permission resolution for the current :class:`Member`.
This function takes into consideration the following cases:
- Server owner
- Server roles
- Channel overrides
- Member overrides
- Whether the channel is the default channel.
Parameters
----------
member : :class:`Member`
The member to resolve permissions for.
Returns
-------
:class:`Permissions`
The resolved permissions for the member.
"""
# The current cases can be explained as:
# Server owner get all permissions -- no questions asked. Otherwise...
# The @everyone role gets the first application.
# After that, the applied roles that the user has in the channel
# (or otherwise) are then OR'd together.
# After the role permissions are resolved, the member permissions
# have to take into effect.
# After all that is done.. you have to do the following:
# If manage permissions is True, then all permissions are set to
# True. If the channel is the default channel then everyone gets
# read permissions regardless.
# The operation first takes into consideration the denied
# and then the allowed.
if member.id == self.server.owner.id:
return Permissions.all()
default = self.server.default_role
base = Permissions(default.permissions.value)
# Apply server roles that the member has.
for role in member.roles:
base.value |= role.permissions.value
# Server-wide Administrator -> True for everything
# Bypass all channel-specific overrides
if base.administrator:
return Permissions.all()
member_role_ids = set(map(lambda r: r.id, member.roles))
# Apply channel specific role permission overwrites
for overwrite in self._permission_overwrites:
if overwrite.type == 'role' and overwrite.id in member_role_ids:
base.handle_overwrite(allow=overwrite.allow, deny=overwrite.deny)
# Apply member specific permission overwrites
for overwrite in self._permission_overwrites:
if overwrite.type == 'member' and overwrite.id == member.id:
base.handle_overwrite(allow=overwrite.allow, deny=overwrite.deny)
if self.is_default:
base.read_messages = True
return base
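# Worked sketch of the resolution order above, with made-up bit values and
# assuming handle_overwrite(allow, deny) computes (value & ~deny) | allow:
# base = everyone(0b0001) | role_a(0b0100) -> 0b0101
# role overwrite deny=0b0001, allow=0b0010 -> (0b0101 & ~0b0001) | 0b0010 = 0b0110
# a member overwrite is then applied last, in the same deny-then-allow order.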
class PrivateChannel(Hashable):
"""Represents a Discord private channel.
Supported Operations:
+-----------+-------------------------------------------------+
| Operation | Description |
+===========+=================================================+
| x == y | Checks if two channels are equal. |
+-----------+-------------------------------------------------+
| x != y | Checks if two channels are not equal. |
+-----------+-------------------------------------------------+
| hash(x) | Returns the channel's hash. |
+-----------+-------------------------------------------------+
| str(x) | Returns the string "Direct Message with <User>" |
+-----------+-------------------------------------------------+
Attributes
----------
user : :class:`User`
The user you are participating with in the private channel.
id : str
The private channel ID.
is_private : bool
``True`` if the channel is a private channel (i.e. PM). ``True`` in this case.
"""
__slots__ = ['user', 'id', 'is_private']
def __init__(self, user, id, **kwargs):
self.user = user
self.id = id
self.is_private = True
def __str__(self):
return 'Direct Message with {0.name}'.format(self.user)
@property
def created_at(self):
"""Returns the private channel's creation time in UTC."""
return utils.snowflake_time(self.id)
def permissions_for(self, user):
"""Handles permission resolution for a :class:`User`.
This function is there for compatibility with :class:`Channel`.
Actual private messages do not really have the concept of permissions.
This returns all the Text related permissions set to true except:
- send_tts_messages: You cannot send TTS messages in a PM.
- manage_messages: You cannot delete others messages in a PM.
- mention_everyone: There is no one to mention in a PM.
Parameters
-----------
user : :class:`User`
The user to check permissions for.
Returns
--------
:class:`Permissions`
The resolved permissions for the user.
"""
base = Permissions.text()
base.send_tts_messages = False
base.manage_messages = False
base.mention_everyone = False
return base
| jhgg/discord.py | discord/channel.py | Python | mit | 12,385 | 0.001857 |
'''functions to work with contrasts for multiple tests
contrast matrices for comparing all pairs, all levels to reference level, ...
extension to 2-way groups in progress
TwoWay: class for bringing two-way analysis together and trying out
various helper functions
Idea for second part
- get all transformation matrices to move in between different full rank
parameterizations
- standardize to one parameterization to get all interesting effects.
- multivariate normal distribution
- exploit or expand what we have in LikelihoodResults, cov_params, f_test,
t_test, example: resols_dropf_full.cov_params(C2)
- connect to new multiple comparison for contrast matrices, based on
multivariate normal or t distribution (Hothorn, Bretz, Westfall)
'''
import numpy as np
#next 3 functions copied from multicomp.py
def contrast_allpairs(nm):
'''contrast or restriction matrix for all pairs of nm variables
Parameters
----------
nm : int
Returns
-------
contr : ndarray, 2d, (nm*(nm-1)/2, nm)
contrast matrix for all pairwise comparisons
'''
contr = []
for i in range(nm):
for j in range(i+1, nm):
contr_row = np.zeros(nm)
contr_row[i] = 1
contr_row[j] = -1
contr.append(contr_row)
return np.array(contr)
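# Quick doctest-style check (float dtype because np.zeros defaults to float64):
# >>> contrast_allpairs(3)
# array([[ 1., -1., 0.],
# [ 1., 0., -1.],
# [ 0., 1., -1.]])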
def contrast_all_one(nm):
'''contrast or restriction matrix for all against first comparison
Parameters
----------
nm : int
Returns
-------
contr : ndarray, 2d, (nm-1, nm)
contrast matrix for all against first comparisons
'''
contr = np.column_stack((np.ones(nm-1), -np.eye(nm-1)))
return contr
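# Quick check (the -0. entries are numpy's negated zeros from -np.eye):
# >>> contrast_all_one(3)
# array([[ 1., -1., -0.],
# [ 1., -0., -1.]])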
def contrast_diff_mean(nm):
'''contrast or restriction matrix for all against mean comparison
Parameters
----------
nm : int
Returns
-------
contr : ndarray, 2d, (nm-1, nm)
contrast matrix for all against mean comparisons
'''
return np.eye(nm) - np.ones((nm,nm))/nm
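# Quick check: each row compares one level against the grand mean.
# >>> np.round(contrast_diff_mean(3), 2)
# array([[ 0.67, -0.33, -0.33],
# [-0.33, 0.67, -0.33],
# [-0.33, -0.33, 0.67]])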
def signstr(x, noplus=False):
if x in [-1,0,1]:
if not noplus:
return '+' if np.sign(x)>=0 else '-'
else:
return '' if np.sign(x)>=0 else '-'
else:
return str(x)
def contrast_labels(contrasts, names, reverse=False):
if reverse:
sl = slice(None, None, -1)
else:
sl = slice(None)
labels = [''.join(['%s%s' % (signstr(c, noplus=True),v)
for c,v in zip(row, names)[sl] if c != 0])
for row in contrasts]
return labels
def contrast_product(names1, names2, intgroup1=None, intgroup2=None, pairs=False):
'''build contrast matrices for products of two categorical variables
this is an experimental script and should be converted to a class
Parameters
----------
names1, names2 : lists of strings
contains the list of level labels for each categorical variable
intgroup1, intgroup2 : ndarrays TODO: this part is not tested or finished yet
categorical variable
Notes
-----
This creates a full rank matrix. It does not do all pairwise comparisons,
parameterization is using contrast_all_one to get differences with first
level.
? does contrast_all_pairs work as a plugin to get all pairs ?
'''
n1 = len(names1)
n2 = len(names2)
names_prod = ['%s_%s' % (i,j) for i in names1 for j in names2]
ee1 = np.zeros((1,n1))
ee1[0,0] = 1
if not pairs:
dd = np.r_[ee1, -contrast_all_one(n1)]
else:
dd = np.r_[ee1, -contrast_allpairs(n1)]
contrast_prod = np.kron(dd[1:], np.eye(n2))
names_contrast_prod0 = contrast_labels(contrast_prod, names_prod, reverse=True)
names_contrast_prod = [''.join(['%s%s' % (signstr(c, noplus=True),v)
for c,v in zip(row, names_prod)[::-1] if c != 0])
for row in contrast_prod]
ee2 = np.zeros((1,n2))
ee2[0,0] = 1
#dd2 = np.r_[ee2, -contrast_all_one(n2)]
if not pairs:
dd2 = np.r_[ee2, -contrast_all_one(n2)]
else:
dd2 = np.r_[ee2, -contrast_allpairs(n2)]
contrast_prod2 = np.kron(np.eye(n1), dd2[1:])
names_contrast_prod2 = [''.join(['%s%s' % (signstr(c, noplus=True),v)
for c,v in zip(row, names_prod)[::-1] if c != 0])
for row in contrast_prod2]
if (intgroup1 is not None) and (intgroup2 is not None):
d1, _ = dummy_1d(intgroup1)
d2, _ = dummy_1d(intgroup2)
dummy = dummy_product(d1, d2)
else:
dummy = None
return (names_prod, contrast_prod, names_contrast_prod,
contrast_prod2, names_contrast_prod2, dummy)
def dummy_1d(x, varname=None):
'''dummy variable for id integer groups
Parameters
---------
x : ndarray, 1d
categorical variable, requires integers if varname is None
varname : string
name of the variable used in labels for category levels
Returns
-------
dummy : ndarray, 2d
array of dummy variables, one column for each level of the
category (full set)
labels : list of strings
labels for the columns, i.e. levels of each category
Notes
-----
use tools.categorical instead for more options
See Also
--------
statsmodels.tools.categorical
Examples
--------
>>> x = np.array(['F', 'F', 'M', 'M', 'F', 'F', 'M', 'M', 'F', 'F', 'M', 'M'],
dtype='|S1')
>>> dummy_1d(x, varname='gender')
(array([[1, 0],
[1, 0],
[0, 1],
[0, 1],
[1, 0],
[1, 0],
[0, 1],
[0, 1],
[1, 0],
[1, 0],
[0, 1],
[0, 1]]), ['gender_F', 'gender_M'])
'''
if varname is None: #assumes integer
labels = ['level_%d' % i for i in range(x.max() + 1)]
return (x[:,None]==np.arange(x.max()+1)).astype(int), labels
else:
grouplabels = np.unique(x)
labels = [varname + '_%s' % str(i) for i in grouplabels]
return (x[:,None]==grouplabels).astype(int), labels
def dummy_product(d1, d2, method='full'):
'''dummy variable from product of two dummy variables
Parameters
----------
d1, d2 : ndarray
two dummy variables, assumes full set for methods 'drop-last'
and 'drop-first'
method : {'full', 'drop-last', 'drop-first'}
'full' returns the full product, encoding of intersection of
categories.
The drop methods provide a difference dummy encoding:
(constant, main effects, interaction effects). The first or last columns
of the dummy variable (i.e. levels) are dropped to get full rank
dummy matrix.
Returns
-------
dummy : ndarray
dummy variable for product, see method
'''
if method == 'full':
dd = (d1[:,:,None]*d2[:,None,:]).reshape(d1.shape[0],-1)
elif method == 'drop-last': #same as SAS transreg
d12rl = dummy_product(d1[:,:-1], d2[:,:-1])
dd = np.column_stack((np.ones(d1.shape[0], int), d1[:,:-1], d2[:,:-1],d12rl))
#Note: dtype int should preserve dtype of d1 and d2
elif method == 'drop-first':
d12r = dummy_product(d1[:,1:], d2[:,1:])
dd = np.column_stack((np.ones(d1.shape[0], int), d1[:,1:], d2[:,1:],d12r))
else:
raise ValueError('method not recognized')
return dd
def dummy_limits(d):
'''start and endpoints of groups in a sorted dummy variable array
helper function for nested categories
Examples
--------
>>> d1 = np.array([[1, 0, 0],
[1, 0, 0],
[1, 0, 0],
[1, 0, 0],
[0, 1, 0],
[0, 1, 0],
[0, 1, 0],
[0, 1, 0],
[0, 0, 1],
[0, 0, 1],
[0, 0, 1],
[0, 0, 1]])
>>> dummy_limits(d1)
(array([0, 4, 8]), array([ 4, 8, 12]))
get group slices from an array
>>> [np.arange(d1.shape[0])[b:e] for b,e in zip(*dummy_limits(d1))]
[array([0, 1, 2, 3]), array([4, 5, 6, 7]), array([ 8, 9, 10, 11])]
>>> [np.arange(d1.shape[0])[b:e] for b,e in zip(*dummy_limits(d1))]
[array([0, 1, 2, 3]), array([4, 5, 6, 7]), array([ 8, 9, 10, 11])]
'''
nobs, nvars = d.shape
start1, col1 = np.nonzero(np.diff(d,axis=0)==1)
end1, col1_ = np.nonzero(np.diff(d,axis=0)==-1)
cc = np.arange(nvars)
#print cc, np.r_[[0], col1], np.r_[col1_, [nvars-1]]
if ((not (np.r_[[0], col1] == cc).all())
or (not (np.r_[col1_, [nvars-1]] == cc).all())):
raise ValueError('dummy variable is not sorted')
start = np.r_[[0], start1+1]
end = np.r_[end1+1, [nobs]]
return start, end
def dummy_nested(d1, d2, method='full'):
'''unfinished and incomplete mainly copy past dummy_product
dummy variable from product of two dummy variables
Parameters
----------
d1, d2 : ndarray
two dummy variables, d2 is assumed to be nested in d1
Assumes full set for methods 'drop-last' and 'drop-first'.
method : {'full', 'drop-last', 'drop-first'}
'full' returns the full product, which in this case is d2.
The drop methods provide an effects encoding:
(constant, main effects, subgroup effects). The first or last columns
of the dummy variable (i.e. levels) are dropped to get full rank
encoding.
Returns
-------
dummy : ndarray
dummy variable for product, see method
'''
if method == 'full':
return d2
start1, end1 = dummy_limits(d1)
start2, end2 = dummy_limits(d2)
first = np.in1d(start2, start1)
last = np.in1d(end2, end1)
equal = (first == last)
col_dropf = ~first*~equal
col_dropl = ~last*~equal
if method == 'drop-last':
d12rl = dummy_product(d1[:,:-1], d2[:,:-1])
dd = np.column_stack((np.ones(d1.shape[0], int), d1[:,:-1], d2[:,col_dropl]))
#Note: dtype int should preserve dtype of d1 and d2
elif method == 'drop-first':
d12r = dummy_product(d1[:,1:], d2[:,1:])
dd = np.column_stack((np.ones(d1.shape[0], int), d1[:,1:], d2[:,col_dropf]))
else:
raise ValueError('method not recognized')
return dd, col_dropf, col_dropl
class DummyTransform(object):
'''Conversion between full rank dummy encodings
y = X b + u
b = C a
a = C^{-1} b
y = X C a + u
define Z = X C, then
y = Z a + u
contrasts:
R_b b = r
R_a a = R_b C a = r
where R_a = R_b C
Here C is the transform matrix, with dot_left and dot_right as the main
methods, and the same for the inverse transform matrix, C^{-1}
Note:
- The class was mainly written to keep left and right straight.
- No checking is done.
- not sure yet if method names make sense
'''
def __init__(self, d1, d2):
'''C such that d1 C = d2, with d1 = X, d2 = Z
should be (x, z) in arguments ?
'''
self.transf_matrix = np.linalg.lstsq(d1, d2)[0]
self.invtransf_matrix = np.linalg.lstsq(d2, d1)[0]
def dot_left(self, a):
''' b = C a
'''
return np.dot(self.transf_matrix, a)
def dot_right(self, x):
''' z = x C
'''
return np.dot(x, self.transf_matrix)
def inv_dot_left(self, b):
''' a = C^{-1} b
'''
return np.dot(self.invtransf_matrix, b)
def inv_dot_right(self, z):
''' x = z C^{-1}
'''
return np.dot(z, self.invtransf_matrix)
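# Hedged sketch mirroring the __main__ checks below: given full and
# drop-first dummy encodings of the same design,
# tr = DummyTransform(dd_full, dd_dropf)
# tr.inv_dot_left(params_full) # recovers a = C^{-1} b, the drop-first params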
def groupmean_d(x, d):
'''groupmeans using dummy variables
Parameters
----------
x : array_like, ndim
data array, tested for 1,2 and 3 dimensions
d : ndarray, 1d
dummy variable, needs to have the same length
as x in axis 0.
Returns
-------
groupmeans : ndarray, ndim-1
means for each group along axis 0, the levels
of the groups are the last axis
Notes
-----
This will be memory intensive if there are many levels
in the categorical variable, i.e. many columns in the
dummy variable. In this case it is recommended to use
a more efficient version.
'''
x = np.asarray(x)
## if x.ndim == 1:
## nvars = 1
## else:
nvars = x.ndim + 1
sli = [slice(None)] + [None]*(nvars-2) + [slice(None)]
return (x[...,None] * d[sli]).sum(0)*1./d.sum(0)
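# Quick check, reusing the example data from the docstring at module end:
# >>> groupmean_d(np.arange(12), dummy_1d(np.arange(12)//4)[0])
# array([ 1.5, 5.5, 9.5])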
class TwoWay(object):
'''a wrapper class for two way anova type of analysis with OLS
currently mainly to bring things together
Notes
-----
unclear: adding multiple test might assume block design or orthogonality
This estimates the full dummy version with OLS.
The drop first dummy representation can be recovered through the
transform method.
TODO: add more methods, tests, pairwise, multiple, marginal effects
try out what can be added for userfriendly access.
missing: ANOVA table
'''
def __init__(self, endog, factor1, factor2, varnames=None):
self.nobs = factor1.shape[0]
if varnames is None:
vname1 = 'a'
vname2 = 'b'
else:
vname1, vname2 = varnames
self.d1, self.d1_labels = d1, d1_labels = dummy_1d(factor1, vname1)
self.d2, self.d2_labels = d2, d2_labels = dummy_1d(factor2, vname2)
self.nlevel1 = nlevel1 = d1.shape[1]
self.nlevel2 = nlevel2 = d2.shape[1]
#get product dummies
res = contrast_product(d1_labels, d2_labels)
prodlab, C1, C1lab, C2, C2lab, _ = res
self.prod_label, self.C1, self.C1_label, self.C2, self.C2_label, _ = res
dp_full = dummy_product(d1, d2, method='full')
dp_dropf = dummy_product(d1, d2, method='drop-first')
self.transform = DummyTransform(dp_full, dp_dropf)
#estimate the model
self.nvars = dp_full.shape[1]
self.exog = dp_full
self.resols = sm.OLS(endog, dp_full).fit()
self.params = self.resols.params
#get transformed parameters, (constant, main, interaction effect)
self.params_dropf = self.transform.inv_dot_left(self.params)
self.start_interaction = 1 + (nlevel1 - 1) + (nlevel2 - 1)
self.n_interaction = self.nvars - self.start_interaction
#convert to cached property
def r_nointer(self):
'''contrast/restriction matrix for no interaction
'''
nia = self.n_interaction
R_nointer = np.hstack((np.zeros((nia, self.nvars-nia)), np.eye(nia)))
#inter_direct = resols_full_dropf.tval[-nia:]
R_nointer_transf = self.transform.inv_dot_right(R_nointer)
self.R_nointer_transf = R_nointer_transf
return R_nointer_transf
def ttest_interaction(self):
'''t-tests that the interaction terms are zero
'''
#use self.r_nointer instead
nia = self.n_interaction
R_nointer = np.hstack((np.zeros((nia, self.nvars-nia)), np.eye(nia)))
#inter_direct = resols_full_dropf.tval[-nia:]
R_nointer_transf = self.transform.inv_dot_right(R_nointer)
self.R_nointer_transf = R_nointer_transf
t_res = self.resols.t_test(R_nointer_transf)
return t_res
def ftest_interaction(self):
'''F-test that the interaction terms are jointly zero
'''
R_nointer_transf = self.r_nointer()
return self.resols.f_test(R_nointer_transf)
def ttest_conditional_effect(self, factorind):
if factorind == 1:
return self.resols.t_test(self.C1), self.C1_label
else:
return self.resols.t_test(self.C2), self.C2_label
def summary_coeff(self):
from statsmodels.iolib import SimpleTable
params_arr = self.params.reshape(self.nlevel1, self.nlevel2)
stubs = self.d1_labels
headers = self.d2_labels
title = 'Estimated Coefficients by factors'
table_fmt = dict(
data_fmts = ["%#10.4g"]*self.nlevel2)
return SimpleTable(params_arr, headers, stubs, title=title,
txt_fmt=table_fmt)
#--------------- tests
from numpy.testing import assert_equal
#TODO: several tests still missing, several are in the example with print
class TestContrastTools(object):
def __init__(self):
self.v1name = ['a0', 'a1', 'a2']
self.v2name = ['b0', 'b1']
self.d1 = np.array([[1, 0, 0],
[1, 0, 0],
[1, 0, 0],
[1, 0, 0],
[0, 1, 0],
[0, 1, 0],
[0, 1, 0],
[0, 1, 0],
[0, 0, 1],
[0, 0, 1],
[0, 0, 1],
[0, 0, 1]])
def test_dummy_1d(self):
x = np.array(['F', 'F', 'M', 'M', 'F', 'F', 'M', 'M', 'F', 'F', 'M', 'M'],
dtype='|S1')
d, labels = (np.array([[1, 0],
[1, 0],
[0, 1],
[0, 1],
[1, 0],
[1, 0],
[0, 1],
[0, 1],
[1, 0],
[1, 0],
[0, 1],
[0, 1]]), ['gender_F', 'gender_M'])
res_d, res_labels = dummy_1d(x, varname='gender')
assert_equal(res_d, d)
assert_equal(res_labels, labels)
def test_contrast_product(self):
res_cp = contrast_product(self.v1name, self.v2name)
res_t = [0]*6
res_t[0] = ['a0_b0', 'a0_b1', 'a1_b0', 'a1_b1', 'a2_b0', 'a2_b1']
res_t[1] = np.array([[-1., 0., 1., 0., 0., 0.],
[ 0., -1., 0., 1., 0., 0.],
[-1., 0., 0., 0., 1., 0.],
[ 0., -1., 0., 0., 0., 1.]])
res_t[2] = ['a1_b0-a0_b0', 'a1_b1-a0_b1', 'a2_b0-a0_b0', 'a2_b1-a0_b1']
res_t[3] = np.array([[-1., 1., 0., 0., 0., 0.],
[ 0., 0., -1., 1., 0., 0.],
[ 0., 0., 0., 0., -1., 1.]])
res_t[4] = ['a0_b1-a0_b0', 'a1_b1-a1_b0', 'a2_b1-a2_b0']
for ii in range(5):
np.testing.assert_equal(res_cp[ii], res_t[ii], err_msg=str(ii))
def test_dummy_limits(self):
b,e = dummy_limits(self.d1)
assert_equal(b, np.array([0, 4, 8]))
assert_equal(e, np.array([ 4, 8, 12]))
if __name__ == '__main__':
tt = TestContrastTools()
tt.test_contrast_product()
tt.test_dummy_1d()
tt.test_dummy_limits()
import statsmodels.api as sm
examples = ['small', 'large', None][1]
v1name = ['a0', 'a1', 'a2']
v2name = ['b0', 'b1']
res_cp = contrast_product(v1name, v2name)
print res_cp
y = np.arange(12)
x1 = np.arange(12)//4
x2 = np.arange(12)//2%2
if 'small' in examples:
d1, d1_labels = dummy_1d(x1)
d2, d2_labels = dummy_1d(x2)
if 'large' in examples:
x1 = np.repeat(x1, 5, axis=0)
x2 = np.repeat(x2, 5, axis=0)
nobs = x1.shape[0]
d1, d1_labels = dummy_1d(x1)
d2, d2_labels = dummy_1d(x2)
dd_full = dummy_product(d1, d2, method='full')
dd_dropl = dummy_product(d1, d2, method='drop-last')
dd_dropf = dummy_product(d1, d2, method='drop-first')
#Note: full parameterization of dummies is orthogonal
#np.eye(6)*10 in "large" example
print (np.dot(dd_full.T, dd_full) == np.diag(dd_full.sum(0))).all()
#check that transforms work
#generate 3 data sets with the 3 different parameterizations
effect_size = [1., 0.01][1]
noise_scale = [0.001, 0.1][0]
noise = noise_scale * np.random.randn(nobs)
beta = effect_size * np.arange(1,7)
ydata_full = (dd_full * beta).sum(1) + noise
ydata_dropl = (dd_dropl * beta).sum(1) + noise
ydata_dropf = (dd_dropf * beta).sum(1) + noise
resols_full_full = sm.OLS(ydata_full, dd_full).fit()
resols_full_dropf = sm.OLS(ydata_full, dd_dropf).fit()
params_f_f = resols_full_full.params
params_f_df = resols_full_dropf.params
resols_dropf_full = sm.OLS(ydata_dropf, dd_full).fit()
resols_dropf_dropf = sm.OLS(ydata_dropf, dd_dropf).fit()
params_df_f = resols_dropf_full.params
params_df_df = resols_dropf_dropf.params
tr_of = np.linalg.lstsq(dd_dropf, dd_full)[0]
tr_fo = np.linalg.lstsq(dd_full, dd_dropf)[0]
print np.dot(tr_fo, params_df_df) - params_df_f
print np.dot(tr_of, params_f_f) - params_f_df
transf_f_df = DummyTransform(dd_full, dd_dropf)
print np.max(np.abs((dd_full - transf_f_df.inv_dot_right(dd_dropf))))
print np.max(np.abs((dd_dropf - transf_f_df.dot_right(dd_full))))
print np.max(np.abs((params_df_df
- transf_f_df.inv_dot_left(params_df_f))))
np.max(np.abs((params_f_df
- transf_f_df.inv_dot_left(params_f_f))))
prodlab, C1, C1lab, C2, C2lab,_ = contrast_product(v1name, v2name)
print '\ntvalues for no effect of factor 1'
print 'each test is conditional on a level of factor 2'
print C1lab
print resols_dropf_full.t_test(C1).tvalue
print '\ntvalues for no effect of factor 2'
print 'each test is conditional on a level of factor 1'
print C2lab
print resols_dropf_full.t_test(C2).tvalue
#covariance matrix of restrictions C2, note: orthogonal
resols_dropf_full.cov_params(C2)
#testing for no interaction effect
R_noint = np.hstack((np.zeros((2,4)), np.eye(2)))
inter_direct = resols_full_dropf.tvalues[-2:]
inter_transf = resols_full_full.t_test(transf_f_df.inv_dot_right(R_noint)).tvalue
print np.max(np.abs((inter_direct - inter_transf)))
#now with class version
tw = TwoWay(ydata_dropf, x1, x2)
print tw.ttest_interaction().tvalue
print tw.ttest_interaction().pvalue
print tw.ftest_interaction().fvalue
print tw.ftest_interaction().pvalue
print tw.ttest_conditional_effect(1)[0].tvalue
print tw.ttest_conditional_effect(2)[0].tvalue
print tw.summary_coeff()
''' documentation for early examples while developing - some have changed already
>>> y = np.arange(12)
>>> y
array([ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11])
>>> x1 = np.arange(12)//4
>>> x1
array([0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2])
>>> x2 = np.arange(12)//2%2
>>> x2
array([0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1])
>>> d1 = dummy_1d(x1)
>>> d1
array([[1, 0, 0],
[1, 0, 0],
[1, 0, 0],
[1, 0, 0],
[0, 1, 0],
[0, 1, 0],
[0, 1, 0],
[0, 1, 0],
[0, 0, 1],
[0, 0, 1],
[0, 0, 1],
[0, 0, 1]])
>>> d2 = dummy_1d(x2)
>>> d2
array([[1, 0],
[1, 0],
[0, 1],
[0, 1],
[1, 0],
[1, 0],
[0, 1],
[0, 1],
[1, 0],
[1, 0],
[0, 1],
[0, 1]])
>>> d12 = dummy_product(d1, d2)
>>> d12
array([[1, 0, 0, 0, 0, 0],
[1, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0],
[0, 0, 1, 0, 0, 0],
[0, 0, 1, 0, 0, 0],
[0, 0, 0, 1, 0, 0],
[0, 0, 0, 1, 0, 0],
[0, 0, 0, 0, 1, 0],
[0, 0, 0, 0, 1, 0],
[0, 0, 0, 0, 0, 1],
[0, 0, 0, 0, 0, 1]])
>>> d12rl = dummy_product(d1[:,:-1], d2[:,:-1])
>>> np.column_stack((np.ones(d1.shape[0]), d1[:,:-1], d2[:,:-1],d12rl))
array([[ 1., 1., 0., 1., 1., 0.],
[ 1., 1., 0., 1., 1., 0.],
[ 1., 1., 0., 0., 0., 0.],
[ 1., 1., 0., 0., 0., 0.],
[ 1., 0., 1., 1., 0., 1.],
[ 1., 0., 1., 1., 0., 1.],
[ 1., 0., 1., 0., 0., 0.],
[ 1., 0., 1., 0., 0., 0.],
[ 1., 0., 0., 1., 0., 0.],
[ 1., 0., 0., 1., 0., 0.],
[ 1., 0., 0., 0., 0., 0.],
[ 1., 0., 0., 0., 0., 0.]])
'''
#nprod = ['%s_%s' % (i,j) for i in ['a0', 'a1', 'a2'] for j in ['b0', 'b1']]
#>>> [''.join(['%s%s' % (signstr(c),v) for c,v in zip(row, nprod) if c != 0])
# for row in np.kron(dd[1:], np.eye(2))]
'''
>>> nprod = ['%s_%s' % (i,j) for i in ['a0', 'a1', 'a2'] for j in ['b0', 'b1']]
>>> nprod
['a0_b0', 'a0_b1', 'a1_b0', 'a1_b1', 'a2_b0', 'a2_b1']
>>> [''.join(['%s%s' % (signstr(c),v) for c,v in zip(row, nprod) if c != 0]) for row in np.kron(dd[1:], np.eye(2))]
['-a0b0+a1b0', '-a0b1+a1b1', '-a0b0+a2b0', '-a0b1+a2b1']
>>> [''.join(['%s%s' % (signstr(c),v) for c,v in zip(row, nprod)[::-1] if c != 0]) for row in np.kron(dd[1:], np.eye(2))]
['+a1_b0-a0_b0', '+a1_b1-a0_b1', '+a2_b0-a0_b0', '+a2_b1-a0_b1']
>>> np.r_[[[1,0,0,0,0]],contrast_all_one(5)]
array([[ 1., 0., 0., 0., 0.],
[ 1., -1., 0., 0., 0.],
[ 1., 0., -1., 0., 0.],
[ 1., 0., 0., -1., 0.],
[ 1., 0., 0., 0., -1.]])
>>> idxprod = [(i,j) for i in range(3) for j in range(2)]
>>> idxprod
[(0, 0), (0, 1), (1, 0), (1, 1), (2, 0), (2, 1)]
>>> np.array(idxprod).reshape(2,3,2,order='F')[:,:,0]
array([[0, 1, 2],
[0, 1, 2]])
>>> np.array(idxprod).reshape(2,3,2,order='F')[:,:,1]
array([[0, 0, 0],
[1, 1, 1]])
>>> dd3_ = np.r_[[[0,0,0]],contrast_all_one(3)]
pairwise contrasts and reparameterization
dd = np.r_[[[1,0,0,0,0]],-contrast_all_one(5)]
>>> dd
array([[ 1., 0., 0., 0., 0.],
[-1., 1., 0., 0., 0.],
[-1., 0., 1., 0., 0.],
[-1., 0., 0., 1., 0.],
[-1., 0., 0., 0., 1.]])
>>> np.dot(dd.T, np.arange(5))
array([-10., 1., 2., 3., 4.])
>>> np.round(np.linalg.inv(dd.T)).astype(int)
array([[1, 1, 1, 1, 1],
[0, 1, 0, 0, 0],
[0, 0, 1, 0, 0],
[0, 0, 0, 1, 0],
[0, 0, 0, 0, 1]])
>>> np.round(np.linalg.inv(dd)).astype(int)
array([[1, 0, 0, 0, 0],
[1, 1, 0, 0, 0],
[1, 0, 1, 0, 0],
[1, 0, 0, 1, 0],
[1, 0, 0, 0, 1]])
>>> dd
array([[ 1., 0., 0., 0., 0.],
[-1., 1., 0., 0., 0.],
[-1., 0., 1., 0., 0.],
[-1., 0., 0., 1., 0.],
[-1., 0., 0., 0., 1.]])
>>> ddinv=np.round(np.linalg.inv(dd.T)).astype(int)
>>> np.dot(ddinv, np.arange(5))
array([10, 1, 2, 3, 4])
>>> np.dot(dd, np.arange(5))
array([ 0., 1., 2., 3., 4.])
>>> np.dot(dd, 5+np.arange(5))
array([ 5., 1., 2., 3., 4.])
>>> ddinv2 = np.round(np.linalg.inv(dd)).astype(int)
>>> np.dot(ddinv2, np.arange(5))
array([0, 1, 2, 3, 4])
>>> np.dot(ddinv2, 5+np.arange(5))
array([ 5, 11, 12, 13, 14])
>>> np.dot(ddinv2, [5, 0, 0 , 1, 2])
array([5, 5, 5, 6, 7])
>>> np.dot(ddinv2, dd)
array([[ 1., 0., 0., 0., 0.],
[ 0., 1., 0., 0., 0.],
[ 0., 0., 1., 0., 0.],
[ 0., 0., 0., 1., 0.],
[ 0., 0., 0., 0., 1.]])
>>> dd3 = -np.r_[[[1,0,0]],contrast_all_one(3)]
>>> dd2 = -np.r_[[[1,0]],contrast_all_one(2)]
>>> np.kron(np.eye(3), dd2)
array([[-1., 0., 0., 0., 0., 0.],
[-1., 1., 0., 0., 0., 0.],
[ 0., 0., -1., 0., 0., 0.],
[ 0., 0., -1., 1., 0., 0.],
[ 0., 0., 0., 0., -1., 0.],
[ 0., 0., 0., 0., -1., 1.]])
>>> dd2
array([[-1., 0.],
[-1., 1.]])
>>> np.kron(np.eye(3), dd2[1:])
array([[-1., 1., 0., 0., 0., 0.],
[ 0., 0., -1., 1., 0., 0.],
[ 0., 0., 0., 0., -1., 1.]])
>>> np.kron(dd[1:], np.eye(2))
array([[-1., 0., 1., 0., 0., 0.],
[ 0., -1., 0., 1., 0., 0.],
[-1., 0., 0., 0., 1., 0.],
[ 0., -1., 0., 0., 0., 1.]])
d_ = np.r_[[[1,0,0,0,0]],contrast_all_one(5)]
>>> d_
array([[ 1., 0., 0., 0., 0.],
[ 1., -1., 0., 0., 0.],
[ 1., 0., -1., 0., 0.],
[ 1., 0., 0., -1., 0.],
[ 1., 0., 0., 0., -1.]])
>>> np.round(np.linalg.pinv(d_)).astype(int)
array([[ 1, 0, 0, 0, 0],
[ 1, -1, 0, 0, 0],
[ 1, 0, -1, 0, 0],
[ 1, 0, 0, -1, 0],
[ 1, 0, 0, 0, -1]])
>>> np.linalg.inv(d_).astype(int)
array([[ 1, 0, 0, 0, 0],
[ 1, -1, 0, 0, 0],
[ 1, 0, -1, 0, 0],
[ 1, 0, 0, -1, 0],
[ 1, 0, 0, 0, -1]])
group means
>>> sli = [slice(None)] + [None]*(3-2) + [slice(None)]
>>> (np.column_stack((y, x1, x2))[...,None] * d1[sli]).sum(0)*1./d1.sum(0)
array([[ 1.5, 5.5, 9.5],
[ 0. , 1. , 2. ],
[ 0.5, 0.5, 0.5]])
>>> [(z[:,None] * d1).sum(0)*1./d1.sum(0) for z in np.column_stack((y, x1, x2)).T]
[array([ 1.5, 5.5, 9.5]), array([ 0., 1., 2.]), array([ 0.5, 0.5, 0.5])]
>>>
'''
| yarikoptic/pystatsmodels | statsmodels/sandbox/stats/contrast_tools.py | Python | bsd-3-clause | 28,737 | 0.004941 |
import sublime, sublime_plugin
from indenttxt import indentparser
class IndentToList(sublime_plugin.TextCommand):
def run(self, edit):
parser = indentparser.IndentTxtParser()
#Get current selection
sels = self.view.sel()
selsParsed = 0
if(len(sels) > 0):
for sel in sels:
#Make sure selection isn't just a cursor
if(abs(sel.b - sel.a) > 0):
self.parseRegion(parser, sel, edit)
selsParsed += 1
#All selections just cursor marks?
if(selsParsed == 0):
region = sublime.Region(0, self.view.size() - 1)
self.parseRegion(parser, region, edit)
def parseRegion(self, parser, region, edit):
lines = self.view.line(region)
text = self.view.substr(lines)
indented = parser.parseText(text)
newview = self.view.window().new_file()
newview.insert(edit, 0, indented)
| Harrison-M/indent.txt-sublime | indentsublime.py | Python | mit | 967 | 0.004137 |
"""
# The NASA93 Data Set
Standard header:
"""
from __future__ import division,print_function
import sys
sys.dont_write_bytecode = True
from lib import *
"""
Data:
"""
def nasa93():
vl=1;l=2;n=3;h=4;vh=5;xh=6
return data(indep= [
# 0..8
'Prec', 'Flex', 'Resl', 'Team', 'Pmat', 'rely', 'data.dat', 'cplx', 'ruse',
# 9 .. 17
'docu', 'time', 'stor', 'pvol', 'acap', 'pcap', 'pcon', 'aexp', 'plex',
# 18 .. 25
'ltex', 'tool', 'site', 'sced', 'kloc'],
less = ['effort', 'defects', 'months'],
_rows=[
[h,h,h,vh,h,h,l,h,n,n,n,n,l,n,n,n,n,n,h,n,n,l,25.9,117.6,808,15.3],
[h,h,h,vh,h,h,l,h,n,n,n,n,l,n,n,n,n,n,h,n,n,l,24.6,117.6,767,15.0],
[h,h,h,vh,h,h,l,h,n,n,n,n,l,n,n,n,n,n,h,n,n,l,7.7,31.2,240,10.1],
[h,h,h,vh,h,h,l,h,n,n,n,n,l,n,n,n,n,n,h,n,n,l,8.2,36,256,10.4],
[h,h,h,vh,h,h,l,h,n,n,n,n,l,n,n,n,n,n,h,n,n,l,9.7,25.2,302,11.0],
[h,h,h,vh,h,h,l,h,n,n,n,n,l,n,n,n,n,n,h,n,n,l,2.2,8.4,69,6.6],
[h,h,h,vh,h,h,l,h,n,n,n,n,l,n,n,n,n,n,h,n,n,l,3.5,10.8,109,7.8],
[h,h,h,vh,h,h,l,h,n,n,n,n,l,n,n,n,n,n,h,n,n,l,66.6,352.8,2077,21.0],
[h,h,h,vh,h,h,l,h,n,n,xh,xh,l,h,h,n,h,n,h,h,n,n,7.5,72,226,13.6],
[h,h,h,vh,n,n,l,h,n,n,n,n,l,h,vh,n,vh,n,h,n,n,n,20,72,566,14.4],
[h,h,h,vh,n,n,l,h,n,n,n,n,l,h,h,n,vh,n,h,n,n,n,6,24,188,9.9],
[h,h,h,vh,n,n,l,h,n,n,n,n,l,h,vh,n,vh,n,h,n,n,n,100,360,2832,25.2],
[h,h,h,vh,n,n,l,h,n,n,n,n,l,h,n,n,vh,n,l,n,n,n,11.3,36,456,12.8],
[h,h,h,vh,n,n,l,h,n,n,n,n,h,h,h,n,h,l,vl,n,n,n,100,215,5434,30.1],
[h,h,h,vh,n,n,l,h,n,n,n,n,l,h,h,n,vh,n,h,n,n,n,20,48,626,15.1],
[h,h,h,vh,n,n,l,h,n,n,n,n,l,h,n,n,n,n,vl,n,n,n,100,360,4342,28.0],
[h,h,h,vh,n,n,l,h,n,n,n,xh,l,h,vh,n,vh,n,h,n,n,n,150,324,4868,32.5],
[h,h,h,vh,n,n,l,h,n,n,n,n,l,h,h,n,h,n,h,n,n,n,31.5,60,986,17.6],
[h,h,h,vh,n,n,l,h,n,n,n,n,l,h,h,n,vh,n,h,n,n,n,15,48,470,13.6],
[h,h,h,vh,n,n,l,h,n,n,n,xh,l,h,n,n,h,n,h,n,n,n,32.5,60,1276,20.8],
[h,h,h,vh,h,h,l,h,n,n,n,n,l,n,n,n,n,n,h,n,n,l,19.7,60,614,13.9],
[h,h,h,vh,h,h,l,h,n,n,n,n,l,n,n,n,n,n,h,n,n,l,66.6,300,2077,21.0],
[h,h,h,vh,h,h,l,h,n,n,n,n,l,n,n,n,n,n,h,n,n,l,29.5,120,920,16.0],
[h,h,h,vh,n,h,n,n,n,n,h,n,n,n,h,n,h,n,n,n,n,n,15,90,575,15.2],
[h,h,h,vh,n,h,n,h,n,n,n,n,n,n,h,n,h,n,n,n,n,n,38,210,1553,21.3],
[h,h,h,vh,n,n,n,n,n,n,n,n,n,n,h,n,h,n,n,n,n,n,10,48,427,12.4],
[h,h,h,vh,h,n,vh,h,n,n,vh,vh,l,vh,n,n,h,l,h,n,n,l,15.4,70,765,14.5],
[h,h,h,vh,h,n,vh,h,n,n,vh,vh,l,vh,n,n,h,l,h,n,n,l,48.5,239,2409,21.4],
[h,h,h,vh,h,n,vh,h,n,n,vh,vh,l,vh,n,n,h,l,h,n,n,l,16.3,82,810,14.8],
[h,h,h,vh,h,n,vh,h,n,n,vh,vh,l,vh,n,n,h,l,h,n,n,l,12.8,62,636,13.6],
[h,h,h,vh,h,n,vh,h,n,n,vh,vh,l,vh,n,n,h,l,h,n,n,l,32.6,170,1619,18.7],
[h,h,h,vh,h,n,vh,h,n,n,vh,vh,l,vh,n,n,h,l,h,n,n,l,35.5,192,1763,19.3],
[h,h,h,vh,h,h,l,h,n,n,n,n,l,n,n,n,n,n,h,n,n,l,5.5,18,172,9.1],
[h,h,h,vh,h,h,l,h,n,n,n,n,l,n,n,n,n,n,h,n,n,l,10.4,50,324,11.2],
[h,h,h,vh,h,h,l,h,n,n,n,n,l,n,n,n,n,n,h,n,n,l,14,60,437,12.4],
[h,h,h,vh,n,h,n,h,n,n,n,n,n,n,n,n,n,n,n,n,n,n,6.5,42,290,12.0],
[h,h,h,vh,n,n,n,h,n,n,n,n,n,n,n,n,n,n,n,n,n,n,13,60,683,14.8],
[h,h,h,vh,h,n,n,h,n,n,n,n,n,n,h,n,n,n,h,h,n,n,90,444,3343,26.7],
[h,h,h,vh,n,n,n,h,n,n,n,n,n,n,n,n,n,n,n,n,n,n,8,42,420,12.5],
[h,h,h,vh,n,n,n,h,n,n,h,n,n,n,n,n,n,n,n,n,n,n,16,114,887,16.4],
[h,h,h,vh,h,n,h,h,n,n,vh,h,l,h,h,n,n,l,h,n,n,l,177.9,1248,7998,31.5],
[h,h,h,vh,h,h,l,h,n,n,n,n,l,n,h,n,n,n,n,n,n,n,302,2400,8543,38.4],
[h,h,h,vh,h,n,h,l,n,n,n,n,h,h,n,n,h,n,n,h,n,n,282.1,1368,9820,37.3],
[h,h,h,vh,h,h,h,l,n,n,n,n,n,h,n,n,h,n,n,n,n,n,284.7,973,8518,38.1],
[h,h,h,vh,n,h,h,n,n,n,n,n,l,n,h,n,h,n,h,n,n,n,79,400,2327,26.9],
[h,h,h,vh,l,l,n,n,n,n,n,n,l,h,vh,n,h,n,h,n,n,n,423,2400,18447,41.9],
[h,h,h,vh,h,n,n,n,n,n,n,n,l,h,vh,n,vh,l,h,n,n,n,190,420,5092,30.3],
[h,h,h,vh,h,n,n,h,n,n,n,h,n,h,n,n,h,n,h,n,n,n,47.5,252,2007,22.3],
[h,h,h,vh,l,vh,n,xh,n,n,h,h,l,n,n,n,h,n,n,h,n,n,21,107,1058,21.3],
[h,h,h,vh,l,n,h,h,n,n,vh,n,n,h,h,n,h,n,h,n,n,n,78,571.4,4815,30.5],
[h,h,h,vh,l,n,h,h,n,n,vh,n,n,h,h,n,h,n,h,n,n,n,11.4,98.8,704,15.5],
[h,h,h,vh,l,n,h,h,n,n,vh,n,n,h,h,n,h,n,h,n,n,n,19.3,155,1191,18.6],
[h,h,h,vh,l,h,n,vh,n,n,h,h,l,h,n,n,n,h,h,n,n,n,101,750,4840,32.4],
[h,h,h,vh,l,h,n,h,n,n,h,h,l,n,n,n,h,n,n,n,n,n,219,2120,11761,42.8],
[h,h,h,vh,l,h,n,h,n,n,h,h,l,n,n,n,h,n,n,n,n,n,50,370,2685,25.4],
[h,h,h,vh,h,vh,h,h,n,n,vh,vh,n,vh,vh,n,vh,n,h,h,n,l,227,1181,6293,33.8],
[h,h,h,vh,h,n,h,vh,n,n,n,n,l,h,vh,n,n,l,n,n,n,l,70,278,2950,20.2],
[h,h,h,vh,h,h,l,h,n,n,n,n,l,n,n,n,n,n,h,n,n,l,0.9,8.4,28,4.9],
[h,h,h,vh,l,vh,l,xh,n,n,xh,vh,l,h,h,n,vh,vl,h,n,n,n,980,4560,50961,96.4],
[h,h,h,vh,n,n,l,h,n,n,n,n,l,vh,vh,n,n,h,h,n,n,n,350,720,8547,35.7],
[h,h,h,vh,h,h,n,xh,n,n,h,h,l,h,n,n,n,h,h,h,n,n,70,458,2404,27.5],
[h,h,h,vh,h,h,n,xh,n,n,h,h,l,h,n,n,n,h,h,h,n,n,271,2460,9308,43.4],
[h,h,h,vh,n,n,n,n,n,n,n,n,l,h,h,n,h,n,h,n,n,n,90,162,2743,25.0],
[h,h,h,vh,n,n,n,n,n,n,n,n,l,h,h,n,h,n,h,n,n,n,40,150,1219,18.9],
[h,h,h,vh,n,h,n,h,n,n,h,n,l,h,h,n,h,n,h,n,n,n,137,636,4210,32.2],
[h,h,h,vh,n,h,n,h,n,n,h,n,h,h,h,n,h,n,h,n,n,n,150,882,5848,36.2],
[h,h,h,vh,n,vh,n,h,n,n,h,n,l,h,h,n,h,n,h,n,n,n,339,444,8477,45.9],
[h,h,h,vh,n,l,h,l,n,n,n,n,h,h,h,n,h,n,h,n,n,n,240,192,10313,37.1],
[h,h,h,vh,l,h,n,h,n,n,n,vh,l,h,h,n,h,h,h,n,n,l,144,576,6129,28.8],
[h,h,h,vh,l,n,l,n,n,n,n,vh,l,h,h,n,h,h,h,n,n,l,151,432,6136,26.2],
[h,h,h,vh,l,n,l,h,n,n,n,vh,l,h,h,n,h,h,h,n,n,l,34,72,1555,16.2],
[h,h,h,vh,l,n,n,h,n,n,n,vh,l,h,h,n,h,h,h,n,n,l,98,300,4907,24.4],
[h,h,h,vh,l,n,n,h,n,n,n,vh,l,h,h,n,h,h,h,n,n,l,85,300,4256,23.2],
[h,h,h,vh,l,n,l,n,n,n,n,vh,l,h,h,n,h,h,h,n,n,l,20,240,813,12.8],
[h,h,h,vh,l,n,l,n,n,n,n,vh,l,h,h,n,h,h,h,n,n,l,111,600,4511,23.5],
[h,h,h,vh,l,h,vh,h,n,n,n,vh,l,h,h,n,h,h,h,n,n,l,162,756,7553,32.4],
[h,h,h,vh,l,h,h,vh,n,n,n,vh,l,h,h,n,h,h,h,n,n,l,352,1200,17597,42.9],
[h,h,h,vh,l,h,n,vh,n,n,n,vh,l,h,h,n,h,h,h,n,n,l,165,97,7867,31.5],
[h,h,h,vh,h,h,n,vh,n,n,h,h,l,h,n,n,n,h,h,n,n,n,60,409,2004,24.9],
[h,h,h,vh,h,h,n,vh,n,n,h,h,l,h,n,n,n,h,h,n,n,n,100,703,3340,29.6],
[h,h,h,vh,n,h,vh,vh,n,n,xh,xh,h,n,n,n,n,l,l,n,n,n,32,1350,2984,33.6],
[h,h,h,vh,h,h,h,h,n,n,vh,xh,h,h,h,n,h,h,h,n,n,n,53,480,2227,28.8],
[h,h,h,vh,h,h,l,vh,n,n,vh,xh,l,vh,vh,n,vh,vl,vl,h,n,n,41,599,1594,23.0],
[h,h,h,vh,h,h,l,vh,n,n,vh,xh,l,vh,vh,n,vh,vl,vl,h,n,n,24,430,933,19.2],
[h,h,h,vh,h,vh,h,vh,n,n,xh,xh,n,h,h,n,h,h,h,n,n,n,165,4178.2,6266,47.3],
[h,h,h,vh,h,vh,h,vh,n,n,xh,xh,n,h,h,n,h,h,h,n,n,n,65,1772.5,2468,34.5],
[h,h,h,vh,h,vh,h,vh,n,n,xh,xh,n,h,h,n,h,h,h,n,n,n,70,1645.9,2658,35.4],
[h,h,h,vh,h,vh,h,xh,n,n,xh,xh,n,h,h,n,h,h,h,n,n,n,50,1924.5,2102,34.2],
[h,h,h,vh,l,vh,l,vh,n,n,vh,xh,l,h,n,n,l,vl,l,h,n,n,7.25,648,406,15.6],
[h,h,h,vh,h,vh,h,vh,n,n,xh,xh,n,h,h,n,h,h,h,n,n,n,233,8211,8848,53.1],
[h,h,h,vh,n,h,n,vh,n,n,vh,vh,h,n,n,n,n,l,l,n,n,n,16.3,480,1253,21.5],
[h,h,h,vh,n,h,n,vh,n,n,vh,vh,h,n,n,n,n,l,l,n,n,n, 6.2, 12,477,15.4],
[h,h,h,vh,n,h,n,vh,n,n,vh,vh,h,n,n,n,n,l,l,n,n,n, 3.0, 38,231,12.0],
])
"""
Demo code:
"""
def _nasa93(): print(nasa93.__name__)
#_nasa93()
#if __name__ == '__main__': eval(todo('_nasa93()'))
| rahlk/WarnPlan | warnplan/commons/tools/axe/nasa93.py | Python | mit | 7,488 | 0.31437 |
# IVLE
# Copyright (C) 2007-2009 The University of Melbourne
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
# Author: Will Grant, Nick Chadwick
import urllib
import datetime
try:
import mod_python.Cookie
except ImportError:
# This needs to be importable from outside Apache.
pass
import ivle.pulldown_subj
import ivle.webapp.security
from ivle.auth import authenticate, AuthError
from ivle.webapp.base.xhtml import XHTMLView
from ivle.webapp.base.plugins import CookiePlugin
class LoginView(XHTMLView):
'''A view to allow a user to log in.'''
template = 'login.html'
allow_overlays = False
def authorize(self, req):
return True
def populate(self, req, ctx):
fields = req.get_fieldstorage()
nexturl = fields.getfirst('url')
# XXX Warning that Internet Explorer is unsupported
# Test if the user is in Internet Explorer
try:
useragent = req.headers_in['User-Agent']
# A bit of very basic UA string detection
ctx['msie'] = ('MSIE' in useragent
and 'AppleWebKit' not in useragent
and 'Gecko' not in useragent
and 'Opera' not in useragent)
except KeyError:
ctx['msie'] = False
if nexturl is None:
nexturl = '/'
# We are already logged in. If it is a POST, they might be trying to
# clobber their session with some new credentials. That's their own
# business, so we let them do it. Otherwise, we don't bother prompting
# and just redirect to the destination.
# Note that req.user is None even if we are 'logged in', if the user is
# invalid (state != enabled, or expired).
if req.method != "POST" and req.user is not None:
req.throw_redirect(nexturl)
# Don't give any URL if we want /.
if nexturl == '/':
query_string = ''
else:
query_string = '?url=' + urllib.quote(nexturl, safe="/~")
ctx['path'] = req.make_path('+login') + query_string
# If this succeeds, the user is invalid.
user = ivle.webapp.security.get_user_details(req)
if user is not None:
if user.state == "no_agreement":
# Authenticated, but need to accept the ToS. Send them there.
# IMPORTANT NOTE FOR HACKERS: You can't simply disable this
# if you are not planning to display a ToS page - the ToS
# acceptance process actually calls usrmgt to create the user
# jails and related stuff.
req.throw_redirect(req.make_path('+tos') + query_string)
elif user.state == "pending":
# FIXME: this isn't quite the right answer, but it
# should be more robust in the short term.
session = req.get_session()
session.invalidate()
session.delete()
user.state = u'no_agreement'
req.store.commit()
req.throw_redirect(nexturl)
if req.method == "POST":
# While req.user is normally set to get_user_details, it won't set
# it if the account isn't valid. So we get it ourselves.
user = ivle.webapp.security.get_user_details(req)
badlogin = None
username = fields.getfirst('user')
password = fields.getfirst('pass')
if username is not None:
# From this point onwards, we will be showing an error message
# if unsuccessful.
# Authenticate
if password is None:
badlogin = "No password supplied."
else:
user = None
try:
# Username is case insensitive
user = authenticate.authenticate(req.config, req.store,
username.value.lower(), password.value)
except AuthError, msg:
badlogin = msg
if user is None:
# Must have got an error. Do not authenticate.
# The except: above will have set a message.
pass
else:
# Success - Set the session and redirect to the URL.
session = req.get_session()
session['login'] = user.login
session.save()
session.unlock()
user.last_login = datetime.datetime.now()
# Create cookies for plugins that might request them.
for plugin in req.config.plugin_index[CookiePlugin]:
for cookie in plugin.cookies:
# The function can be None if they just need to be
# deleted at logout.
if plugin.cookies[cookie] is not None:
req.add_cookie(mod_python.Cookie.Cookie(cookie,
plugin.cookies[cookie](user), path='/'))
# Add any new enrolments.
ivle.pulldown_subj.enrol_user(req.config, req.store, user)
req.store.commit()
req.throw_redirect(nexturl)
# We didn't succeed.
# Render the login form with the error message.
ctx['error'] = badlogin
class LogoutView(XHTMLView):
'''A view to log the current session out.'''
template = 'logout.html'
allow_overlays = False
def authorize(self, req):
# This can be used by any authenticated user, even if they haven't
# accepted the ToS yet.
return ivle.webapp.security.get_user_details(req) is not None
def populate(self, req, ctx):
if req.method == "POST":
req.logout()
else:
ctx['path'] = req.make_path('+logout')
| dcoles/ivle | ivle/webapp/security/views.py | Python | gpl-2.0 | 6,811 | 0.001175 |
# coding=gbk
import os
import re
import string
def isMov(filename):
    # Check whether the file is a movie file
    suffix = filename.split('.')[-1].lower() # extract the extension
    pattern = re.compile(r'mpg|mpeg|m2v|mkv|dat|vob|avi|wmv|rm|ram|rmvb|mov|avi|mp4|qt|viv')
    if pattern.search(suffix): # does the extension match a movie format?
        return True
    else:
        return False
if __name__=='__main__':
    # Walk the current directory
    print 'Processing...'
    cnt = 1
    for fp in os.listdir(os.getcwd()):
        if os.path.isfile(fp) and isMov(fp): # it is a movie file
            if fp[0]=='[': # strip a leading [...] tag
                index = fp.find(']')
                if index!=-1:
                    print '[%d] %s ==> %s'%(cnt,fp,fp[index+1:])
                    os.rename(fp,fp[index+1:])
                    fp = fp[index+1:]
                    cnt+=1
            elif fp[:2]=='【': # strip a leading 【...】 tag (two bytes in GBK)
                index = fp.find('】')
                if index!=-1:
                    print '[%d] %s ==> %s'%(cnt,fp,fp[index+2:])
                    os.rename(fp,fp[index+2:])
                    fp = fp[index+2:]
                    cnt+=1
            if fp[0] =='.' or fp[0]=='-': # strip a leading '.' or '-'
                print '[%d] %s ==> %s'%(cnt,fp,fp[1:])
                os.rename(fp,fp[1:])
    if cnt==1:
        print 'No movie files needed processing'
    else:
        print 'Done'
| windcode/xtools | CleanMoviePrefix.py | Python | mit | 1,426 | 0.019635 |
from __future__ import division, print_function, absolute_import
from scipy import stats
import numpy as np
from numpy.testing import assert_almost_equal, assert_, assert_raises, \
assert_array_almost_equal, assert_array_almost_equal_nulp, run_module_suite
def test_kde_1d():
#some basic tests comparing to normal distribution
np.random.seed(8765678)
n_basesample = 500
xn = np.random.randn(n_basesample)
xnmean = xn.mean()
xnstd = xn.std(ddof=1)
# get kde for original sample
gkde = stats.gaussian_kde(xn)
# evaluate the density function for the kde for some points
xs = np.linspace(-7,7,501)
kdepdf = gkde.evaluate(xs)
normpdf = stats.norm.pdf(xs, loc=xnmean, scale=xnstd)
intervall = xs[1] - xs[0]
assert_(np.sum((kdepdf - normpdf)**2)*intervall < 0.01)
prob1 = gkde.integrate_box_1d(xnmean, np.inf)
prob2 = gkde.integrate_box_1d(-np.inf, xnmean)
assert_almost_equal(prob1, 0.5, decimal=1)
assert_almost_equal(prob2, 0.5, decimal=1)
assert_almost_equal(gkde.integrate_box(xnmean, np.inf), prob1, decimal=13)
assert_almost_equal(gkde.integrate_box(-np.inf, xnmean), prob2, decimal=13)
assert_almost_equal(gkde.integrate_kde(gkde),
(kdepdf**2).sum()*intervall, decimal=2)
assert_almost_equal(gkde.integrate_gaussian(xnmean, xnstd**2),
(kdepdf*normpdf).sum()*intervall, decimal=2)
def test_kde_bandwidth_method():
def scotts_factor(kde_obj):
"""Same as default, just check that it works."""
return np.power(kde_obj.n, -1./(kde_obj.d+4))
np.random.seed(8765678)
n_basesample = 50
xn = np.random.randn(n_basesample)
# Default
gkde = stats.gaussian_kde(xn)
# Supply a callable
gkde2 = stats.gaussian_kde(xn, bw_method=scotts_factor)
# Supply a scalar
gkde3 = stats.gaussian_kde(xn, bw_method=gkde.factor)
xs = np.linspace(-7,7,51)
kdepdf = gkde.evaluate(xs)
kdepdf2 = gkde2.evaluate(xs)
assert_almost_equal(kdepdf, kdepdf2)
kdepdf3 = gkde3.evaluate(xs)
assert_almost_equal(kdepdf, kdepdf3)
assert_raises(ValueError, stats.gaussian_kde, xn, bw_method='wrongstring')
# Subclasses that should stay working (extracted from various sources).
# Unfortunately the earlier design of gaussian_kde made it necessary for users
# to create these kinds of subclasses, or call _compute_covariance() directly.
class _kde_subclass1(stats.gaussian_kde):
def __init__(self, dataset):
self.dataset = np.atleast_2d(dataset)
self.d, self.n = self.dataset.shape
self.covariance_factor = self.scotts_factor
self._compute_covariance()
class _kde_subclass2(stats.gaussian_kde):
def __init__(self, dataset):
self.covariance_factor = self.scotts_factor
super(_kde_subclass2, self).__init__(dataset)
class _kde_subclass3(stats.gaussian_kde):
def __init__(self, dataset, covariance):
self.covariance = covariance
stats.gaussian_kde.__init__(self, dataset)
def _compute_covariance(self):
self.inv_cov = np.linalg.inv(self.covariance)
self._norm_factor = np.sqrt(np.linalg.det(2*np.pi * self.covariance)) \
* self.n
class _kde_subclass4(stats.gaussian_kde):
def covariance_factor(self):
return 0.5 * self.silverman_factor()
def test_gaussian_kde_subclassing():
x1 = np.array([-7, -5, 1, 4, 5], dtype=np.float)
xs = np.linspace(-10, 10, num=50)
# gaussian_kde itself
kde = stats.gaussian_kde(x1)
ys = kde(xs)
# subclass 1
kde1 = _kde_subclass1(x1)
y1 = kde1(xs)
assert_array_almost_equal_nulp(ys, y1, nulp=10)
# subclass 2
kde2 = _kde_subclass2(x1)
y2 = kde2(xs)
assert_array_almost_equal_nulp(ys, y2, nulp=10)
# subclass 3
kde3 = _kde_subclass3(x1, kde.covariance)
y3 = kde3(xs)
assert_array_almost_equal_nulp(ys, y3, nulp=10)
# subclass 4
kde4 = _kde_subclass4(x1)
y4 = kde4(x1)
y_expected = [0.06292987, 0.06346938, 0.05860291, 0.08657652, 0.07904017]
assert_array_almost_equal(y_expected, y4, decimal=6)
# Not a subclass, but check for use of _compute_covariance()
kde5 = kde
kde5.covariance_factor = lambda: kde.factor
kde5._compute_covariance()
y5 = kde5(xs)
assert_array_almost_equal_nulp(ys, y5, nulp=10)
def test_gaussian_kde_covariance_caching():
x1 = np.array([-7, -5, 1, 4, 5], dtype=np.float)
xs = np.linspace(-10, 10, num=5)
# These expected values are from scipy 0.10, before some changes to
# gaussian_kde. They were not compared with any external reference.
y_expected = [0.02463386, 0.04689208, 0.05395444, 0.05337754, 0.01664475]
# Set the bandwidth, then reset it to the default.
kde = stats.gaussian_kde(x1)
kde.set_bandwidth(bw_method=0.5)
kde.set_bandwidth(bw_method='scott')
y2 = kde(xs)
assert_array_almost_equal(y_expected, y2, decimal=7)
def test_gaussian_kde_monkeypatch():
"""Ugly, but people may rely on this. See scipy pull request 123,
specifically the linked ML thread "Width of the Gaussian in stats.kde".
If it is necessary to break this later on, that is to be discussed on ML.
"""
x1 = np.array([-7, -5, 1, 4, 5], dtype=np.float)
xs = np.linspace(-10, 10, num=50)
# The old monkeypatched version to get at Silverman's Rule.
kde = stats.gaussian_kde(x1)
kde.covariance_factor = kde.silverman_factor
kde._compute_covariance()
y1 = kde(xs)
# The new saner version.
kde2 = stats.gaussian_kde(x1, bw_method='silverman')
y2 = kde2(xs)
assert_array_almost_equal_nulp(y1, y2, nulp=10)
def test_kde_integer_input():
"""Regression test for #1181."""
x1 = np.arange(5)
kde = stats.gaussian_kde(x1)
y_expected = [0.13480721, 0.18222869, 0.19514935, 0.18222869, 0.13480721]
assert_array_almost_equal(kde(x1), y_expected, decimal=6)
if __name__ == "__main__":
run_module_suite()
| GbalsaC/bitnamiP | venv/lib/python2.7/site-packages/scipy/stats/tests/test_kdeoth.py | Python | agpl-3.0 | 6,021 | 0.000997 |
from psycopg2 import InterfaceError
class CursorContextManager:
"""Instantiated once per :func:`~postgres.Postgres.get_cursor` call.
:param pool: see :mod:`psycopg2_pool`
:param bool autocommit: see :attr:`psycopg2:connection.autocommit`
:param bool readonly: see :attr:`psycopg2:connection.readonly`
:param cursor_kwargs: passed to :meth:`psycopg2:connection.cursor`
During construction, a connection is checked out of the connection pool
and its :attr:`autocommit` and :attr:`readonly` attributes are set, then a
:class:`psycopg2:cursor` is created from that connection.
Upon exit of the ``with`` block, the connection is rolled back if an
exception was raised, or committed otherwise. There are two exceptions to
this:
1. if :attr:`autocommit` is :obj:`True`, then the connection is neither
rolled back nor committed;
2. if :attr:`readonly` is :obj:`True`, then the connection is always rolled
back, never committed.
In all cases the cursor is closed and the connection is put back in the pool.
"""
__slots__ = ('pool', 'conn', 'cursor')
def __init__(self, pool, autocommit=False, readonly=False, **cursor_kwargs):
self.pool = pool
conn = self.pool.getconn()
conn.autocommit = autocommit
conn.readonly = readonly
self.cursor = conn.cursor(**cursor_kwargs)
self.conn = conn
def __enter__(self):
return self.cursor
def __exit__(self, exc_type, exc_val, exc_tb):
"""Put our connection back in the pool.
"""
self.cursor.close()
self.conn.__exit__(exc_type, exc_val, exc_tb)
self.pool.putconn(self.conn)
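# Illustrative sketch, not part of the original module: assuming a wrapper in
# the style of postgres.Postgres whose get_cursor() returns a
# CursorContextManager, the commit/rollback semantics documented above play
# out as below. The `db` object and the table name are assumptions.
def _demo_cursor_context_manager(db):
    # db is assumed to be e.g. Postgres("postgres://user@localhost/mydb")
    with db.get_cursor() as cursor:
        cursor.execute("UPDATE foo SET bar = 1")
    # clean exit of the block -> committed; an exception inside -> rolled back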
class ConnectionCursorContextManager:
"""Creates a cursor from the given connection, then wraps it in a context
manager that automatically commits or rolls back the changes on exit.
:param conn: a :class:`psycopg2:connection`
:param bool autocommit: see :attr:`psycopg2:connection.autocommit`
:param bool readonly: see :attr:`psycopg2:connection.readonly`
:param cursor_kwargs: passed to :meth:`psycopg2:connection.cursor`
During construction, the connection's :attr:`autocommit` and :attr:`readonly`
attributes are set, then :meth:`psycopg2:connection.cursor` is called with
`cursor_kwargs`.
Upon exit of the ``with`` block, the connection is rolled back if an
exception was raised, or committed otherwise. There are two exceptions to
this:
1. if :attr:`autocommit` is :obj:`True`, then the connection is neither
rolled back nor committed;
2. if :attr:`readonly` is :obj:`True`, then the connection is always rolled
back, never committed.
In all cases the cursor is closed.
"""
__slots__ = ('conn', 'cursor')
def __init__(self, conn, autocommit=False, readonly=False, **cursor_kwargs):
conn.autocommit = autocommit
conn.readonly = readonly
self.conn = conn
self.cursor = conn.cursor(**cursor_kwargs)
def __enter__(self):
return self.cursor
def __exit__(self, exc_type, exc_val, exc_tb):
self.cursor.close()
self.conn.__exit__(exc_type, exc_val, exc_tb)
class CursorSubcontextManager:
"""Wraps a cursor so that it can be used for a subtransaction.
See :meth:`~postgres.Postgres.get_cursor` for an explanation of subtransactions.
:param cursor: the :class:`psycopg2:cursor` to wrap
:param back_as: temporarily overwrites the cursor's
:attr:`~postgres.cursors.SimpleCursorBase.back_as` attribute
"""
__slots__ = ('cursor', 'back_as', 'outer_back_as')
PRESERVE = object()
def __init__(self, cursor, back_as=PRESERVE):
self.cursor = cursor
self.back_as = back_as
def __enter__(self):
if self.back_as is not self.PRESERVE:
self.outer_back_as = self.cursor.back_as
self.cursor.back_as = self.back_as
return self.cursor
def __exit__(self, exc_type, exc_val, exc_tb):
if self.back_as is not self.PRESERVE:
self.cursor.back_as = self.outer_back_as
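# Illustrative sketch, not part of the original module: temporarily overriding
# back_as inside an outer cursor's lifetime. Assumes a postgres.py-style
# cursor; the query and the 'dict' back_as key are assumptions for the example.
def _demo_subcontext(cursor):
    with CursorSubcontextManager(cursor, back_as='dict') as c:
        c.execute("SELECT 1 AS x")
        row = c.fetchone()  # assumed to be packaged per back_as in this block
    # on exit, cursor.back_as is restored to its outer value
    return row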
class ConnectionContextManager:
"""Instantiated once per :func:`~postgres.Postgres.get_connection` call.
:param pool: see :mod:`psycopg2_pool`
:param bool autocommit: see :attr:`psycopg2:connection.autocommit`
:param bool readonly: see :attr:`psycopg2:connection.readonly`
    This context manager checks a connection out of the specified pool and sets
    its :attr:`autocommit` and :attr:`readonly` attributes.
The :meth:`__enter__` method returns the :class:`~postgres.Connection`.
The :meth:`__exit__` method rolls back the connection and puts it back in
the pool.
"""
__slots__ = ('pool', 'conn')
def __init__(self, pool, autocommit=False, readonly=False):
self.pool = pool
conn = self.pool.getconn()
conn.autocommit = autocommit
conn.readonly = readonly
self.conn = conn
def __enter__(self):
return self.conn
def __exit__(self, *exc_info):
"""Put our connection back in the pool.
"""
try:
self.conn.rollback()
except InterfaceError:
pass
self.pool.putconn(self.conn)
| gratipay/postgres.py | postgres/context_managers.py | Python | mit | 5,280 | 0.001136 |
#!/usr/bin/env python3
import urllib3
import sys
import os
import json
from datetime import datetime
import urllib.parse
import requests
import time
import argparse
urllib3.disable_warnings()
debug = int(os.getenv('DEBUG', 0))
batch_size = 100
workspace = os.getenv('WORKSPACE', os.getcwd())
user_name = 'jenkins-testerdh'
user_password = os.getenv('JENKINS_PASSWD', "None")
def json_serial(obj):
if isinstance(obj, datetime):
serial = obj.isoformat()
return serial
raise TypeError("Type not serializable")
def json_print(json_obj):
return json.dumps(json_obj, indent=4, sort_keys=True, default=json_serial)
def request_get(url):
r = requests.get(url, auth=(user_name, user_password), verify=False)
return r
def info(msg):
print('\033[34m[Info]\033[0m {}'.format(msg))
return
def warn(msg):
print('\033[33m[Warn]\033[0m {}'.format(msg))
sys.exit(1)
def error(msg):
print('\033[31m[Error]\033[0m {}'.format(msg))
sys.exit(1)
def parse_args():
parser = argparse.ArgumentParser(description='Compare dev and master branch.')
parser.add_argument('--repo-url', '-r', default=None, dest='repo_url', help='Git clone URL with ssh syntax')
args, unknown = parser.parse_known_args()
return args, unknown
def main(argv):
start_time = time.time()
args, unknown = parse_args()
info('Starting')
info('Workspace {}'.format(workspace))
master_merge_file = '{}/merge.master'.format(workspace)
from_dev = urllib.parse.quote_plus('refs/heads/dev')
to_master = urllib.parse.quote_plus('refs/heads/master')
project_url = None
if args.repo_url is None:
error('--repo-url is not defined.')
return 1
try:
git_host = args.repo_url.split('@')[1]
git_url = 'https://{}/rest/api/1.0/projects'.format(git_host.split(':')[0])
project_key = args.repo_url.split('/')[3]
repo_name = args.repo_url.split('/')[-1:][0]
repo_name = repo_name.replace('.git', '')
project_url = '{}/{}/repos/{}'.format(git_url, project_key, repo_name)
if debug == 1:
info(project_url)
except IndexError:
error('Git clone repo url unknown format.')
response = request_get(project_url)
if int(response.status_code) == 200:
if 'id' in response.json():
repo_id = response.json()['id']
else:
error('Repository ID not found.')
error(response.json())
return 1
else:
error('HTTP error {}'.format(response.status_code))
return 1
compare_branch = 'compare/commits?from={}&to={}&fromRepo={}&limit=1'.format(from_dev, to_master, repo_id)
compare_url = '{}/{}'.format(project_url, compare_branch)
if debug == 1:
info(compare_url)
response = request_get(compare_url)
if int(response.status_code) == 200:
if debug == 1:
info('Headers: {}'.format(dict(response.headers)))
info('Encoding: {}'.format(response.encoding))
info('Text: {}'.format(response.json()))
change_size = response.json()['size']
if change_size == 0:
info('{} has no change between branch dev and master'.format(args.repo_url))
else:
info('{} changes between branch dev and master'.format(args.repo_url))
master_merge_fh = open(master_merge_file, 'w')
master_merge_fh.close()
else:
error('HTTP error {}'.format(response.status_code))
info('Finished - execution time %.2f seconds' % (time.time() - start_time))
return
if __name__ == "__main__":
sys.exit(main(sys.argv))
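# Illustrative note, not part of the original script: given the parsing in
# main(), --repo-url is expected in Bitbucket/Stash-style ssh form, e.g.
#
#     ./git-branch-diff.py --repo-url ssh://git@stash.example.com:7999/proj/repo.git
#
# so that split('@')[1] yields the host, split('/')[3] the project key, and the
# last path segment the repository name. The host name here is an assumption.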
| gwsu2008/automation | python/git-branch-diff.py | Python | gpl-2.0 | 3,650 | 0.001644 |
## PyZUI 0.1 - Python Zooming User Interface
## Copyright (C) 2009 David Roberts <d@vidr.cc>
##
## This program is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License
## as published by the Free Software Foundation; either version 2
## of the License, or (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
## 02110-1301, USA.
"""Dynamic tile provider for Barnsley's fern."""
import random
import Image
from dynamictileprovider import DynamicTileProvider
class FernTileProvider(DynamicTileProvider):
"""FernTileProvider objects are used for generating tiles of Barnsley's
fern iterated function system.
Constructor: FernTileProvider(TileCache)
"""
def __init__(self, tilecache):
DynamicTileProvider.__init__(self, tilecache)
filext = 'png'
tilesize = 256
aspect_ratio = 1.0
max_iterations = 50000
max_points = 10000
transformations = [
## (probability, (a, b, c, d, e, f))
## x_n+1 = a*x_n + b*y_n + c
## y_n+1 = d*x_n + e*y_n + f
## for details about the transformations, see:
## <http://en.wikipedia.org/wiki/Barnsley's_fern>
## <http://books.google.com/books?id=oh7NoePgmOIC
## &printsec=frontcover#PPA86,M1>
## <http://mathworld.wolfram.com/BarnsleysFern.html>
## <http://www.home.aone.net.au/~byzantium/ferns/fractal.html>
## rachis
(0.01, ( 0.00, 0.00, 0.00, 0.00, 0.16, 0.00)),
## left hand first pinna
(0.07, ( 0.20, -0.26, 0.00, 0.23, 0.22, 1.60)),
## right hand first pinna
(0.07, (-0.15, 0.28, 0.00, 0.26, 0.24, 0.44)),
## body of fern
(0.85, ( 0.85, 0.04, 0.00, -0.04, 0.85, 1.60)),
]
color = (100, 170, 0)
def __choose_transformation(self):
"""Randomly choose a transformation based on the probability of each
transformation being chosen.
__choose_transformation() -> tuple<float,float,float,float,float,float>
"""
n = random.uniform(0,1)
for probability, transformation in self.transformations:
if n <= probability:
break
else:
n -= probability
return transformation
def __transform(self, x, y):
"""Randomly choose a transformation and apply it to x and y, returning
the result as a tuple.
__transform(float, float) -> tuple<float,float>
"""
t = self.__choose_transformation()
x_new = t[0]*x + t[1]*y + t[2]
y_new = t[3]*x + t[4]*y + t[5]
return (x_new,y_new)
def __draw_point(self, tile, x, y, tilesize_units):
"""Draw the given point on the given tile.
__draw_point(Image, float, float, float) -> None
Precondition: 0.0 <= x <= tilesize_units
Precondition: 0.0 <= y <= tilesize_units
"""
x = x * self.tilesize / tilesize_units
x = min(int(x), self.tilesize-1)
y = y * self.tilesize / tilesize_units
y = min(int(self.tilesize - y), self.tilesize-1)
tile.putpixel((x,y), self.color)
def _load_dynamic(self, tile_id, outfile):
media_id, tilelevel, row, col = tile_id
if row < 0 or col < 0 or \
row > 2**tilelevel - 1 or col > 2**tilelevel - 1:
## row,col out of range
return
tilesize_units = 10.0 * 2**-tilelevel
x = col * tilesize_units
y = row * tilesize_units
## the corners of the tile are:
## (x1,y2) +----+ (x2,y2)
## | |
## (x1,y1) +----+ (x2,y1)
x1 = x - 5.0
y2 = 10.0 - y
x2 = x1 + tilesize_units
y1 = y2 - tilesize_units
tile = Image.new('RGB', (self.tilesize,self.tilesize))
num_points = 0
x = 0.0
y = 0.0
for i in xrange(self.max_iterations):
if x1 <= x <= x2 and y1 <= y <= y2:
self.__draw_point(
tile, x-x1, y-y1, tilesize_units)
num_points += 1
if num_points > self.max_points:
break
x,y = self.__transform(x,y)
tile.save(outfile)
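## Minimal standalone sketch, not part of the original class: the iterated
## function system defined in `transformations` traces out the fern's
## attractor when a randomly chosen affine map is applied repeatedly. For
## brevity this demo applies only the dominant body map (probability 0.85),
## with coefficients copied from the table above.
def _demo_fern_body_map(n=1000):
    points = []
    x, y = 0.0, 0.0
    for _ in xrange(n):
        x, y = 0.85*x + 0.04*y + 0.00, -0.04*x + 0.85*y + 1.60
        points.append((x, y))
    return points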
| davidar/pyzui | pyzui/ferntileprovider.py | Python | gpl-2.0 | 4,684 | 0.009821 |
"""
Tests for users API
"""
import datetime
from django.utils import timezone
from xmodule.modulestore.tests.factories import ItemFactory, CourseFactory
from xmodule.modulestore.django import modulestore
from student.models import CourseEnrollment
from .. import errors
from ..testutils import MobileAPITestCase, MobileAuthTestMixin, MobileAuthUserTestMixin, MobileEnrolledCourseAccessTestMixin
from .serializers import CourseEnrollmentSerializer
class TestUserDetailApi(MobileAPITestCase, MobileAuthUserTestMixin):
"""
Tests for /api/mobile/v0.5/users/<user_name>...
"""
REVERSE_INFO = {'name': 'user-detail', 'params': ['username']}
def test_success(self):
self.login()
response = self.api_response()
self.assertEqual(response.data['username'], self.user.username)
self.assertEqual(response.data['email'], self.user.email)
class TestUserInfoApi(MobileAPITestCase, MobileAuthTestMixin):
"""
Tests for /api/mobile/v0.5/my_user_info
"""
def reverse_url(self, reverse_args=None, **kwargs):
return '/api/mobile/v0.5/my_user_info'
def test_success(self):
"""Verify the endpoint redirects to the user detail endpoint"""
self.login()
response = self.api_response(expected_response_code=302)
self.assertTrue(self.username in response['location'])
class TestUserEnrollmentApi(MobileAPITestCase, MobileAuthUserTestMixin, MobileEnrolledCourseAccessTestMixin):
"""
Tests for /api/mobile/v0.5/users/<user_name>/course_enrollments/
"""
REVERSE_INFO = {'name': 'courseenrollment-detail', 'params': ['username']}
ALLOW_ACCESS_TO_UNRELEASED_COURSE = True
def verify_success(self, response):
super(TestUserEnrollmentApi, self).verify_success(response)
courses = response.data
self.assertEqual(len(courses), 1)
found_course = courses[0]['course']
self.assertTrue('video_outline' in found_course)
self.assertTrue('course_handouts' in found_course)
self.assertEqual(found_course['id'], unicode(self.course.id))
self.assertEqual(courses[0]['mode'], 'honor')
def verify_failure(self, response):
self.assertEqual(response.status_code, 200)
courses = response.data
self.assertEqual(len(courses), 0)
def test_sort_order(self):
self.login()
num_courses = 3
courses = []
for course_num in range(num_courses):
courses.append(CourseFactory.create(mobile_available=True))
self.enroll(courses[course_num].id)
# verify courses are returned in the order of enrollment, with most recently enrolled first.
response = self.api_response()
for course_num in range(num_courses):
self.assertEqual(
response.data[course_num]['course']['id'], # pylint: disable=no-member
unicode(courses[num_courses - course_num - 1].id)
)
class CourseStatusAPITestCase(MobileAPITestCase):
"""
Base test class for /api/mobile/v0.5/users/<user_name>/course_status_info/{course_id}
"""
REVERSE_INFO = {'name': 'user-course-status', 'params': ['username', 'course_id']}
def _setup_course_skeleton(self):
"""
Creates a basic course structure for our course
"""
section = ItemFactory.create(
parent_location=self.course.location,
)
sub_section = ItemFactory.create(
parent_location=section.location,
)
unit = ItemFactory.create(
parent_location=sub_section.location,
)
other_unit = ItemFactory.create(
parent_location=sub_section.location,
)
return section, sub_section, unit, other_unit
class TestCourseStatusGET(CourseStatusAPITestCase, MobileAuthUserTestMixin, MobileEnrolledCourseAccessTestMixin):
"""
Tests for GET of /api/mobile/v0.5/users/<user_name>/course_status_info/{course_id}
"""
def test_success(self):
self.login_and_enroll()
(section, sub_section, unit, __) = self._setup_course_skeleton()
response = self.api_response()
self.assertEqual(response.data["last_visited_module_id"], unicode(unit.location))
self.assertEqual(
response.data["last_visited_module_path"],
[unicode(module.location) for module in [unit, sub_section, section, self.course]]
)
class TestCourseStatusPATCH(CourseStatusAPITestCase, MobileAuthUserTestMixin, MobileEnrolledCourseAccessTestMixin):
"""
Tests for PATCH of /api/mobile/v0.5/users/<user_name>/course_status_info/{course_id}
"""
def url_method(self, url, **kwargs):
# override implementation to use PATCH method.
return self.client.patch(url, data=kwargs.get('data', None)) # pylint: disable=no-member
def test_success(self):
self.login_and_enroll()
(__, __, __, other_unit) = self._setup_course_skeleton()
response = self.api_response(data={"last_visited_module_id": unicode(other_unit.location)})
self.assertEqual(response.data["last_visited_module_id"], unicode(other_unit.location))
def test_invalid_module(self):
self.login_and_enroll()
response = self.api_response(data={"last_visited_module_id": "abc"}, expected_response_code=400)
self.assertEqual(response.data, errors.ERROR_INVALID_MODULE_ID)
def test_nonexistent_module(self):
self.login_and_enroll()
non_existent_key = self.course.id.make_usage_key('video', 'non-existent')
response = self.api_response(data={"last_visited_module_id": non_existent_key}, expected_response_code=400)
self.assertEqual(response.data, errors.ERROR_INVALID_MODULE_ID)
def test_no_timezone(self):
self.login_and_enroll()
(__, __, __, other_unit) = self._setup_course_skeleton()
past_date = datetime.datetime.now()
response = self.api_response(
data={
"last_visited_module_id": unicode(other_unit.location),
"modification_date": past_date.isoformat() # pylint: disable=maybe-no-member
},
expected_response_code=400
)
self.assertEqual(response.data, errors.ERROR_INVALID_MODIFICATION_DATE)
def _date_sync(self, date, initial_unit, update_unit, expected_unit):
"""
Helper for test cases that use a modification to decide whether
to update the course status
"""
self.login_and_enroll()
# save something so we have an initial date
self.api_response(data={"last_visited_module_id": unicode(initial_unit.location)})
# now actually update it
response = self.api_response(
data={
"last_visited_module_id": unicode(update_unit.location),
"modification_date": date.isoformat()
}
)
self.assertEqual(response.data["last_visited_module_id"], unicode(expected_unit.location))
def test_old_date(self):
self.login_and_enroll()
(__, __, unit, other_unit) = self._setup_course_skeleton()
date = timezone.now() + datetime.timedelta(days=-100)
self._date_sync(date, unit, other_unit, unit)
def test_new_date(self):
self.login_and_enroll()
(__, __, unit, other_unit) = self._setup_course_skeleton()
date = timezone.now() + datetime.timedelta(days=100)
self._date_sync(date, unit, other_unit, other_unit)
def test_no_initial_date(self):
self.login_and_enroll()
(__, __, _, other_unit) = self._setup_course_skeleton()
response = self.api_response(
data={
"last_visited_module_id": unicode(other_unit.location),
"modification_date": timezone.now().isoformat()
}
)
self.assertEqual(response.data["last_visited_module_id"], unicode(other_unit.location))
def test_invalid_date(self):
self.login_and_enroll()
response = self.api_response(data={"modification_date": "abc"}, expected_response_code=400)
self.assertEqual(response.data, errors.ERROR_INVALID_MODIFICATION_DATE)
class TestCourseEnrollmentSerializer(MobileAPITestCase):
"""
Test the course enrollment serializer
"""
def test_success(self):
self.login_and_enroll()
serialized = CourseEnrollmentSerializer(CourseEnrollment.enrollments_for_user(self.user)[0]).data # pylint: disable=no-member
self.assertEqual(serialized['course']['video_outline'], None)
self.assertEqual(serialized['course']['name'], self.course.display_name)
self.assertEqual(serialized['course']['number'], self.course.id.course)
self.assertEqual(serialized['course']['org'], self.course.id.org)
def test_with_display_overrides(self):
self.login_and_enroll()
self.course.display_coursenumber = "overridden_number"
self.course.display_organization = "overridden_org"
modulestore().update_item(self.course, self.user.id)
serialized = CourseEnrollmentSerializer(CourseEnrollment.enrollments_for_user(self.user)[0]).data # pylint: disable=no-member
self.assertEqual(serialized['course']['number'], self.course.display_coursenumber)
self.assertEqual(serialized['course']['org'], self.course.display_organization)
| olexiim/edx-platform | lms/djangoapps/mobile_api/users/tests.py | Python | agpl-3.0 | 9,422 | 0.002972 |
from time import strftime, monotonic, sleep
import pigpio
import CABmanager
class clock(object):
"""Clock"""
def __init__( self, pigpio_pi ):
        if type( pigpio_pi ) is CABmanager.CABmanager:
self.pigpio_pi = pigpio_pi.pi
else:
self.pigpio_pi = pigpio_pi
self.tickDiff = pigpio.tickDiff
        self.get_current_tick = self.pigpio_pi.get_current_tick
self.gpio_time_1 = self.get_current_tick()
self.gpio_time_2 = 0
self.time_now = 0
def update( self ):
self.gpio_time_2 = self.get_current_tick()
self.time_now += self.tickDiff( self.gpio_time_1, self.gpio_time_2 )/1000000.0
self.gpio_time_1 = self.gpio_time_2
return( self.time_now )
def assert_update( self, gpio_time_2 ):
self.gpio_time_2 = gpio_time_2
self.time_now += self.tickDiff( self.gpio_time_1, self.gpio_time_2 )/1000000.0
self.gpio_time_1 = self.gpio_time_2
return( self.time_now )
def get_time( self ):
return( self.time_now )
def reset( self ):
self.gpio_time_1 = self.get_current_tick()
self.gpio_time_2 = 0
self.time_now = 0
def sleep( self, seconds ):
if seconds <= 0.5:
t1 = monotonic()
while monotonic() - t1 < seconds:
pass
else:
t1 = monotonic()
sleep( seconds - 0.4 )
while monotonic() - t1 < seconds:
pass
return( self.update() )
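# Hedged usage sketch, not part of the original module; assumes a local pigpio
# daemon is running so that pigpio.pi() can connect.
def _demo_clock():
    pi = pigpio.pi()
    c = clock( pi )
    c.sleep( 1.5 )            # busy-wait assisted sleep; returns updated time
    print( c.get_time() )     # elapsed seconds since creation/reset
#_demo_clock()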
def get_date_dmy():
"""
Return the date in dd.mm.yyyy format as a string.
"""
return( strftime( "%d.%m.%Y" ) )
def get_date_hm():
"""
Return the time in hh.mm format as a string.
"""
return( strftime( "%H.%M" ) )
def get_date_hmdmy():
"""
    Return the time in hh_mm_dd_mm_yyyy format as a string.
"""
    return( strftime( "%H_%M_%d_%m_%Y" ) )
| Don-Li/CABexpt | CABexpt/clock.py | Python | gpl-3.0 | 1,906 | 0.026233 |
from django.db import models
from django.contrib import auth
from django.db.models.signals import post_save
class Priority(object):
Aggregate_Admin = 12000
Campus_Admin = 10000
Department_Admin = 8000
Building_Admin = 6000
Group_Admin = 4000
Strict_User = 2000
Nice_User = 1000
Priority_Margin = 2000
Strict_Priority_Offset = 1000
Priority_Scale = 1000
Admins = ["Aggragate Admin", "Campus Admin", "Department Admin",
"Building Admin", "Group Admin"]
class UserProfile(models.Model):
user = models.ForeignKey(auth.models.User, unique=True, related_name = 'profile')
is_net_admin = models.BooleanField("Can Confirm Flow Space Requests", default=False)
is_clearinghouse_user = models.BooleanField("Clearinghouse account", default=False)
max_priority_level = models.IntegerField(null=True) # Otherwise will complain
supervisor = models.ForeignKey(auth.models.User, related_name = 'supervisor')
admin_position = models.CharField(max_length = 1024, default="")
def __unicode__(self):
try:
return "Profile for %s" % self.user
except:
return "No user"
@classmethod
def get_or_create_profile(cls, user):
try:
profile = user.get_profile()
except UserProfile.DoesNotExist:
profile = cls.objects.create(
user=user,
is_net_admin = False,
max_priority_level = Priority.Strict_User,
supervisor = user,
is_clearinghouse_user = False,
)
return profile
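# Illustrative usage sketch, not part of the original module; assumes a Django
# view where request.user is an authenticated auth.models.User.
def _demo_get_profile(request):
    profile = UserProfile.get_or_create_profile(request.user)
    if profile.is_net_admin:
        pass  # this user may confirm flow space requests
    return profile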
from openflow.optin_manager.users.user_signal_handler import super_user_save
post_save.connect(super_user_save, sender=auth.models.User)
| dana-i2cat/felix | optin_manager/src/python/openflow/optin_manager/users/models.py | Python | apache-2.0 | 1,961 | 0.022947 |
import logging
import pickle
import random
from gevent import Greenlet, sleep
from threading import Lock
from app import create_app
from dota_bot import DotaBot
from models import db, DynamicConfiguration, Game, GameStatus, GameVIP
from helpers.general import divide_vip_list_per_type
# Log
logging.basicConfig(format='[%(asctime)s] %(levelname)s %(message)s', level=logging.INFO)
class Credential:
"""A Steam account credentials.
Attributes:
login: Steam user login.
password: Steam user password.
"""
def __init__(self, login, password):
"""Create a user credentials.
Args:
login: user login.
password: user password.
"""
self.login = login
self.password = password
class WorkerManager(Greenlet):
"""Master class starting Dota bots to process jobs.
    The manager contains an initial pool of Steam credentials.
    It is a thread that polls the database for jobs, starting a new Dota bot whenever one is available.
    After processing a job, the Dota bot signals that its credentials are available again.
Attributes:
app: The flask application the manager is linked to, containing configuration objects and database access.
working_bots: A dictionary of all currently working Dota bots, indexed by bot login.
"""
def __init__(self):
"""Initialize the worker manager thread."""
Greenlet.__init__(self)
# Initialize
self.app = create_app()
self.working_bots = {}
self.credentials = []
self.mutex = Lock()
# Parse credentials from config
bot_credentials_string = self.app.config['STEAM_BOTS']
bot_credentials = bot_credentials_string.split('@')
i = 0
while i < len(bot_credentials):
login = bot_credentials[i]
password = bot_credentials[i+1]
self.credentials.append(Credential(login, password))
i = i + 2
def _run(self):
"""Start the main loop of the thread, creating Dota bots to process available jobs."""
while True:
with self.app.app_context():
admins, casters = divide_vip_list_per_type(GameVIP.get_all_vips())
bot_pause = DynamicConfiguration.get('bot_pause', 'False')
for game in db.session().query(Game)\
.filter(Game.status==GameStatus.WAITING_FOR_BOT)\
.order_by(Game.id).all():
if len(self.credentials) == 0 or bot_pause == 'True':
continue
# Start a Dota bot to process the game
self.mutex.acquire()
credential = self.credentials.pop(random.randint(0, len(self.credentials) - 1))
g = DotaBot(self, credential, admins, casters, game.id, game.name, game.password,
game.team1, game.team2, game.team1_ids, game.team2_ids, game.team_choosing_first)
self.working_bots[credential.login] = g
game.status = GameStatus.CREATION_IN_PROGRESS
game.bot = credential.login
db.session().commit()
g.start()
self.mutex.release()
sleep(60)
def bot_end(self, credential):
"""Signal that a bot has finished it work and the credential is free to use again.
Args:
credential: `Credential` of the bot.
"""
self.mutex.acquire()
self.working_bots.pop(credential.login)
self.credentials.append(credential)
self.mutex.release()
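# Illustrative note, not part of the original module: the STEAM_BOTS config
# string parsed in __init__ alternates logins and passwords separated by '@',
# e.g. (values are assumptions)
#
#     STEAM_BOTS = "bot_one@secret1@bot_two@secret2"
#
# which yields Credential('bot_one', 'secret1') and Credential('bot_two', 'secret2').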
# Start a Manager if this file is the main script.
if __name__ == '__main__':
g = WorkerManager()
g.start()
g.join()
| FroggedTV/grenouilleAPI | backend/bot_app.py | Python | gpl-3.0 | 3,850 | 0.003377 |
import datetime
import logging
from django.contrib.auth.models import User
from django.test import Client
from django.test import TestCase
from django.urls import reverse
from person.models import Person
from parliament.models import ParliamentMember
from parliament.models import PoliticalParty
from document.models import Agenda
from document.models import CategoryDossier
from document.models import CategoryDocument
from document.models import Dossier
from document.models import Document
from document.models import Kamerstuk
from document.models import Voting
import openkamer.document
import openkamer.dossier
import openkamer.kamerstuk
logger = logging.getLogger(__name__)
class TestExample(TestCase):
def test_example(self):
logger.info('BEGIN')
logger.info('END')
class TestFindParliamentMembers(TestCase):
fixtures = ['person.json', 'parliament.json']
def test_find_member(self):
surname = 'Zijlstra'
forename = 'Halbe'
initials = 'H.'
member = ParliamentMember.find(surname=surname, initials=initials)
self.assertEqual(member.person.forename, forename)
def test_find_member_surname_prefix(self):
surname = 'Weyenberg van'
forename = 'Steven'
initials = 'S.P.R.A.'
member = ParliamentMember.find(surname=surname, initials=initials)
self.assertEqual(member.person.forename, forename)
surname = 'van Weyenberg'
member = ParliamentMember.find(surname=surname, initials=initials)
self.assertEqual(member.person.forename, forename)
def test_find_member_non_ascii(self):
surname = 'Koser Kaya'
forename = 'Fatma'
initials = 'F.'
member = ParliamentMember.find(surname=surname, initials=initials)
self.assertEqual(member.person.forename, forename)
surname = 'Koşer Kaya'
member = ParliamentMember.find(surname=surname, initials=initials)
self.assertEqual(member.person.forename, forename)
class TestPersonView(TestCase):
fixtures = ['person.json']
@classmethod
def setUpTestData(cls):
cls.client = Client()
def test_persons_overview(self):
response = self.client.get(reverse('persons'))
self.assertEqual(response.status_code, 200)
def test_person_overview(self):
persons = Person.objects.all()[:10]
for person in persons:
response = self.client.get(reverse('person', args=(person.slug,)))
self.assertEqual(response.status_code, 200)
def test_person_check_view(self):
response = self.client.get(reverse('persons-check'))
self.assertEqual(response.status_code, 200)
class TestWebsite(TestCase):
fixtures = ['person.json', 'parliament.json', 'government.json']
@classmethod
def setUpTestData(cls):
# TODO: improve performance of votings (tkapi)
openkamer.dossier.create_dossier_retry_on_error(33885)
openkamer.dossier.create_dossier_retry_on_error(33506)
cls.client = Client()
def test_homepage(self):
response = self.client.get('/')
self.assertEqual(response.status_code, 200)
def test_persons_overview(self):
response = self.client.get(reverse('persons'))
self.assertEqual(response.status_code, 200)
def test_person_overview(self):
persons = Person.objects.all()[:10]
for person in persons:
response = self.client.get(reverse('person', args=(person.slug,)))
self.assertEqual(response.status_code, 200)
def test_person_autocomplete_view(self):
response = self.client.get(reverse('person-autocomplete') + '?q=samsom')
self.assertEqual(response.status_code, 200)
def test_dossiers_overview(self):
response = self.client.get(reverse('wetsvoorstellen'))
self.assertEqual(response.status_code, 200)
def test_dossiers_filter_view(self):
ivo = Person.objects.filter(forename='Ivo', surname='Opstelten')[0]
response = self.client.get(reverse('wetsvoorstellen') + '?title=wet&submitter=' + str(ivo.id) + '&voting_result=AAN')
self.assertEqual(response.status_code, 200)
def test_dossier_views(self):
dossiers = Dossier.objects.all()
for dossier in dossiers:
response = self.client.get(reverse('dossier-tiles', args=(dossier.dossier_id,)))
self.assertEqual(response.status_code, 200)
def test_timeline_views(self):
dossiers = Dossier.objects.all()
for dossier in dossiers:
response = self.client.get(reverse('dossier-timeline', args=(dossier.dossier_id,)))
self.assertEqual(response.status_code, 200)
def test_timeline_horizontal_views(self):
dossiers = Dossier.objects.all()
for dossier in dossiers:
response = self.client.get(reverse('dossier-timeline-horizontal', args=(dossier.dossier_id,)))
self.assertEqual(response.status_code, 200)
response = self.client.get('/dossier/timeline/horizontal/json/?dossier_pk=' + str(dossier.id))
self.assertEqual(response.status_code, 200)
def test_document_view(self):
documents = Document.objects.all()
for document in documents:
response = self.client.get(reverse('document', args=(document.document_id,)))
self.assertEqual(response.status_code, 200)
def test_kamerstuk_view(self):
kamerstukken = Kamerstuk.objects.all()
for kamerstuk in kamerstukken:
response = self.client.get(reverse('kamerstuk', args=(kamerstuk.id_main, kamerstuk.id_sub,)))
self.assertEqual(response.status_code, 200)
def test_kamerstuk_modifications(self):
kamerstuk_08 = Kamerstuk.objects.get(id_main='33885', id_sub='8')
kamerstuk_11 = Kamerstuk.objects.get(id_main='33885', id_sub='11')
kamerstuk_29 = Kamerstuk.objects.get(id_main='33885', id_sub='29')
kamerstuk_original = Kamerstuk.objects.get(id_main='33885', id_sub='2')
self.assertEqual(kamerstuk_08.original, kamerstuk_original)
self.assertEqual(kamerstuk_11.original, kamerstuk_original)
self.assertEqual(kamerstuk_29.original, kamerstuk_original)
modifications = [kamerstuk_08, kamerstuk_11, kamerstuk_29]
for modification in kamerstuk_original.modifications:
self.assertTrue(modification in modifications)
def test_agendas_view(self):
response = self.client.get('/agendas/')
self.assertEqual(response.status_code, 200)
def test_agenda_view(self):
agendas = Agenda.objects.all()
for agenda in agendas:
response = self.client.get('/agenda/' + str(agenda.agenda_id) + '/')
self.assertEqual(response.status_code, 200)
def test_votings_overview(self):
response = self.client.get(reverse('votings'))
self.assertEqual(response.status_code, 200)
def test_voting_view(self):
votings = Voting.objects.all()
for voting in votings:
if voting.is_dossier_voting:
response = self.client.get(reverse('voting-dossier', args=(voting.dossier.dossier_id,)))
elif voting.kamerstuk:
response = self.client.get(reverse('voting-kamerstuk', args=(voting.kamerstuk.id_main, voting.kamerstuk.id_sub,)))
else:
print('WARNING: no kamerstuk found for voting id: {}'.format(voting.id))
continue
self.assertEqual(response.status_code, 200)
def test_parties_overview(self):
response = self.client.get(reverse('parties'))
self.assertEqual(response.status_code, 200)
def test_party_view(self):
parties = PoliticalParty.objects.all()
for party in parties:
if not party.slug:
print('WARNING: Empty party found, skipping view')
continue
response = self.client.get(reverse('party', args=(party.slug,)))
self.assertEqual(response.status_code, 200)
self.assertGreaterEqual(len(parties), 50)
def test_parliament_members_overview(self):
response = self.client.get(reverse('parliament-members'))
self.assertEqual(response.status_code, 200)
def test_parliament_members_check(self):
password = 'adminpassword'
my_admin = User.objects.create_superuser('adminuser', 'admin@admin.com', password)
self.client.login(username=my_admin.username, password=password)
response = self.client.get(reverse('parliament-members-check'))
self.assertEqual(response.status_code, 200)
self.client.logout()
def test_database_dumps_view(self):
response = self.client.get(reverse('database-dumps'))
self.assertEqual(response.status_code, 200)
def test_stats_view(self):
response = self.client.get(reverse('stats'))
self.assertEqual(response.status_code, 200)
def test_data_stats_view(self):
response = self.client.get(reverse('stats-data'))
self.assertEqual(response.status_code, 200)
def test_plot_example_view(self):
response = self.client.get('/stats/exampleplots/')
self.assertEqual(response.status_code, 200)
def test_api_homepage(self):
response = self.client.get('/api/')
self.assertEqual(response.status_code, 200)
def test_api_person(self):
response = self.client.get('/api/person/')
self.assertEqual(response.status_code, 200)
def test_api_parliament(self):
response = self.client.get('/api/parliament/')
self.assertEqual(response.status_code, 200)
def test_api_parliament_member(self):
response = self.client.get('/api/parliament_member/')
self.assertEqual(response.status_code, 200)
def test_api_party(self):
response = self.client.get('/api/party/')
self.assertEqual(response.status_code, 200)
def test_api_party_member(self):
response = self.client.get('/api/party_member/')
self.assertEqual(response.status_code, 200)
def test_api_document(self):
response = self.client.get('/api/document/')
self.assertEqual(response.status_code, 200)
def test_api_kamerstuk(self):
response = self.client.get('/api/kamerstuk/')
self.assertEqual(response.status_code, 200)
def test_api_submitter(self):
response = self.client.get('/api/submitter/')
self.assertEqual(response.status_code, 200)
def test_api_dossier(self):
response = self.client.get('/api/dossier/')
self.assertEqual(response.status_code, 200)
def test_api_voting(self):
response = self.client.get('/api/voting/')
self.assertEqual(response.status_code, 200)
def test_api_voteparty(self):
response = self.client.get('/api/vote_party/')
self.assertEqual(response.status_code, 200)
def test_api_voteindividual(self):
response = self.client.get('/api/vote_individual/')
self.assertEqual(response.status_code, 200)
def test_api_category_dossier(self):
response = self.client.get('/api/category_dossier/')
self.assertEqual(response.status_code, 200)
def test_api_category_document(self):
response = self.client.get('/api/category_document/')
self.assertEqual(response.status_code, 200)
class TestCategory(TestCase):
def test_create_dossier_category_from_string(self):
self.create_category_from_string(CategoryDossier)
def test_create_document_category_from_string(self):
self.create_category_from_string(CategoryDocument)
def create_category_from_string(self, category_class):
text = 'Zorg en gezondheid | Ziekten en behandelingen'
expected_names = [
'zorg en gezondheid',
'ziekten en behandelingen',
]
categories = openkamer.dossier.get_categories(text, category_class)
self.assertEqual(len(categories), 2)
for index, category in enumerate(categories):
self.assertEqual(expected_names[index], category.name)
text = ' Zorg en Gezondheid| Ziekten en Behandelingen'
expected_names = [
'zorg en gezondheid',
'ziekten en behandelingen',
]
categories = openkamer.dossier.get_categories(text, category_class)
self.assertEqual(len(categories), 2)
for index, category in enumerate(categories):
self.assertEqual(expected_names[index], category.name)
class TestDocumentLinks(TestCase):
@classmethod
def setUpTestData(cls):
dosser_id = '33569'
dossier = Dossier.objects.create(dossier_id=dosser_id)
document = Document.objects.create(dossier=dossier)
Kamerstuk.objects.create(document=document, id_main=dosser_id, id_sub='1')
Kamerstuk.objects.create(document=document, id_main=dosser_id, id_sub='2')
Kamerstuk.objects.create(document=document, id_main=dosser_id, id_sub='3')
def test_create_new_url(self):
url = 'kst-33569-1.html'
url_expected = '/kamerstuk/33569/1/'
self.check_url(url, url_expected)
url = 'kst-33569-A.html'
url_expected = 'https://zoek.officielebekendmakingen.nl/kst-33569-A.html'
self.check_url(url, url_expected)
url = 'http://www.google.com'
url_expected = 'http://www.google.com'
self.check_url(url, url_expected)
url = '#anchor-1'
url_expected = '#anchor-1'
self.check_url(url, url_expected)
def check_url(self, url, url_expected):
new_url = openkamer.document.create_new_url(url)
self.assertEqual(new_url, url_expected)
url = new_url
new_url = openkamer.document.create_new_url(url)
self.assertEqual(new_url, url)
| openkamer/openkamer | website/tests.py | Python | mit | 13,880 | 0.001225 |
from __future__ import unicode_literals
import sys
import os
import random
import matplotlib
# Make sure that we are using QT5
matplotlib.use('Qt5Agg')
from PyQt5 import QtCore, QtWidgets
from numpy import arange, sin, pi
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas
from matplotlib.figure import Figure
progname = os.path.basename(sys.argv[0])
progversion = "0.1"
class MyMplCanvas(FigureCanvas):
"""Ultimately, this is a QWidget (as well as a FigureCanvasAgg, etc.)."""
def __init__(self, parent=None, width=5, height=4, dpi=100):
fig = Figure(figsize=(width, height), dpi=dpi)
self.axes = fig.add_subplot(111)
self.compute_initial_figure()
FigureCanvas.__init__(self, fig)
self.setParent(parent)
FigureCanvas.setSizePolicy(self,
QtWidgets.QSizePolicy.Expanding,
QtWidgets.QSizePolicy.Expanding)
FigureCanvas.updateGeometry(self)
def compute_initial_figure(self):
pass
class MyStaticMplCanvas(MyMplCanvas):
"""Simple canvas with a sine plot."""
def compute_initial_figure(self):
t = arange(0.0, 3.0, 0.01)
s = sin(2 * pi * t)
self.axes.plot(t, s)
class MyDynamicMplCanvas(MyMplCanvas):
"""A canvas that updates itself every second with a new plot."""
def __init__(self, *args, **kwargs):
MyMplCanvas.__init__(self, *args, **kwargs)
timer = QtCore.QTimer(self)
timer.timeout.connect(self.update_figure)
timer.start(1000)
def compute_initial_figure(self):
self.axes.plot([0, 1, 2, 3], [1, 2, 0, 4], 'r')
def update_figure(self):
# Build a list of 4 random integers between 0 and 10 (both inclusive)
l = [random.randint(0, 10) for i in range(4)]
self.axes.cla()
self.axes.plot([0, 1, 2, 3], l, 'r')
self.draw()
class ApplicationWindow(QtWidgets.QMainWindow):
def __init__(self):
QtWidgets.QMainWindow.__init__(self)
self.setAttribute(QtCore.Qt.WA_DeleteOnClose)
self.setWindowTitle("application main window")
self.file_menu = QtWidgets.QMenu('&File', self)
self.file_menu.addAction('&Quit', self.fileQuit,
QtCore.Qt.CTRL + QtCore.Qt.Key_Q)
self.menuBar().addMenu(self.file_menu)
self.help_menu = QtWidgets.QMenu('&Help', self)
self.menuBar().addSeparator()
self.menuBar().addMenu(self.help_menu)
self.help_menu.addAction('&About', self.about)
self.main_widget = QtWidgets.QWidget(self)
l = QtWidgets.QVBoxLayout(self.main_widget)
sc = MyStaticMplCanvas(self.main_widget, width=5, height=4, dpi=100)
dc = MyDynamicMplCanvas(self.main_widget, width=5, height=4, dpi=100)
l.addWidget(sc)
l.addWidget(dc)
self.main_widget.setFocus()
self.setCentralWidget(self.main_widget)
self.statusBar().showMessage("All hail matplotlib!", 2000)
def fileQuit(self):
self.close()
def closeEvent(self, ce):
self.fileQuit()
def about(self):
QtWidgets.QMessageBox.about(self, "About",
"""embedding_in_qt5.py example
Copyright 2005 Florent Rougon, 2006 Darren Dale, 2015 Jens H Nielsen
This program is a simple example of a Qt5 application embedding matplotlib
canvases.
It may be used and modified with no restriction; raw copies as well as
modified versions may be distributed without limitation.
This is modified from the embedding in qt4 example to show the difference
between qt4 and qt5"""
)
qApp = QtWidgets.QApplication(sys.argv)
aw = ApplicationWindow()
aw.setWindowTitle("%s" % progname)
aw.show()
sys.exit(qApp.exec_())
# qApp.exec_()
| RyanChinSang/ECNG3020-ORSS4SCVI | BETA/TestCode/Matplotlib/mpl1.py | Python | gpl-3.0 | 3,865 | 0.001811 |
# vim:set fileencoding=utf-8
#
# Copyright (C) 2015 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
# Red Hat Author(s): David Shea <dshea@redhat.com>
#
from pyanaconda import users
import unittest
import tempfile
import shutil
import os
import crypt
import platform
import glob
@unittest.skipIf(os.geteuid() != 0, "user creation must be run as root")
class UserCreateTest(unittest.TestCase):
def setUp(self):
self.users = users.Users()
# Create a temporary directory with empty passwd and group files
self.tmpdir = tempfile.mkdtemp()
os.mkdir(self.tmpdir + "/etc")
open(self.tmpdir + "/etc/passwd", "w").close()
open(self.tmpdir + "/etc/group", "w").close()
open(self.tmpdir + "/etc/shadow", "w").close()
open(self.tmpdir + "/etc/gshadow", "w").close()
# Copy over enough of libnss for UID and GID lookups to work
with open(self.tmpdir + "/etc/nsswitch.conf", "w") as f:
f.write("passwd: files\n")
f.write("shadow: files\n")
f.write("group: files\n")
f.write("initgroups: files\n")
if platform.architecture()[0].startswith("64"):
libdir = "/lib64"
else:
libdir = "/lib"
os.mkdir(self.tmpdir + libdir)
for lib in glob.glob(libdir + "/libnss_files*"):
shutil.copy(lib, self.tmpdir + lib)
def tearDown(self):
shutil.rmtree(self.tmpdir)
def _readFields(self, filename, key):
"""Look for a line in a password or group file where the first field
matches key, and return the record as a list of fields.
"""
with open(self.tmpdir + filename) as f:
for line in f:
fields = line.strip().split(':')
if fields[0] == key:
return fields
return None
def create_group_test(self):
"""Create a group."""
self.users.createGroup("test_group", root=self.tmpdir)
fields = self._readFields("/etc/group", "test_group")
self.assertIsNotNone(fields)
self.assertEqual(fields[0], "test_group")
fields = self._readFields("/etc/gshadow", "test_group")
self.assertIsNotNone(fields)
self.assertEqual(fields[0], "test_group")
def create_group_gid_test(self):
"""Create a group with a specific GID."""
self.users.createGroup("test_group", gid=47, root=self.tmpdir)
fields = self._readFields("/etc/group", "test_group")
self.assertIsNotNone(fields)
self.assertEqual(fields[0], "test_group")
self.assertEqual(fields[2], "47")
def create_group_exists_test(self):
"""Create a group that already exists."""
with open(self.tmpdir + "/etc/group", "w") as f:
f.write("test_group:x:47:\n")
self.assertRaises(ValueError, self.users.createGroup, "test_group", root=self.tmpdir)
def create_group_gid_exists_test(self):
"""Create a group with a GID that already exists."""
with open(self.tmpdir + "/etc/group", "w") as f:
f.write("gid_used:x:47:\n")
self.assertRaises(ValueError, self.users.createGroup, "test_group", gid=47, root=self.tmpdir)
def create_user_test(self):
"""Create a user."""
self.users.createUser("test_user", root=self.tmpdir)
pwd_fields = self._readFields("/etc/passwd", "test_user")
self.assertIsNotNone(pwd_fields)
self.assertEqual(pwd_fields[0], "test_user")
# Check that the fields got the right default values
# UID + GID set to some sort of int
self.assertTrue(isinstance(int(pwd_fields[2]), int))
self.assertTrue(isinstance(int(pwd_fields[3]), int))
# home is /home/username
self.assertEqual(pwd_fields[5], "/home/test_user")
# shell set to something
self.assertTrue(pwd_fields[6])
shadow_fields = self._readFields("/etc/shadow", "test_user")
self.assertIsNotNone(shadow_fields)
self.assertEqual(shadow_fields[0], "test_user")
# Ensure the password is locked
self.assertTrue(shadow_fields[1].startswith("!"))
# Ensure the date of last password change is empty
self.assertEqual(shadow_fields[2], "")
# Check that the user group was created
grp_fields = self._readFields("/etc/group", "test_user")
self.assertIsNotNone(grp_fields)
self.assertEqual(grp_fields[0], "test_user")
# Check that user group's GID matches the user's GID
self.assertEqual(grp_fields[2], pwd_fields[3])
gshadow_fields = self._readFields("/etc/gshadow", "test_user")
self.assertIsNotNone(gshadow_fields)
self.assertEqual(gshadow_fields[0], "test_user")
def create_user_text_options_test(self):
"""Create a user with the text fields set."""
self.users.createUser("test_user", gecos="Test User", homedir="/home/users/testuser", shell="/bin/test", root=self.tmpdir)
pwd_fields = self._readFields("/etc/passwd", "test_user")
self.assertIsNotNone(pwd_fields)
self.assertEqual(pwd_fields[0], "test_user")
self.assertEqual(pwd_fields[4], "Test User")
self.assertEqual(pwd_fields[5], "/home/users/testuser")
self.assertEqual(pwd_fields[6], "/bin/test")
# Check that the home directory was created
self.assertTrue(os.path.isdir(self.tmpdir + "/home/users/testuser"))
def create_user_groups_test(self):
"""Create a user with a list of groups."""
# Create one of the groups
self.users.createGroup("test3", root=self.tmpdir)
# Create a user and add it three groups, two of which do not exist,
# and one which specifies a GID.
self.users.createUser("test_user", groups=["test1", "test2(5001)", "test3"], root=self.tmpdir)
grp_fields1 = self._readFields("/etc/group", "test1")
self.assertEqual(grp_fields1[3], "test_user")
grp_fields2 = self._readFields("/etc/group", "test2")
self.assertEqual(grp_fields2[3], "test_user")
self.assertEqual(grp_fields2[2], "5001")
grp_fields3 = self._readFields("/etc/group", "test3")
self.assertEqual(grp_fields3[3], "test_user")
def create_user_groups_gid_conflict_test(self):
"""Create a user with a bad list of groups."""
# Create one of the groups
self.users.createGroup("test3", gid=5000, root=self.tmpdir)
# Add test3 to the group list with a different GID.
self.assertRaises(ValueError, self.users.createUser,
"test_user", groups=["test3(5002)"], root=self.tmpdir)
def create_user_password_test(self):
"""Create a user with a password."""
self.users.createUser("test_user1", password="password", root=self.tmpdir)
shadow_fields = self._readFields("/etc/shadow", "test_user1")
self.assertIsNotNone(shadow_fields)
# Make sure the password works
self.assertEqual(crypt.crypt("password", shadow_fields[1]), shadow_fields[1])
# Set the encrypted password for another user with isCrypted
cryptpw = shadow_fields[1]
self.users.createUser("test_user2", password=cryptpw, isCrypted=True, root=self.tmpdir)
shadow_fields = self._readFields("/etc/shadow", "test_user2")
self.assertIsNotNone(shadow_fields)
self.assertEqual(cryptpw, shadow_fields[1])
# Set an empty password
self.users.createUser("test_user3", password="", root=self.tmpdir)
shadow_fields = self._readFields("/etc/shadow", "test_user3")
self.assertIsNotNone(shadow_fields)
self.assertEqual("", shadow_fields[1])
def create_user_lock_test(self):
"""Create a locked user account."""
# Create an empty, locked password
self.users.createUser("test_user1", lock=True, password="", root=self.tmpdir)
shadow_fields = self._readFields("/etc/shadow", "test_user1")
self.assertIsNotNone(shadow_fields)
self.assertEqual("!", shadow_fields[1])
# Create a locked password and ensure it can be unlocked (by removing the ! at the front)
self.users.createUser("test_user2", lock=True, password="password", root=self.tmpdir)
shadow_fields = self._readFields("/etc/shadow", "test_user2")
self.assertIsNotNone(shadow_fields)
self.assertTrue(shadow_fields[1].startswith("!"))
self.assertEqual(crypt.crypt("password", shadow_fields[1][1:]), shadow_fields[1][1:])
def create_user_uid_test(self):
"""Create a user with a specific UID."""
self.users.createUser("test_user", uid=1047, root=self.tmpdir)
pwd_fields = self._readFields("/etc/passwd", "test_user")
self.assertIsNotNone(pwd_fields)
self.assertEqual(pwd_fields[2], "1047")
def create_user_gid_test(self):
"""Create a user with a specific GID."""
self.users.createUser("test_user", gid=1047, root=self.tmpdir)
pwd_fields = self._readFields("/etc/passwd", "test_user")
self.assertIsNotNone(pwd_fields)
self.assertEqual(pwd_fields[3], "1047")
grp_fields = self._readFields("/etc/group", "test_user")
self.assertIsNotNone(grp_fields)
self.assertEqual(grp_fields[2], "1047")
def create_user_algo_test(self):
"""Create a user with a specific password algorithm."""
self.users.createUser("test_user1", password="password", algo="md5", root=self.tmpdir)
shadow_fields = self._readFields("/etc/shadow", "test_user1")
self.assertIsNotNone(shadow_fields)
self.assertTrue(shadow_fields[1].startswith("$1$"))
self.users.createUser("test_user2", password="password", algo="sha512", root=self.tmpdir)
shadow_fields = self._readFields("/etc/shadow", "test_user2")
self.assertIsNotNone(shadow_fields)
self.assertTrue(shadow_fields[1].startswith("$6$"))
def create_user_exists_test(self):
"""Create a user that already exists."""
with open(self.tmpdir + "/etc/passwd", "w") as f:
f.write("test_user:x:1000:1000::/:/bin/sh\n")
self.assertRaises(ValueError, self.users.createUser, "test_user", root=self.tmpdir)
def create_user_uid_exists_test(self):
"""Create a user with a UID that already exists."""
with open(self.tmpdir + "/etc/passwd", "w") as f:
f.write("conflict:x:1000:1000::/:/bin/sh\n")
self.assertRaises(ValueError, self.users.createUser, "test_user", uid=1000, root=self.tmpdir)
def create_user_gid_exists_test(self):
"""Create a user with a GID of an existing group."""
self.users.createGroup("test_group", gid=5000, root=self.tmpdir)
self.users.createUser("test_user", gid=5000, root=self.tmpdir)
passwd_fields = self._readFields("/etc/passwd", "test_user")
self.assertIsNotNone(passwd_fields)
self.assertEqual(passwd_fields[3], "5000")
def set_user_ssh_key_test(self):
keydata = "THIS IS TOTALLY A SSH KEY"
self.users.createUser("test_user", homedir="/home/test_user", root=self.tmpdir)
self.users.setUserSshKey("test_user", keydata, root=self.tmpdir)
keyfile = self.tmpdir + "/home/test_user/.ssh/authorized_keys"
self.assertTrue(os.path.isfile(keyfile))
with open(keyfile) as f:
output_keydata = f.read()
self.assertEqual(keydata, output_keydata.strip())
def set_root_password_test(self):
password = "password1"
# Initialize a root user with an empty password, like the setup package would have
with open(self.tmpdir + "/etc/passwd", "w") as f:
f.write("root:x:0:0:root:/root:/bin/bash\n")
with open(self.tmpdir + "/etc/shadow", "w") as f:
f.write("root:*:16489:0:99999:7:::\n")
self.users.setRootPassword(password, root=self.tmpdir)
shadow_fields = self._readFields("/etc/shadow", "root")
self.assertEqual(crypt.crypt(password, shadow_fields[1]), shadow_fields[1])
# Try a different password with isLocked=True
password = "password2"
self.users.setRootPassword(password, isLocked=True, root=self.tmpdir)
shadow_fields = self._readFields("/etc/shadow", "root")
self.assertTrue(shadow_fields[1].startswith("!"))
self.assertEqual(crypt.crypt(password, shadow_fields[1][1:]), shadow_fields[1][1:])
# Try an encrypted password
password = "$1$asdf$password"
self.users.setRootPassword(password, isCrypted=True, root=self.tmpdir)
shadow_fields = self._readFields("/etc/shadow", "root")
self.assertEqual(password, shadow_fields[1])
def create_user_reuse_home_test(self):
# Create a user, reusing an old home directory
os.makedirs(self.tmpdir + "/home/test_user")
os.chown(self.tmpdir + "/home/test_user", 500, 500)
self.users.createUser("test_user", homedir="/home/test_user", uid=1000, gid=1000, root=self.tmpdir)
passwd_fields = self._readFields("/etc/passwd", "test_user")
self.assertIsNotNone(passwd_fields)
self.assertEqual(passwd_fields[2], "1000")
self.assertEqual(passwd_fields[3], "1000")
stat_fields = os.stat(self.tmpdir + "/home/test_user")
self.assertEqual(stat_fields.st_uid, 1000)
self.assertEqual(stat_fields.st_gid, 1000)
def create_user_gid_in_group_list_test(self):
"""Create a user with a GID equal to that of one of the requested groups"""
self.users.createUser("test_user", gid=1047, groups=["test_group(1047)"], root=self.tmpdir)
# Ensure that the user's GID is equal to the GID requested
pwd_fields = self._readFields("/etc/passwd", "test_user")
self.assertIsNotNone(pwd_fields)
self.assertEqual(pwd_fields[3], "1047")
# and that the requested group has the right GID
grp_fields = self._readFields("/etc/group", "test_group")
self.assertIsNotNone(grp_fields)
self.assertEqual(grp_fields[2], "1047")
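# Hedged sketch (not in the original file) of the _readFields helper the
# tests above rely on; the real implementation is defined earlier in this
# file. It returns the colon-separated fields of the entry named `key`
# in the file at self.tmpdir + filename, or None when no entry matches:
#
#   def _readFields(self, filename, key):
#       with open(self.tmpdir + filename) as f:
#           for line in f:
#               fields = line.strip().split(":")
#               if fields and fields[0] == key:
#                   return fields
#       return None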
| dashea/anaconda | tests/pyanaconda_tests/user_create_test.py | Python | gpl-2.0 | 15,039 | 0.001596 |
# -*- coding: utf-8 -*-
##
##
## This file is part of Indico.
## Copyright (C) 2002 - 2014 European Organization for Nuclear Research (CERN).
##
## Indico is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 3 of the
## License, or (at your option) any later version.
##
## Indico is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Indico; if not, see <http://www.gnu.org/licenses/>.
## WARNING: THE FOLLOWING LINE WILL BE OVERWRITTEN AT INSTALLATION TIME
indico_conf = "" # path to indico.conf
##
import os
if indico_conf == '': # we may be in development mode or in installation mode
indico_conf = os.path.join(os.path.dirname(__file__), '..', '..', '..', 'etc', 'indico.conf')
if not os.path.exists(indico_conf):
# eggmode
indico_conf = os.path.join(os.path.dirname(__file__), '..', '..', 'etc', 'indico.conf.sample')
if not os.path.exists(indico_conf):
indico_conf = os.path.join(os.path.dirname(__file__), '..', '..', '..', 'etc', 'indico.conf.sample')
execfile(indico_conf)
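# In short, the lookup order sketched above is: (1) the path injected at
# installation time, (2) etc/indico.conf three levels up from this file
# (development checkout), (3) etc/indico.conf.sample two levels up (egg
# mode), (4) etc/indico.conf.sample three levels up; the chosen file is
# then executed into this module's namespace.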
| pferreir/indico-backup | indico/MaKaC/common/MaKaCConfig.py | Python | gpl-3.0 | 1,407 | 0.013504 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class ConnectionMonitorsOperations(object):
"""ConnectionMonitorsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2018_12_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def _create_or_update_initial(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
connection_monitor_name, # type: str
parameters, # type: "_models.ConnectionMonitor"
**kwargs # type: Any
):
# type: (...) -> "_models.ConnectionMonitorResult"
cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectionMonitorResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-12-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'connectionMonitorName': self._serialize.url("connection_monitor_name", connection_monitor_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'ConnectionMonitor')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('ConnectionMonitorResult', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('ConnectionMonitorResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/connectionMonitors/{connectionMonitorName}'} # type: ignore
def begin_create_or_update(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
connection_monitor_name, # type: str
parameters, # type: "_models.ConnectionMonitor"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.ConnectionMonitorResult"]
"""Create or update a connection monitor.
:param resource_group_name: The name of the resource group containing Network Watcher.
:type resource_group_name: str
:param network_watcher_name: The name of the Network Watcher resource.
:type network_watcher_name: str
:param connection_monitor_name: The name of the connection monitor.
:type connection_monitor_name: str
:param parameters: Parameters that define the operation to create a connection monitor.
:type parameters: ~azure.mgmt.network.v2018_12_01.models.ConnectionMonitor
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either ConnectionMonitorResult or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2018_12_01.models.ConnectionMonitorResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectionMonitorResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
connection_monitor_name=connection_monitor_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('ConnectionMonitorResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'connectionMonitorName': self._serialize.url("connection_monitor_name", connection_monitor_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/connectionMonitors/{connectionMonitorName}'} # type: ignore
def get(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
connection_monitor_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.ConnectionMonitorResult"
"""Gets a connection monitor by name.
:param resource_group_name: The name of the resource group containing Network Watcher.
:type resource_group_name: str
:param network_watcher_name: The name of the Network Watcher resource.
:type network_watcher_name: str
:param connection_monitor_name: The name of the connection monitor.
:type connection_monitor_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ConnectionMonitorResult, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2018_12_01.models.ConnectionMonitorResult
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectionMonitorResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-12-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'connectionMonitorName': self._serialize.url("connection_monitor_name", connection_monitor_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('ConnectionMonitorResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/connectionMonitors/{connectionMonitorName}'} # type: ignore
def _delete_initial(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
connection_monitor_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-12-01"
accept = "application/json"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'connectionMonitorName': self._serialize.url("connection_monitor_name", connection_monitor_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/connectionMonitors/{connectionMonitorName}'} # type: ignore
def begin_delete(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
connection_monitor_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Deletes the specified connection monitor.
:param resource_group_name: The name of the resource group containing Network Watcher.
:type resource_group_name: str
:param network_watcher_name: The name of the Network Watcher resource.
:type network_watcher_name: str
:param connection_monitor_name: The name of the connection monitor.
:type connection_monitor_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
connection_monitor_name=connection_monitor_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'connectionMonitorName': self._serialize.url("connection_monitor_name", connection_monitor_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/connectionMonitors/{connectionMonitorName}'} # type: ignore
def _stop_initial(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
connection_monitor_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-12-01"
accept = "application/json"
# Construct URL
url = self._stop_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'connectionMonitorName': self._serialize.url("connection_monitor_name", connection_monitor_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_stop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/connectionMonitors/{connectionMonitorName}/stop'} # type: ignore
def begin_stop(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
connection_monitor_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Stops the specified connection monitor.
:param resource_group_name: The name of the resource group containing Network Watcher.
:type resource_group_name: str
:param network_watcher_name: The name of the Network Watcher resource.
:type network_watcher_name: str
:param connection_monitor_name: The name of the connection monitor.
:type connection_monitor_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._stop_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
connection_monitor_name=connection_monitor_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'connectionMonitorName': self._serialize.url("connection_monitor_name", connection_monitor_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/connectionMonitors/{connectionMonitorName}/stop'} # type: ignore
def _start_initial(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
connection_monitor_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-12-01"
accept = "application/json"
# Construct URL
url = self._start_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'connectionMonitorName': self._serialize.url("connection_monitor_name", connection_monitor_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/connectionMonitors/{connectionMonitorName}/start'} # type: ignore
def begin_start(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
connection_monitor_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Starts the specified connection monitor.
:param resource_group_name: The name of the resource group containing Network Watcher.
:type resource_group_name: str
:param network_watcher_name: The name of the Network Watcher resource.
:type network_watcher_name: str
:param connection_monitor_name: The name of the connection monitor.
:type connection_monitor_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._start_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
connection_monitor_name=connection_monitor_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'connectionMonitorName': self._serialize.url("connection_monitor_name", connection_monitor_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/connectionMonitors/{connectionMonitorName}/start'} # type: ignore
def _query_initial(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
connection_monitor_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.ConnectionMonitorQueryResult"
cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectionMonitorQueryResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-12-01"
accept = "application/json"
# Construct URL
url = self._query_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'connectionMonitorName': self._serialize.url("connection_monitor_name", connection_monitor_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('ConnectionMonitorQueryResult', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('ConnectionMonitorQueryResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_query_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/connectionMonitors/{connectionMonitorName}/query'} # type: ignore
def begin_query(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
connection_monitor_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.ConnectionMonitorQueryResult"]
"""Query a snapshot of the most recent connection states.
:param resource_group_name: The name of the resource group containing Network Watcher.
:type resource_group_name: str
:param network_watcher_name: The name of the Network Watcher resource.
:type network_watcher_name: str
:param connection_monitor_name: The name given to the connection monitor.
:type connection_monitor_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either ConnectionMonitorQueryResult or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2018_12_01.models.ConnectionMonitorQueryResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectionMonitorQueryResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._query_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
connection_monitor_name=connection_monitor_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('ConnectionMonitorQueryResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'connectionMonitorName': self._serialize.url("connection_monitor_name", connection_monitor_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_query.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/connectionMonitors/{connectionMonitorName}/query'} # type: ignore
def list(
self,
resource_group_name, # type: str
network_watcher_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.ConnectionMonitorListResult"]
"""Lists all connection monitors for the specified Network Watcher.
:param resource_group_name: The name of the resource group containing Network Watcher.
:type resource_group_name: str
:param network_watcher_name: The name of the Network Watcher resource.
:type network_watcher_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ConnectionMonitorListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2018_12_01.models.ConnectionMonitorListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectionMonitorListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-12-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkWatcherName': self._serialize.url("network_watcher_name", network_watcher_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('ConnectionMonitorListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/connectionMonitors'} # type: ignore
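# Hedged usage sketch (not part of the generated client): these operations
# are reached through NetworkManagementClient rather than instantiated
# directly. The resource names below are hypothetical and real credentials
# are required, so the example is left as comments:
#
#   from azure.identity import DefaultAzureCredential
#   from azure.mgmt.network import NetworkManagementClient
#
#   client = NetworkManagementClient(DefaultAzureCredential(), "<subscription-id>")
#   poller = client.connection_monitors.begin_stop(
#       "example-rg", "example-watcher", "example-monitor")
#   poller.result()  # block until the long-running operation completes
#   for monitor in client.connection_monitors.list("example-rg", "example-watcher"):
#       print(monitor.name)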
| Azure/azure-sdk-for-python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2018_12_01/operations/_connection_monitors_operations.py | Python | mit | 42,196 | 0.00519 |
from ..remote import RemoteModel
from infoblox_netmri.utils.utils import check_api_availability
class SpmDevicesVendorModelGridRemote(RemoteModel):
"""
    This table lists all SPM devices that existed within the user-specified period of time, sorted by Device Name in ascending order.
| ``id:`` The internal NetMRI identifier of the grid entry.
| ``attribute type:`` number
| ``Network:`` The name of the Network View associated.
| ``attribute type:`` string
| ``DeviceID:`` The NetMRI internal identifier for the device.
| ``attribute type:`` number
| ``DeviceName:`` The NetMRI name of the device; this will be either the same as DeviceSysName or DeviceDNSName, depending on your NetMRI configuration.
| ``attribute type:`` string
| ``DeviceIPDotted:`` The management IP address of the device, in dotted (or colon-delimited for IPv6) format.
| ``attribute type:`` string
| ``DeviceIPNumeric:`` The numerical value of the device IP address.
| ``attribute type:`` number
| ``DeviceDNSName:`` The device name as reported by DNS.
| ``attribute type:`` string
| ``TotalPorts:`` Total number of ports.
| ``attribute type:`` number
| ``FreePorts:`` Number of free ports.
| ``attribute type:`` number
| ``FreePortsPercentage:`` Percentage of all ports that are free.
| ``attribute type:`` number
| ``AvailPorts:`` Number of available ports.
| ``attribute type:`` number
| ``AvailPortsPercentage:`` Percentage of all ports that are available.
| ``attribute type:`` number
| ``PoEPorts:`` Number of Power-over-Ethernet ports.
| ``attribute type:`` number
| ``DeviceSysLocation:`` The device sysLocation as reported by SNMP.
| ``attribute type:`` string
| ``DeviceVendor:`` The device vendor name.
| ``attribute type:`` string
| ``DeviceModel:`` The device model name.
| ``attribute type:`` string
| ``PhysicalSerialNum:`` The vendor-specific serial number string for the physical entity. The preferred value is the serial number string actually printed on the component itself (if present).
| ``attribute type:`` string
| ``DeviceSysDescr:`` The device sysDescr as reported by SNMP.
| ``attribute type:`` string
| ``DeviceType:`` The NetMRI-determined device type.
| ``attribute type:`` string
| ``FirstSeen:`` The timestamp of when NetMRI first discovered this device.
| ``attribute type:`` datetime
| ``LastSeen:`` The timestamp of when NetMRI last polled data from this device.
| ``attribute type:`` datetime
| ``LastChanged:`` The timestamp of the last change on this device.
| ``attribute type:`` string
| ``PollDuration:`` Number of seconds it took to poll the device.
| ``attribute type:`` number
| ``SwitchingInd:`` A flag indicating whether a switch port forwarding table was retrieved from this device.
| ``attribute type:`` bool
| ``DeviceAssurance:`` Internal use only
| ``attribute type:`` string
| ``VirtualNetworkID:`` The internal identifier for the network which the device is associated to.
| ``attribute type:`` number
| ``UsedAccessPorts:`` Used Access Ports
| ``attribute type:`` number
| ``UsedTrunkPorts:`` Used Trunk Ports
| ``attribute type:`` number
"""
properties = ("id",
"Network",
"DeviceID",
"DeviceName",
"DeviceIPDotted",
"DeviceIPNumeric",
"DeviceDNSName",
"TotalPorts",
"FreePorts",
"FreePortsPercentage",
"AvailPorts",
"AvailPortsPercentage",
"PoEPorts",
"DeviceSysLocation",
"DeviceVendor",
"DeviceModel",
"PhysicalSerialNum",
"DeviceSysDescr",
"DeviceType",
"FirstSeen",
"LastSeen",
"LastChanged",
"PollDuration",
"SwitchingInd",
"DeviceAssurance",
"VirtualNetworkID",
"UsedAccessPorts",
"UsedTrunkPorts",
)
@property
@check_api_availability
def meta(self):
"""
User custom fields
``attribute type:`` model
"""
return self.broker.meta(**{"id": self.id})
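# Hedged note (illustrative, not part of the module): RemoteModel subclasses
# expose each name in `properties` as an attribute populated from the API
# response, so a fetched grid row can be read as, e.g.:
#
#   row.DeviceName, row.FreePortsPercentage, row.UsedTrunkPorts
#
# and `row.meta` issues a broker call (self.broker.meta(id=row.id)) to pull
# user-defined custom fields when the connected API version supports it.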
| infobloxopen/infoblox-netmri | infoblox_netmri/api/remote/models/spm_devices_vendor_model_grid_remote.py | Python | apache-2.0 | 4,572 | 0.00175 |
import os
import rospkg
import lxml.etree as ET
from openag_brain import params
from openag.models import SoftwareModule, SoftwareModuleType
from openag.db_names import SOFTWARE_MODULE, SOFTWARE_MODULE_TYPE
# maping from python types to roslaunch acceptable ones
PARAM_TYPE_MAPPING = {'float' : 'double'}
def create_node(parent, pkg, type, name, args=None):
"""
Creates an xml node for the launch file that represents a ROS node.
`parent` is the parent xml node. `pkg` is the ROS package of the node.
`type` is the name of the executable for the node. `name` is the name
of the ROS node.
"""
e = ET.SubElement(parent, 'node')
e.attrib['pkg'] = pkg
e.attrib['type'] = type
e.attrib['name'] = name
if args:
e.attrib['args'] = args
return e
def create_param(parent, name, value, type):
"""
Creates an xml node for the launch file that represents a ROS parameter.
`parent` is the parent xml node. `name` is the name of the parameter to
set. `value` is the value of the parameter. `type` is the type of the
    parameter (e.g. int, float).
"""
e = ET.SubElement(parent, 'param')
e.attrib['name'] = name
e.attrib['value'] = value
e.attrib['type'] = PARAM_TYPE_MAPPING.get(type, type)
return e
def create_group(parent, ns):
"""
Creates an xml node for the launch file that represents a ROS group.
`parent` is the parent xml node. `ns` is the namespace of the group.
"""
e = ET.SubElement(parent, 'group')
e.attrib['ns'] = ns
return e
def create_remap(parent, from_val, to_val):
"""
Creates an xml node for the launch file that represents a name remapping.
`parent` is the parent xml node. `from_val` is the name that is to be
remapped. `to_val` is the target name.
"""
e = ET.SubElement(parent, 'remap')
e.attrib['from'] = from_val
e.attrib['to'] = to_val
def create_arg(parent, name, default=None, value=None):
"""
Creates an xml node for the launch file that represents a command line
argument. `parent` is the parent xml node. `default` is the default value
of the argument. `value` is the value of the argument. At most one of
`default` and `value` can be provided.
"""
e = ET.SubElement(parent, 'arg')
e.attrib['name'] = name
if default and value:
raise ValueError(
"Argument cannot have both a default value and a value"
)
if default is not None:
e.attrib['default'] = str(default)
if value is not None:
e.attrib['value'] = str(value)
def update_launch(server):
"""
Write a roslaunch file to `modules.launch` based on the software module
configuration read from the `couchdb.Server` instance `server`.
"""
# Form a launch file from the parameter configuration
root = ET.Element('launch')
groups = {None: root}
module_db = server[SOFTWARE_MODULE]
module_types_db = server[SOFTWARE_MODULE_TYPE]
modules = {
module_id: SoftwareModule(module_db[module_id]) for module_id in
module_db if not module_id.startswith('_')
}
for module_id, module in modules.items():
print 'Processing module "{}" from server'.format(module_id)
mod_ns = module.get("namespace", module.get("environment", None))
if not mod_ns in groups:
group = create_group(root, mod_ns)
groups[mod_ns] = group
else:
group = groups[mod_ns]
if module["type"] in module_types_db:
module_type = SoftwareModuleType(module_types_db[module["type"]])
else:
raise RuntimeError(
'Module "{}" references nonexistant module type "{}'.format(
module_id, module["type"]
)
)
args = module.get("arguments", [])
args_str = ", ".join(args)
node = create_node(
group, module_type["package"], module_type["executable"],
module_id, args_str
)
for param_name, param_info in module_type["parameters"].items():
param_value = module.get("parameters", {}).get(
param_name, param_info.get("default", None)
)
param_type = param_info["type"]
if param_value is None:
if param_info.get("required", False):
raise RuntimeError(
'Parameter "{param}" is not defined for software '
'module "{mod_id}"'.format(
param=param_name, mod_id=module.id
)
)
else:
param_value = str(param_value) \
if not isinstance(param_value, bool) else \
str(param_value).lower()
param_type = str(param_type)
create_param(node, param_name, param_value, param_type)
for k,v in module.get("mappings", {}).items():
create_remap(node, k, v)
doc = ET.ElementTree(root)
# Figure out where to write the launch file
rospack = rospkg.RosPack()
pkg_path = rospack.get_path('openag_brain')
launch_path = os.path.join(pkg_path, 'modules.launch')
doc.write(launch_path, pretty_print=True)
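# Hedged demo (not part of the original module): exercises the XML helpers
# above with hypothetical package/executable names to show the roslaunch
# structure they emit.
if __name__ == "__main__":
    demo_root = ET.Element('launch')
    demo_group = create_group(demo_root, 'environment_1')
    demo_node = create_node(demo_group, 'my_pkg', 'sensor_loop.py', 'am2315_1')
    create_param(demo_node, 'default_rate', '1.0', 'float')  # emitted as type="double"
    create_remap(demo_node, 'raw', '/environment_1/raw')
    print ET.tostring(demo_root, pretty_print=True)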
| serein7/openag_brain | src/openag_brain/commands/update_launch.py | Python | gpl-3.0 | 5,299 | 0.001698 |
# -*- coding: utf-8 -*-
""" Tests for the alarms module """
from __future__ import unicode_literals
from soco.alarms import is_valid_recurrence
def test_recurrence():
for recur in ('DAILY', 'WEEKDAYS', 'WEEKENDS', 'ONCE'):
assert is_valid_recurrence(recur)
assert is_valid_recurrence('ON_1')
assert is_valid_recurrence('ON_123412')
assert not is_valid_recurrence('on_1')
assert not is_valid_recurrence('ON_123456789')
assert not is_valid_recurrence('ON_')
assert not is_valid_recurrence(' ON_1')
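# Hedged note: per soco's alarm documentation, the 'ON_' form appears to be
# followed by digits that each encode a day of the week (Sunday = 0 in
# Sonos' convention); the assertions above exercise its length and case
# constraints.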
| oyvindmal/SocoWebService | unittest/test_alarms.py | Python | mit | 538 | 0.003717 |
#!/usr/bin/env python
import os,sys
folder = "/media/kentir1/Development/Linux_Program/Fundkeep/"
def makinGetYear():
return os.popen("date +'%Y'").read()[:-1]
def makinGetMonth():
return os.popen("date +'%m'").read()[:-1]
def makinGetDay():
return os.popen("date +'%d'").read()[:-1]
def makinGetPrevYear(daypassed):
return os.popen("date --date='"+str(daypassed)+" day ago' +'%Y'").read()[:-1]
def makinGetPrevMonth(daypassed):
return os.popen("date --date='"+str(daypassed)+" day ago' +'%m'").read()[:-1]
def makinGetPrevDay(daypassed):
return os.popen("date --date='"+str(daypassed)+" day ago' +'%d'").read()[:-1]
#last entry
f = open(folder+"data/last_entry","r")
le = f.read()
le_y=le[:4]
le_m=le[4:6]
le_d=le[6:]
#input
os.system("gedit "+folder+"var/input")
f = open(folder+"var/input","r")
data = f.read()
f.close()
balance_out = int(data[:data.find(" ")])
balance_ket = data[data.find(" ")+1:-1]
print balance_ket
os.system("mkdir "+folder+"data")
os.system("mkdir "+folder+"data/"+makinGetYear())
os.system("mkdir "+folder+"data/"+makinGetYear()+"/"+makinGetMonth())
os.system("mkdir "+folder+"data/"+makinGetYear()+"/"+makinGetMonth()+"/"+makinGetDay())
balance_before = 0
# take the balance from the previous day
dapet = 0
dpassed = 1
while (dapet == 0):
	try:
		f = open(folder+"data/"
			+makinGetPrevYear(dpassed)
			+"/"
			+makinGetPrevMonth(dpassed)
			+"/"
			+makinGetPrevDay(dpassed)
			+"/balance_after","r")
		f.close()
		if (makinGetDay()=="01"):
			# first day of the month: walk back from the 31st of the
			# previous month until a balance_after file is found
			t_day = 31
			t_bulan = ("0"+str(int(makinGetMonth())-1))[-2:]
			t_tahun = makinGetYear()
			if (int(makinGetMonth())==1):
				t_bulan = "12"
				t_tahun = str(int(makinGetYear())-1)
			print t_bulan
			dapet = 0
			while (dapet==0):
				try:
					f = open(folder+"data/"+t_tahun+"/"+t_bulan+"/"+("0"+str(t_day))[-2:]+"/balance_after","r")
					print t_day
					dapet = 1
					balance_before = int(f.read())
					f.close()
				except IOError:
					t_day = t_day - 1
		else:
			t_day = int(makinGetDay())-1
			#~ t_bulan = ("0"+str(int(makinGetMonth())))[-2:]
			t_bulan = makinGetMonth()
			t_tahun = makinGetYear()
			f = open(folder+"data/"+makinGetYear()+"/"+t_bulan+"/"+("0"+str(t_day))[-2:]+"/balance_after","r")
			balance_before = int(f.read())
			f.close()
			dapet = 1
	except IOError:
		# no record for that day yet; look one day further back
		dpassed = dpassed + 1

# if fresh input
try:
	f = open(folder+"data/"+t_tahun+"/"+t_bulan+"/"+("0"+str(t_day))[-2:]+"/balance_after","r")
except IOError:
	# if only updating balance_out (today's spending)
	pass
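# Hedged aside (not in the original script): the shell `date` calls above
# could be replaced with the standard library, e.g.:
#
#   import datetime
#   prev = datetime.date.today() - datetime.timedelta(days=daypassed)
#   prev.strftime("%Y"), prev.strftime("%m"), prev.strftime("%d")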
| imakin/PersonalAssistant | Fundkeep/modul/b__main_backu.py | Python | mit | 2,347 | 0.041329 |
# -*- coding: utf-8 -*-
"""
Basic unit tests for LibraryContentBlock
Higher-level tests are in `cms/djangoapps/contentstore/tests/test_libraries.py`.
"""
import six
from bson.objectid import ObjectId
from fs.memoryfs import MemoryFS
from lxml import etree
from mock import Mock, patch
from search.search_engine_base import SearchEngine
from six.moves import range
from web_fragments.fragment import Fragment
from xblock.runtime import Runtime as VanillaRuntime
from xmodule.library_content_module import ANY_CAPA_TYPE_VALUE, LibraryContentBlock
from xmodule.library_tools import LibraryToolsService
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.tests.factories import CourseFactory, LibraryFactory
from xmodule.modulestore.tests.utils import MixedSplitTestCase
from xmodule.tests import get_test_system
from xmodule.validation import StudioValidationMessage
from xmodule.x_module import AUTHOR_VIEW
from .test_course_module import DummySystem as TestImportSystem
dummy_render = lambda block, _: Fragment(block.data) # pylint: disable=invalid-name
class LibraryContentTest(MixedSplitTestCase):
"""
Base class for tests of LibraryContentBlock (library_content_block.py)
"""
def setUp(self):
super(LibraryContentTest, self).setUp() # lint-amnesty, pylint: disable=super-with-arguments
self.tools = LibraryToolsService(self.store, self.user_id)
self.library = LibraryFactory.create(modulestore=self.store)
self.lib_blocks = [
self.make_block("html", self.library, data="Hello world from block {}".format(i))
for i in range(1, 5)
]
self.course = CourseFactory.create(modulestore=self.store)
self.chapter = self.make_block("chapter", self.course)
self.sequential = self.make_block("sequential", self.chapter)
self.vertical = self.make_block("vertical", self.sequential)
self.lc_block = self.make_block(
"library_content",
self.vertical,
max_count=1,
source_library_id=six.text_type(self.library.location.library_key)
)
def _bind_course_module(self, module):
"""
Bind a module (part of self.course) so we can access student-specific data.
"""
module_system = get_test_system(course_id=module.location.course_key)
module_system.descriptor_runtime = module.runtime._descriptor_system # pylint: disable=protected-access
module_system._services['library_tools'] = self.tools # pylint: disable=protected-access
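        # get_module is deliberately recursive: each child descriptor gets its
        # own module system whose get_module binds that child's children in turn.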
def get_module(descriptor):
"""Mocks module_system get_module function"""
sub_module_system = get_test_system(course_id=module.location.course_key)
sub_module_system.get_module = get_module
sub_module_system.descriptor_runtime = descriptor._runtime # pylint: disable=protected-access
descriptor.bind_for_student(sub_module_system, self.user_id)
return descriptor
module_system.get_module = get_module
module.xmodule_runtime = module_system
class TestLibraryContentExportImport(LibraryContentTest):
"""
Export and import tests for LibraryContentBlock
"""
maxDiff = None
def test_xml_export_import_cycle(self):
"""
Test the export-import cycle.
"""
        # Children will only be set after calling this.
self.lc_block.refresh_children()
lc_block = self.store.get_item(self.lc_block.location)
expected_olx = (
'<library_content display_name="{block.display_name}" max_count="{block.max_count}"'
' source_library_id="{block.source_library_id}" source_library_version="{block.source_library_version}">\n'
' <html url_name="{block.children[0].block_id}"/>\n'
' <html url_name="{block.children[1].block_id}"/>\n'
' <html url_name="{block.children[2].block_id}"/>\n'
' <html url_name="{block.children[3].block_id}"/>\n'
'</library_content>\n'
).format(
block=lc_block,
)
export_fs = MemoryFS()
# Set the virtual FS to export the olx to.
lc_block.runtime._descriptor_system.export_fs = export_fs # pylint: disable=protected-access
# Export the olx.
node = etree.Element("unknown_root")
lc_block.add_xml_to_node(node)
# Read it back
with export_fs.open('{dir}/{file_name}.xml'.format(
dir=lc_block.scope_ids.usage_id.block_type,
file_name=lc_block.scope_ids.usage_id.block_id
)) as f:
exported_olx = f.read()
# And compare.
assert exported_olx == expected_olx
runtime = TestImportSystem(load_error_modules=True, course_id=lc_block.location.course_key)
runtime.resources_fs = export_fs
# Now import it.
olx_element = etree.fromstring(exported_olx)
id_generator = Mock()
imported_lc_block = LibraryContentBlock.parse_xml(olx_element, runtime, None, id_generator)
# Check the new XBlock has the same properties as the old one.
assert imported_lc_block.display_name == lc_block.display_name
assert imported_lc_block.source_library_id == lc_block.source_library_id
assert imported_lc_block.source_library_version == lc_block.source_library_version
assert imported_lc_block.mode == lc_block.mode
assert imported_lc_block.max_count == lc_block.max_count
assert imported_lc_block.capa_type == lc_block.capa_type
assert len(imported_lc_block.children) == 4
assert imported_lc_block.children == lc_block.children
class LibraryContentBlockTestMixin(object):
"""
Basic unit tests for LibraryContentBlock
"""
problem_types = [
["multiplechoiceresponse"], ["optionresponse"], ["optionresponse", "coderesponse"],
["coderesponse", "optionresponse"]
]
problem_type_lookup = {}
def _get_capa_problem_type_xml(self, *args):
""" Helper function to create empty CAPA problem definition """
problem = "<problem>"
for problem_type in args:
problem += "<{problem_type}></{problem_type}>".format(problem_type=problem_type)
problem += "</problem>"
return problem
def _create_capa_problems(self):
"""
Helper function to create a set of capa problems to test against.
Creates four blocks total.
"""
self.problem_type_lookup = {}
for problem_type in self.problem_types:
block = self.make_block("problem", self.library, data=self._get_capa_problem_type_xml(*problem_type))
self.problem_type_lookup[block.location] = problem_type
def test_lib_content_block(self):
"""
Test that blocks from a library are copied and added as children
"""
# Check that the LibraryContent block has no children initially
# Normally the children get added when the "source_libraries" setting
# is updated, but the way we do it through a factory doesn't do that.
assert len(self.lc_block.children) == 0
# Update the LibraryContent module:
self.lc_block.refresh_children()
self.lc_block = self.store.get_item(self.lc_block.location)
# Check that all blocks from the library are now children of the block:
assert len(self.lc_block.children) == len(self.lib_blocks)
def test_children_seen_by_a_user(self):
"""
Test that each student sees only one block as a child of the LibraryContent block.
"""
self.lc_block.refresh_children()
self.lc_block = self.store.get_item(self.lc_block.location)
self._bind_course_module(self.lc_block)
# Make sure the runtime knows that the block's children vary per-user:
assert self.lc_block.has_dynamic_children()
assert len(self.lc_block.children) == len(self.lib_blocks)
# Check how many children each user will see:
assert len(self.lc_block.get_child_descriptors()) == 1
# Check that get_content_titles() doesn't return titles for hidden/unused children
assert len(self.lc_block.get_content_titles()) == 1
def test_validation_of_course_libraries(self):
"""
Test that the validation method of LibraryContent blocks can validate
the source_library setting.
"""
# When source_library_id is blank, the validation summary should say this block needs to be configured:
self.lc_block.source_library_id = ""
result = self.lc_block.validate()
assert not result
# Validation fails due to at least one warning/message
assert result.summary
assert StudioValidationMessage.NOT_CONFIGURED == result.summary.type
# When source_library_id references a non-existent library, we should get an error:
self.lc_block.source_library_id = "library-v1:BAD+WOLF"
result = self.lc_block.validate()
assert not result
# Validation fails due to at least one warning/message
assert result.summary
assert StudioValidationMessage.ERROR == result.summary.type
assert 'invalid' in result.summary.text
# When source_library_id is set but the block needs to be updated, the summary should say so:
self.lc_block.source_library_id = six.text_type(self.library.location.library_key)
result = self.lc_block.validate()
assert not result
# Validation fails due to at least one warning/message
assert result.summary
assert StudioValidationMessage.WARNING == result.summary.type
assert 'out of date' in result.summary.text
# Now if we update the block, all validation should pass:
self.lc_block.refresh_children()
assert self.lc_block.validate()
def test_validation_of_matching_blocks(self):
"""
Test that the validation method of LibraryContent blocks can warn
the user about problems with other settings (max_count and capa_type).
"""
# Set max_count to higher value than exists in library
self.lc_block.max_count = 50
# In the normal studio editing process, editor_saved() calls refresh_children at this point
self.lc_block.refresh_children()
result = self.lc_block.validate()
assert not result
# Validation fails due to at least one warning/message
assert result.summary
assert StudioValidationMessage.WARNING == result.summary.type
assert 'only 4 matching problems' in result.summary.text
# Add some capa problems so we can check problem type validation messages
self.lc_block.max_count = 1
self._create_capa_problems()
self.lc_block.refresh_children()
assert self.lc_block.validate()
# Existing problem type should pass validation
self.lc_block.max_count = 1
self.lc_block.capa_type = 'multiplechoiceresponse'
self.lc_block.refresh_children()
assert self.lc_block.validate()
# ... unless requested more blocks than exists in library
self.lc_block.max_count = 10
self.lc_block.capa_type = 'multiplechoiceresponse'
self.lc_block.refresh_children()
result = self.lc_block.validate()
assert not result
# Validation fails due to at least one warning/message
assert result.summary
assert StudioValidationMessage.WARNING == result.summary.type
assert 'only 1 matching problem' in result.summary.text
# Missing problem type should always fail validation
self.lc_block.max_count = 1
self.lc_block.capa_type = 'customresponse'
self.lc_block.refresh_children()
result = self.lc_block.validate()
assert not result
# Validation fails due to at least one warning/message
assert result.summary
assert StudioValidationMessage.WARNING == result.summary.type
assert 'no matching problem types' in result.summary.text
def test_capa_type_filtering(self):
"""
Test that the capa type filter is actually filtering children
"""
self._create_capa_problems()
        assert len(self.lc_block.children) == 0  # precondition check
self.lc_block.capa_type = "multiplechoiceresponse"
self.lc_block.refresh_children()
assert len(self.lc_block.children) == 1
self.lc_block.capa_type = "optionresponse"
self.lc_block.refresh_children()
assert len(self.lc_block.children) == 3
self.lc_block.capa_type = "coderesponse"
self.lc_block.refresh_children()
assert len(self.lc_block.children) == 2
self.lc_block.capa_type = "customresponse"
self.lc_block.refresh_children()
assert len(self.lc_block.children) == 0
self.lc_block.capa_type = ANY_CAPA_TYPE_VALUE
self.lc_block.refresh_children()
assert len(self.lc_block.children) == (len(self.lib_blocks) + 4)
def test_non_editable_settings(self):
"""
Test the settings that are marked as "non-editable".
"""
non_editable_metadata_fields = self.lc_block.non_editable_metadata_fields
assert LibraryContentBlock.mode in non_editable_metadata_fields
assert LibraryContentBlock.display_name not in non_editable_metadata_fields
def test_overlimit_blocks_chosen_randomly(self):
"""
Tests that blocks to remove from selected children are chosen
randomly when len(selected) > max_count.
"""
blocks_seen = set()
total_tries, max_tries = 0, 100
self.lc_block.refresh_children()
self.lc_block = self.store.get_item(self.lc_block.location)
self._bind_course_module(self.lc_block)
# Eventually, we should see every child block selected
while len(blocks_seen) != len(self.lib_blocks):
self._change_count_and_refresh_children(len(self.lib_blocks))
# Now set the number of selections to 1
selected = self._change_count_and_refresh_children(1)
blocks_seen.update(selected)
total_tries += 1
if total_tries >= max_tries:
assert False, "Max tries exceeded before seeing all blocks."
break
def _change_count_and_refresh_children(self, count):
"""
Helper method that changes the max_count of self.lc_block, refreshes
children, and asserts that the number of selected children equals the count provided.
"""
self.lc_block.max_count = count
selected = self.lc_block.get_child_descriptors()
assert len(selected) == count
return selected
@patch('xmodule.library_tools.SearchEngine.get_search_engine', Mock(return_value=None, autospec=True))
class TestLibraryContentBlockNoSearchIndex(LibraryContentBlockTestMixin, LibraryContentTest):
"""
Tests for library container when no search index is available.
Tests fallback low-level CAPA problem introspection
"""
pass # pylint:disable=unnecessary-pass
search_index_mock = Mock(spec=SearchEngine) # pylint: disable=invalid-name
@patch('xmodule.library_tools.SearchEngine.get_search_engine', Mock(return_value=search_index_mock, autospec=True))
class TestLibraryContentBlockWithSearchIndex(LibraryContentBlockTestMixin, LibraryContentTest):
"""
Tests for library container with mocked search engine response.
"""
def _get_search_response(self, field_dictionary=None):
""" Mocks search response as returned by search engine """
target_type = field_dictionary.get('problem_types')
matched_block_locations = [
key for key, problem_types in
self.problem_type_lookup.items() if target_type in problem_types
]
return {
'results': [
{'data': {'id': str(location)}} for location in matched_block_locations
]
}
def setUp(self):
""" Sets up search engine mock """
super(TestLibraryContentBlockWithSearchIndex, self).setUp() # lint-amnesty, pylint: disable=super-with-arguments
search_index_mock.search = Mock(side_effect=self._get_search_response)
@patch(
'xmodule.modulestore.split_mongo.caching_descriptor_system.CachingDescriptorSystem.render', VanillaRuntime.render
)
@patch('xmodule.html_module.HtmlBlock.author_view', dummy_render, create=True)
@patch('xmodule.x_module.DescriptorSystem.applicable_aside_types', lambda self, block: [])
class TestLibraryContentRender(LibraryContentTest):
"""
Rendering unit tests for LibraryContentBlock
"""
def test_preview_view(self):
""" Test preview view rendering """
self.lc_block.refresh_children()
self.lc_block = self.store.get_item(self.lc_block.location)
assert len(self.lc_block.children) == len(self.lib_blocks)
self._bind_course_module(self.lc_block)
rendered = self.lc_block.render(AUTHOR_VIEW, {'root_xblock': self.lc_block})
assert 'Hello world from block 1' in rendered.content
def test_author_view(self):
""" Test author view rendering """
self.lc_block.refresh_children()
self.lc_block = self.store.get_item(self.lc_block.location)
assert len(self.lc_block.children) == len(self.lib_blocks)
self._bind_course_module(self.lc_block)
rendered = self.lc_block.render(AUTHOR_VIEW, {})
        assert '' == rendered.content  # content should be empty
        assert 'LibraryContentAuthorView' == rendered.js_init_fn  # but some js initialization should happen
class TestLibraryContentAnalytics(LibraryContentTest):
"""
Test analytics features of LibraryContentBlock
"""
def setUp(self):
super(TestLibraryContentAnalytics, self).setUp() # lint-amnesty, pylint: disable=super-with-arguments
self.publisher = Mock()
self.lc_block.refresh_children()
self.lc_block = self.store.get_item(self.lc_block.location)
self._bind_course_module(self.lc_block)
self.lc_block.xmodule_runtime.publish = self.publisher
def _assert_event_was_published(self, event_type):
"""
Check that a LibraryContentBlock analytics event was published by self.lc_block.
"""
assert self.publisher.called
assert len(self.publisher.call_args[0]) == 3 # pylint:disable=unsubscriptable-object
_, event_name, event_data = self.publisher.call_args[0] # pylint:disable=unsubscriptable-object
assert event_name == 'edx.librarycontentblock.content.{}'.format(event_type)
assert event_data['location'] == six.text_type(self.lc_block.location)
return event_data
def test_assigned_event(self):
"""
Test the "assigned" event emitted when a student is assigned specific blocks.
"""
# In the beginning was the lc_block and it assigned one child to the student:
child = self.lc_block.get_child_descriptors()[0]
child_lib_location, child_lib_version = self.store.get_block_original_usage(child.location)
assert isinstance(child_lib_version, ObjectId)
event_data = self._assert_event_was_published("assigned")
block_info = {
"usage_key": six.text_type(child.location),
"original_usage_key": six.text_type(child_lib_location),
"original_usage_version": six.text_type(child_lib_version),
"descendants": [],
}
assert event_data ==\
{'location': six.text_type(self.lc_block.location),
'added': [block_info],
'result': [block_info],
'previous_count': 0, 'max_count': 1}
self.publisher.reset_mock()
# Now increase max_count so that one more child will be added:
self.lc_block.max_count = 2
children = self.lc_block.get_child_descriptors()
assert len(children) == 2
child, new_child = children if children[0].location == child.location else reversed(children)
event_data = self._assert_event_was_published("assigned")
assert event_data['added'][0]['usage_key'] == six.text_type(new_child.location)
assert len(event_data['result']) == 2
assert event_data['previous_count'] == 1
assert event_data['max_count'] == 2
def test_assigned_event_published(self):
"""
Same as test_assigned_event but uses the published branch
"""
self.store.publish(self.course.location, self.user_id)
with self.store.branch_setting(ModuleStoreEnum.Branch.published_only):
self.lc_block = self.store.get_item(self.lc_block.location)
self._bind_course_module(self.lc_block)
self.lc_block.xmodule_runtime.publish = self.publisher
self.test_assigned_event()
def test_assigned_descendants(self):
"""
Test the "assigned" event emitted includes descendant block information.
"""
# Replace the blocks in the library with a block that has descendants:
with self.store.bulk_operations(self.library.location.library_key):
self.library.children = []
main_vertical = self.make_block("vertical", self.library)
inner_vertical = self.make_block("vertical", main_vertical)
html_block = self.make_block("html", inner_vertical)
problem_block = self.make_block("problem", inner_vertical)
self.lc_block.refresh_children()
# Reload lc_block and set it up for a student:
self.lc_block = self.store.get_item(self.lc_block.location)
self._bind_course_module(self.lc_block)
self.lc_block.xmodule_runtime.publish = self.publisher
# Get the keys of each of our blocks, as they appear in the course:
course_usage_main_vertical = self.lc_block.children[0]
course_usage_inner_vertical = self.store.get_item(course_usage_main_vertical).children[0]
inner_vertical_in_course = self.store.get_item(course_usage_inner_vertical)
course_usage_html = inner_vertical_in_course.children[0]
course_usage_problem = inner_vertical_in_course.children[1]
# Trigger a publish event:
self.lc_block.get_child_descriptors()
event_data = self._assert_event_was_published("assigned")
for block_list in (event_data["added"], event_data["result"]):
assert len(block_list) == 1
# main_vertical is the only root block added, and is the only result.
assert block_list[0]['usage_key'] == six.text_type(course_usage_main_vertical)
# Check that "descendants" is a flat, unordered list of all of main_vertical's descendants:
descendants_expected = (
(inner_vertical.location, course_usage_inner_vertical),
(html_block.location, course_usage_html),
(problem_block.location, course_usage_problem),
)
descendant_data_expected = {}
for lib_key, course_usage_key in descendants_expected:
descendant_data_expected[six.text_type(course_usage_key)] = {
"usage_key": six.text_type(course_usage_key),
"original_usage_key": six.text_type(lib_key),
"original_usage_version": six.text_type(self.store.get_block_original_usage(course_usage_key)[1]),
}
assert len(block_list[0]['descendants']) == len(descendant_data_expected)
for descendant in block_list[0]["descendants"]:
assert descendant == descendant_data_expected.get(descendant['usage_key'])
def test_removed_overlimit(self):
"""
Test the "removed" event emitted when we un-assign blocks previously assigned to a student.
        We go from one block assigned to none because max_count has been decreased.
"""
        # Decrease max_count to 0, causing the block to be overlimit:
self.lc_block.get_child_descriptors() # This line is needed in the test environment or the change has no effect
self.publisher.reset_mock() # Clear the "assigned" event that was just published.
self.lc_block.max_count = 0
# Check that the event says that one block was removed, leaving no blocks left:
children = self.lc_block.get_child_descriptors()
assert len(children) == 0
event_data = self._assert_event_was_published("removed")
assert len(event_data['removed']) == 1
assert event_data['result'] == []
assert event_data['reason'] == 'overlimit'
def test_removed_invalid(self):
"""
Test the "removed" event emitted when we un-assign blocks previously assigned to a student.
        We go from two blocks assigned to one, because the other has been deleted from the library.
"""
# Start by assigning two blocks to the student:
self.lc_block.get_child_descriptors() # This line is needed in the test environment or the change has no effect
self.lc_block.max_count = 2
initial_blocks_assigned = self.lc_block.get_child_descriptors()
assert len(initial_blocks_assigned) == 2
self.publisher.reset_mock() # Clear the "assigned" event that was just published.
# Now make sure that one of the assigned blocks will have to be un-assigned.
# To cause an "invalid" event, we delete all blocks from the content library
# except for one of the two already assigned to the student:
keep_block_key = initial_blocks_assigned[0].location
keep_block_lib_usage_key, keep_block_lib_version = self.store.get_block_original_usage(keep_block_key)
assert keep_block_lib_usage_key is not None
deleted_block_key = initial_blocks_assigned[1].location
self.library.children = [keep_block_lib_usage_key]
self.store.update_item(self.library, self.user_id)
self.lc_block.refresh_children()
# Check that the event says that one block was removed, leaving one block left:
children = self.lc_block.get_child_descriptors()
assert len(children) == 1
event_data = self._assert_event_was_published("removed")
assert event_data['removed'] ==\
[{'usage_key': six.text_type(deleted_block_key),
'original_usage_key': None,
'original_usage_version': None,
'descendants': []}]
assert event_data['result'] ==\
[{'usage_key': six.text_type(keep_block_key),
'original_usage_key': six.text_type(keep_block_lib_usage_key),
'original_usage_version': six.text_type(keep_block_lib_version), 'descendants': []}]
assert event_data['reason'] == 'invalid'
| stvstnfrd/edx-platform | common/lib/xmodule/xmodule/tests/test_library_content.py | Python | agpl-3.0 | 26,994 | 0.002741 |
from django.contrib import admin
from . import models
# Register your models here.
#admin.site.register(models.MyUser)
@admin.register(models.MyUser)
class MyUserAdmin(admin.ModelAdmin):
list_display = ['user'] | pacocampo/Backend | traveleando/project/usuario/admin.py | Python | gpl-3.0 | 213 | 0.018779 |
from flask import request
from cache import (
add_to_queue, get_from_queue, get_from_any_queue, set_cache, get_cache,
delete_cache
)
import config
import consts
from connections import send_ws
from consts import WS_CHAT_MESSAGE
from decorators import validate
import errors
from game import Game
from handlers.v2.base import RestBase
from helpers import generate_token, get_prefix
from models import User, GamePool
from loggers import logger
from validators import GameNewValidator, GameMoveValidator
class RestGameBase(RestBase):
pre_methods = ['load_game']
def load_game(self, token):
try:
game = Game.load_game(token)
except errors.GameNotStartedError as e:
data = {
'type': consts.TYPES[e.type]['name'],
'limit': e.limit,
}
if (e.token):
data['invite'] = e.token
return data
except errors.GameNotFoundError as e:
raise errors.APIException(e.message)
if game._loaded_by == consts.WHITE:
if game.model.player_white is not None and game.model.player_white != request.user:
raise errors.APIException('wrong user')
else:
if game.model.player_black is not None and game.model.player_black != request.user:
raise errors.APIException('wrong user')
self.game = game
class RestTypes(RestBase):
def get(self):
types = [{
'name': t['name'],
'description': t['description'],
'periods': [{
'name': k,
'title': v[0],
} for k, v in sorted(t['periods'].items(), key=lambda a: a[1][1])],
} for t in consts.TYPES.values() if t['name'] != 'no limit']
return {'types': types}
class RestNewGame(RestBase):
def get(self):
result = []
count = 0
for pool in GamePool.select().where(
GamePool.is_started == False,
GamePool.is_lost == False,
            GamePool.player1.is_null(False),  # `is not None` would be evaluated by Python, not translated to SQL
).order_by(GamePool.date_created.desc()):
if pool.user1 and pool.user1 == request.user:
continue
result.append({
'id': pool.pk,
'date_created': pool.date_created.isoformat(),
'user': pool.user1.username if pool.user1 else None,
'type': consts.TYPES[pool.type_game]['name'],
'limit': pool.time_limit,
})
count += 1
if count > 9:
break
return {'games': result}
@validate(GameNewValidator)
def post(self):
game_type = self.data['type']
game_limit = self.data['limit']
token = generate_token(True)
pool = GamePool.create(
player1 = token,
user1 = request.user,
type_game = game_type,
time_limit = game_limit,
)
set_cache('wait_{}'.format(token), (game_type, game_limit))
return {'game': token}
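    # Note: the token returned above is stored as pool.player1 (the white side);
    # RestAcceptGame later generates player2 and starts the game with both tokens.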
class RestAcceptGame(RestBase):
def post(self, game_id):
try:
pool = GamePool.get(GamePool.pk == game_id)
except GamePool.DoesNotExist:
raise errors.APINotFound('game')
except Exception as e:
raise errors.APIException('wrong format')
if pool.user1 and pool.user1 == request.user:
raise errors.APIException('you cannot start game with yourself')
pool.player2 = generate_token(True)
pool.user2 = request.user
pool.is_started = True
pool.save()
game = Game.new_game(
pool.player1, pool.player2, pool.type_game, pool.time_limit,
white_user=pool.user1, black_user=pool.user2
)
delete_cache('wait_{}'.format(pool.player1))
result = {'game': pool.player2}
result.update(game.get_info(consts.BLACK))
return result
class RestNewInvite(RestBase):
@validate(GameNewValidator)
def post(self):
game_type = self.data['type']
game_limit = self.data['limit']
if game_type != consts.TYPE_NOLIMIT and not game_limit:
            raise errors.APIException('game limit must be set for limited game types')
token_game = generate_token(True)
token_invite = generate_token(True)
set_cache('invite_{}'.format(token_invite), (token_game, game_type, game_limit))
if request.user:
set_cache('user_{}'.format(token_game), request.user.pk, 3600)
set_cache('wait_{}'.format(token_game), (game_type, game_limit, token_invite))
return {
'game': token_game,
'invite': token_invite,
}
class RestAcceptInvite(RestBase):
def get(self, token):
try:
enemy_token, game_type, game_limit = get_cache('invite_{}'.format(token))
except:
raise errors.APINotFound('game')
enemy_user = None
user_id = get_cache('user_{}'.format(enemy_token))
if user_id:
try:
enemy_user = User.get(pk=user_id)
except User.DoesNotExist:
# TODO: if user not found game will be created with None as white player
pass
user_token = generate_token(True)
game = Game.new_game(
enemy_token, user_token, game_type, game_limit,
white_user=enemy_user, black_user=request.user
)
delete_cache('wait_{}'.format(enemy_token))
result = {'game': user_token}
result.update(game.get_info(consts.BLACK))
return result
class RestGames(RestBase):
def get(self):
from models import Game
result = {
'games': {
'actives': [],
'ended': [],
}
}
if request.user:
games = Game.select().where(
Game.date_end == None,
(Game.player_white == request.user) | (Game.player_black == request.user),
)
for game in games:
if game.player_white == request.user:
result['games']['actives'].append(game.white)
else:
result['games']['actives'].append(game.black)
games = Game.select().where(
Game.date_end != None,
(Game.player_white == request.user) | (Game.player_black == request.user),
).limit(10)
for game in games:
if game.player_white == request.user:
result['games']['ended'].append(game.white)
else:
result['games']['ended'].append(game.black)
return result
class RestInfo(RestGameBase):
def get(self, *args, **kwargs):
return self.game.get_info()
class RestMoves(RestGameBase):
def get(self, *args, **kwargs):
return self.game.moves()
@validate(GameMoveValidator)
def post(self, *args, **kwargs):
coor1 = self.data['coor1']
coor2 = self.data['coor2']
return self.game.move(coor1, coor2)
class RestDraw(RestGameBase):
# TODO: add get
def post(self, *args, **kwargs):
return self.game.draw_accept()
def delete(self, *args, **kwargs):
return self.game.draw_refuse()
class RestResign(RestGameBase):
def post(self, *args, **kwargs):
return self.game.resign()
| AHAPX/dark-chess | src/handlers/v2/game.py | Python | gpl-3.0 | 7,424 | 0.002963 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import models, fields, api, _
from odoo.addons.http_routing.models.ir_http import slug
class EventEvent(models.Model):
_inherit = "event.event"
community_menu = fields.Boolean(
"Community Menu", compute="_compute_community_menu",
readonly=False, store=True,
help="Display community tab on website")
community_menu_ids = fields.One2many(
"website.event.menu", "event_id", string="Event Community Menus",
domain=[("menu_type", "=", "community")])
@api.depends("event_type_id", "website_menu", "community_menu")
def _compute_community_menu(self):
""" At type onchange: synchronize. At website_menu update: synchronize. """
for event in self:
if event.event_type_id and event.event_type_id != event._origin.event_type_id:
event.community_menu = event.event_type_id.community_menu
elif event.website_menu and event.website_menu != event._origin.website_menu or not event.community_menu:
event.community_menu = True
elif not event.website_menu:
event.community_menu = False
# ------------------------------------------------------------
# WEBSITE MENU MANAGEMENT
# ------------------------------------------------------------
# OVERRIDES: ADD SEQUENCE
def _get_menu_update_fields(self):
update_fields = super(EventEvent, self)._get_menu_update_fields()
update_fields += ['community_menu']
return update_fields
def _update_website_menus(self, menus_update_by_field=None):
super(EventEvent, self)._update_website_menus(menus_update_by_field=menus_update_by_field)
for event in self:
if event.menu_id and (not menus_update_by_field or event in menus_update_by_field.get('community_menu')):
event._update_website_menu_entry('community_menu', 'community_menu_ids', '_get_community_menu_entries')
def _get_menu_type_field_matching(self):
res = super(EventEvent, self)._get_menu_type_field_matching()
res['community'] = 'community_menu'
return res
def _get_community_menu_entries(self):
self.ensure_one()
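        # Entries follow the website_event menu tuple convention, assumed here
        # to be (name, url, xml_id, sequence, menu_type).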
return [(_('Community'), '/event/%s/community' % slug(self), False, 80, 'community')]
def _get_track_menu_entries(self):
""" Remove agenda as this is now managed separately """
self.ensure_one()
return [
(_('Talks'), '/event/%s/track' % slug(self), False, 10, 'track'),
(_('Agenda'), '/event/%s/agenda' % slug(self), False, 70, 'track')
]
def _get_track_proposal_menu_entries(self):
""" See website_event_track._get_track_menu_entries() """
self.ensure_one()
return [(_('Talk Proposals'), '/event/%s/track_proposal' % slug(self), False, 15, 'track_proposal')]
| ygol/odoo | addons/website_event_track_online/models/event_event.py | Python | agpl-3.0 | 2,962 | 0.002701 |
"""Ring buffer for automatic storage of images"""
import os.path
import logging
from datetime import datetime
import numpy as np
from scipy.misc import imsave
import tables
class RingBuffer(object):
"""Ring buffer class.
Attributes
----------
directory : str
Location to store the ring buffer file.
recording : bool
True when data is being saved to the ring buffer.
N : int
Number of images to store in the ring buffer.
"""
def __init__(self, **kwargs):
"""Initialize the ring buffer.
Keyword arguments
-----------------
N : int
Number of images to store in the ring buffer.
directory : str
The directory to buffer images to.
filename : str
Filename to use for the HDF5 file.
recording : bool
Activate recording when True, disable when False.
logger : str
The name of the logger to use. Defaults to 'RingBuffer'.
roi : list
The currently selected region of interest.
"""
directory = kwargs.get('directory', '.')
filename = kwargs.get('filename', 'rbuffer.h5')
recording = kwargs.get('recording', True)
N = int(kwargs.get('N', 100))
logger = kwargs.get('logger', 'RingBuffer')
roi = kwargs.get('roi', [10, 100, 10, 100])
assert isinstance(directory, (str, unicode))
assert isinstance(filename, (str, unicode))
assert isinstance(recording, (int, bool))
assert isinstance(logger, (str, unicode))
assert isinstance(roi, (list, tuple, np.ndarray))
self.recording = recording
self.N = N
self.logger = logging.getLogger(logger)
self.roi = roi
self._index = 0
# Initialize HDF5 database.
self.filename = os.path.join(directory, filename)
self._db = tables.open_file(self.filename, 'w', title="Ring Buffer")
self._db.create_group('/', 'images', 'Buffered Images')
def __enter__(self):
return self
def __exit__(self, type_, value, tb):
self.close()
def close(self):
self._db.close()
def get_current_index(self):
"""Return the current index. This is in a function to
hopefully prevent the user from accessing _index directly
which could lead to bad things if it is modified!
"""
return self._index
def set_recording_state(self, state):
"""Explicitly set the recording state to state."""
assert isinstance(state, (bool, int))
self.recording = state
def toggle(self):
"""Toggle the recording state."""
self.recording = not self.recording
def write(self, data):
"""Write data to the ring buffer file."""
if not self.recording:
return
name = 'img{:04d}'.format(self._index)
try:
self._db.get_node('/images/' + name).remove()
except tables.NoSuchNodeError:
pass
finally:
# TODO: Adapt to CArray for compression
#filters = tables.Filters(complevel=5, complib='zlib')
arr = self._db.create_array('/images', name, data)
arr.attrs.timestamp = datetime.strftime(datetime.now(), '%Y-%m-%d %H:%M:%S.%f')
arr.attrs.roi = self.roi
arr.flush()
self._db.flush()
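        # Advance the write pointer, wrapping back to 0 so at most N images are kept.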
self._index = self._index + 1 if self._index < self.N - 1 else 0
def read(self, index):
"""Return data from the ring buffer file."""
img = self._db.get_node('/images/img{:04d}'.format(index))
return np.array(img)
def get_roi(self, index):
"""Return the recorded ROI for the given index."""
return self._db.get_node('/images/img{:04d}'.format(index)).attrs.roi
def save_as(self, filename):
"""Save the ring buffer to file filename. The output format
will depend on the extension of filename.
"""
self.logger.warning("Saving ring buffers to non-HDF5 formats is not yet properly implemented!")
# Save as PNG files in a zip archive.
if filename[-3:] == 'zip':
for node in self._db.list_nodes('/images'):
data = node.read()
imsave('./img.png', data)
if __name__ == "__main__":
from numpy import random
size = 512
img_size = (size, size)
with RingBuffer(N=100) as rb:
for i in range(200):
img = random.random(img_size)
rb.write(img)
rb.save_as('test.zip') | mivade/qCamera | qcamera/ring_buffer.py | Python | bsd-2-clause | 4,596 | 0.001958 |
# Build a simple house.
# Import necessary modules.
import mcpi.minecraft as minecraft
import mcpi.block as block
# Connect to Minecraft.
mc = minecraft.Minecraft.create()
# Set a base size for the house.
SIZE = 20
# Create the house function.
def house():
# Calculate the midpoints, used to position doors and windows.
midx = x + SIZE / 2
midy = y + SIZE / 2
# Build the outer shell.
mc.setBlocks(
x, y, z,
x + SIZE, y + SIZE, z + SIZE,
block.COBBLESTONE.id
)
# Carve out the inside of the house.
mc.setBlocks(
x + 1, y, z + 1,
x + SIZE - 2, y + SIZE - 1, z + SIZE - 2,
block.AIR.id
)
# Carve out the doorway.
mc.setBlocks(
midx - 1, y, z,
midx + 1, y + 3, z,
block.AIR.id
)
# Carve out two windows.
mc.setBlocks(
x + 3, y + SIZE - 3, z,
midx - 3, midy + 3, z,
block.GLASS.id
)
mc.setBlocks(
midx + 3, y + SIZE - 3, z,
x + SIZE - 3, midy + 3, z,
block.GLASS.id
)
# Add the roof
mc.setBlocks(
x, y + SIZE + 1, z,
x + SIZE, y + SIZE + 1, z + SIZE,
block.WOOD.id
)
# Add some carpet
mc.setBlocks(
x + 1, y - 1, z + 1,
x + SIZE - 2, y - 1, z + SIZE - 2,
block.WOOL.id, 14
)
# Get your position:
pos = mc.player.getTilePos()
# Store your coordinates.
x = pos.x + 2
y = pos.y
z = pos.z
house()
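# Hedged variation: to build at fixed coordinates instead of beside the player,
# set the module-level x, y, z before calling house(), e.g.
#   x, y, z = 0, 10, 0
#   house()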
| craigem/MyAdventures | buildHouse2.py | Python | gpl-3.0 | 1,494 | 0 |
"""
raven.transport.registry
~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2012 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
import sys
# TODO(dcramer): we really should need to import all of these by default
from raven.transport.eventlet import EventletHTTPTransport
from raven.transport.exceptions import DuplicateScheme
from raven.transport.http import HTTPTransport
from raven.transport.gevent import GeventedHTTPTransport
from raven.transport.requests import RequestsHTTPTransport
from raven.transport.threaded import ThreadedHTTPTransport
from raven.transport.twisted import TwistedHTTPTransport
from raven.transport.tornado import TornadoHTTPTransport
from raven.transport.udp import UDPTransport
from raven.utils import urlparse
if sys.version_info >= (3, 3):
from raven.transport.aiohttp import AioHttpTransport
class TransportRegistry(object):
def __init__(self, transports=None):
# setup a default list of senders
self._schemes = {}
self._transports = {}
if transports:
for transport in transports:
self.register_transport(transport)
def register_transport(self, transport):
if not hasattr(transport, 'scheme') or not hasattr(transport.scheme, '__iter__'):
            raise AttributeError('Transport %s must have a scheme list' % transport.__class__.__name__)
for scheme in transport.scheme:
self.register_scheme(scheme, transport)
def register_scheme(self, scheme, cls):
"""
It is possible to inject new schemes at runtime
"""
if scheme in self._schemes:
raise DuplicateScheme()
urlparse.register_scheme(scheme)
# TODO (vng): verify the interface of the new class
self._schemes[scheme] = cls
def supported_scheme(self, scheme):
return scheme in self._schemes
def get_transport(self, parsed_url, **options):
full_url = parsed_url.geturl()
if full_url not in self._transports:
# Remove the options from the parsed_url
parsed_url = urlparse.urlparse(full_url.split('?')[0])
self._transports[full_url] = self._schemes[parsed_url.scheme](parsed_url, **options)
return self._transports[full_url]
def compute_scope(self, url, scope):
"""
Compute a scope dictionary. This may be overridden by custom
transports
"""
transport = self._schemes[url.scheme](url)
return transport.compute_scope(url, scope)
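# Hedged usage sketch (MyTransport is hypothetical, not part of this module):
#   registry = TransportRegistry(default_transports)
#   registry.register_scheme('myscheme', MyTransport)
#   if registry.supported_scheme('myscheme'):
#       transport = registry.get_transport(urlparse.urlparse('myscheme://host/1'))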
default_transports = [
HTTPTransport,
ThreadedHTTPTransport,
GeventedHTTPTransport,
TwistedHTTPTransport,
RequestsHTTPTransport,
TornadoHTTPTransport,
UDPTransport,
EventletHTTPTransport,
]
if sys.version_info >= (3, 3):
default_transports += [AioHttpTransport]
| inspirehep/raven-python | raven/transport/registry.py | Python | bsd-3-clause | 2,913 | 0.00103 |
#!/usr/bin/python2
'''
This is an example of how to build a simple generator
'''
def my_reverse(data):
for index in range(len(data) - 1, -1, -1):
yield data[index]
for char in my_reverse('golf'):
print(char)
'''
Notice that 'my_reverse' is still recognized as a plain function and not
'generator' or something.
When you do use it on data, the return value is a 'generator'.
Compare this to Python's own 'reversed':
- it is built in, so its type is 'type'
- when applied to data, the result's type is 'reversed'.
'''
print(type(my_reverse))
print(type(my_reverse('golf')))
print(type(reversed))
print(type(reversed('golf')))
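# A related sketch: the same reversal as a generator expression, which likewise
# produces a 'generator' object without defining a named function.
gen = (char for char in 'golf'[::-1])
print(type(gen))
print(''.join(gen))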
| nonZero/demos-python | src/examples/short/iteration/iterator_generator.py | Python | gpl-3.0 | 652 | 0.001534 |
# coding: utf-8
#
# The GIL limits Python's multi-threading effectiveness.
# But that seems fine here, because these operations involve a lot of socket IO,
# so there seems to be no need to use multiprocessing.
#
import uiautomator2 as u2
import adbutils
import threading
from logzero import logger
def worker(d: u2.Device):
d.app_start("io.appium.android.apis", stop=True)
d(text="App").wait()
for el in d.xpath("@android:id/list").child("/android.widget.TextView").all():
logger.info("%s click %s", d.serial, el.text)
el.click()
d.press("back")
logger.info("%s DONE", d.serial)
for dev in adbutils.adb.device_list():
print("Dev:", dev)
d = u2.connect(dev.serial)
t = threading.Thread(target=worker, args=(d,))
t.start()
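# Hedged note: collecting the Thread objects and calling t.join() on each would
# make the wait explicit, though the non-daemon threads above already keep the
# process alive until every worker finishes.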
| openatx/uiautomator2 | examples/multi-thread-example.py | Python | mit | 747 | 0.001339 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._management_locks_operations import build_create_or_update_at_resource_group_level_request, build_create_or_update_at_resource_level_request, build_create_or_update_at_subscription_level_request, build_create_or_update_by_scope_request, build_delete_at_resource_group_level_request, build_delete_at_resource_level_request, build_delete_at_subscription_level_request, build_delete_by_scope_request, build_get_at_resource_group_level_request, build_get_at_resource_level_request, build_get_at_subscription_level_request, build_get_by_scope_request, build_list_at_resource_group_level_request, build_list_at_resource_level_request, build_list_at_subscription_level_request, build_list_by_scope_request
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class ManagementLocksOperations:
"""ManagementLocksOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.resource.locks.v2016_09_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace_async
async def create_or_update_at_resource_group_level(
self,
resource_group_name: str,
lock_name: str,
parameters: "_models.ManagementLockObject",
**kwargs: Any
) -> "_models.ManagementLockObject":
"""Creates or updates a management lock at the resource group level.
When you apply a lock at a parent scope, all child resources inherit the same lock. To create
management locks, you must have access to Microsoft.Authorization/\ * or
Microsoft.Authorization/locks/* actions. Of the built-in roles, only Owner and User Access
Administrator are granted those actions.
:param resource_group_name: The name of the resource group to lock.
:type resource_group_name: str
:param lock_name: The lock name. The lock name can be a maximum of 260 characters. It cannot
        contain <, >, %, &, :, \, ?, /, or any control characters.
:type lock_name: str
:param parameters: The management lock parameters.
:type parameters: ~azure.mgmt.resource.locks.v2016_09_01.models.ManagementLockObject
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ManagementLockObject, or the result of cls(response)
:rtype: ~azure.mgmt.resource.locks.v2016_09_01.models.ManagementLockObject
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ManagementLockObject"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(parameters, 'ManagementLockObject')
request = build_create_or_update_at_resource_group_level_request(
resource_group_name=resource_group_name,
lock_name=lock_name,
subscription_id=self._config.subscription_id,
content_type=content_type,
json=_json,
template_url=self.create_or_update_at_resource_group_level.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('ManagementLockObject', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('ManagementLockObject', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_or_update_at_resource_group_level.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Authorization/locks/{lockName}'} # type: ignore
@distributed_trace_async
async def delete_at_resource_group_level(
self,
resource_group_name: str,
lock_name: str,
**kwargs: Any
) -> None:
"""Deletes a management lock at the resource group level.
To delete management locks, you must have access to Microsoft.Authorization/\ * or
Microsoft.Authorization/locks/* actions. Of the built-in roles, only Owner and User Access
Administrator are granted those actions.
:param resource_group_name: The name of the resource group containing the lock.
:type resource_group_name: str
:param lock_name: The name of lock to delete.
:type lock_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_delete_at_resource_group_level_request(
resource_group_name=resource_group_name,
lock_name=lock_name,
subscription_id=self._config.subscription_id,
template_url=self.delete_at_resource_group_level.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
delete_at_resource_group_level.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Authorization/locks/{lockName}'} # type: ignore
@distributed_trace_async
async def get_at_resource_group_level(
self,
resource_group_name: str,
lock_name: str,
**kwargs: Any
) -> "_models.ManagementLockObject":
"""Gets a management lock at the resource group level.
:param resource_group_name: The name of the locked resource group.
:type resource_group_name: str
:param lock_name: The name of the lock to get.
:type lock_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ManagementLockObject, or the result of cls(response)
:rtype: ~azure.mgmt.resource.locks.v2016_09_01.models.ManagementLockObject
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ManagementLockObject"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_at_resource_group_level_request(
resource_group_name=resource_group_name,
lock_name=lock_name,
subscription_id=self._config.subscription_id,
template_url=self.get_at_resource_group_level.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ManagementLockObject', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_at_resource_group_level.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Authorization/locks/{lockName}'} # type: ignore
@distributed_trace_async
async def create_or_update_by_scope(
self,
scope: str,
lock_name: str,
parameters: "_models.ManagementLockObject",
**kwargs: Any
) -> "_models.ManagementLockObject":
"""Create or update a management lock by scope.
:param scope: The scope for the lock. When providing a scope for the assignment, use
'/subscriptions/{subscriptionId}' for subscriptions,
'/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}' for resource groups, and
'/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePathIfPresent}/{resourceType}/{resourceName}'
for resources.
:type scope: str
:param lock_name: The name of lock.
:type lock_name: str
:param parameters: Create or update management lock parameters.
:type parameters: ~azure.mgmt.resource.locks.v2016_09_01.models.ManagementLockObject
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ManagementLockObject, or the result of cls(response)
:rtype: ~azure.mgmt.resource.locks.v2016_09_01.models.ManagementLockObject
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ManagementLockObject"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(parameters, 'ManagementLockObject')
request = build_create_or_update_by_scope_request(
scope=scope,
lock_name=lock_name,
content_type=content_type,
json=_json,
template_url=self.create_or_update_by_scope.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('ManagementLockObject', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('ManagementLockObject', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_or_update_by_scope.metadata = {'url': '/{scope}/providers/Microsoft.Authorization/locks/{lockName}'} # type: ignore
@distributed_trace_async
async def delete_by_scope(
self,
scope: str,
lock_name: str,
**kwargs: Any
) -> None:
"""Delete a management lock by scope.
:param scope: The scope for the lock.
:type scope: str
:param lock_name: The name of lock.
:type lock_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_delete_by_scope_request(
scope=scope,
lock_name=lock_name,
template_url=self.delete_by_scope.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
delete_by_scope.metadata = {'url': '/{scope}/providers/Microsoft.Authorization/locks/{lockName}'} # type: ignore
@distributed_trace_async
async def get_by_scope(
self,
scope: str,
lock_name: str,
**kwargs: Any
) -> "_models.ManagementLockObject":
"""Get a management lock by scope.
:param scope: The scope for the lock.
:type scope: str
:param lock_name: The name of lock.
:type lock_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ManagementLockObject, or the result of cls(response)
:rtype: ~azure.mgmt.resource.locks.v2016_09_01.models.ManagementLockObject
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ManagementLockObject"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_by_scope_request(
scope=scope,
lock_name=lock_name,
template_url=self.get_by_scope.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ManagementLockObject', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_by_scope.metadata = {'url': '/{scope}/providers/Microsoft.Authorization/locks/{lockName}'} # type: ignore
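    # Hedged usage sketch (client wiring is an assumption, not part of this file):
    #   from azure.identity.aio import DefaultAzureCredential
    #   from azure.mgmt.resource.locks.aio import ManagementLockClient
    #   client = ManagementLockClient(DefaultAzureCredential(), "<subscription-id>")
    #   lock = await client.management_locks.get_by_scope(scope, "myLock")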
@distributed_trace_async
async def create_or_update_at_resource_level(
self,
resource_group_name: str,
resource_provider_namespace: str,
parent_resource_path: str,
resource_type: str,
resource_name: str,
lock_name: str,
parameters: "_models.ManagementLockObject",
**kwargs: Any
) -> "_models.ManagementLockObject":
"""Creates or updates a management lock at the resource level or any level below the resource.
When you apply a lock at a parent scope, all child resources inherit the same lock. To create
management locks, you must have access to Microsoft.Authorization/\ * or
Microsoft.Authorization/locks/* actions. Of the built-in roles, only Owner and User Access
Administrator are granted those actions.
:param resource_group_name: The name of the resource group containing the resource to lock.
:type resource_group_name: str
:param resource_provider_namespace: The resource provider namespace of the resource to lock.
:type resource_provider_namespace: str
:param parent_resource_path: The parent resource identity.
:type parent_resource_path: str
:param resource_type: The resource type of the resource to lock.
:type resource_type: str
:param resource_name: The name of the resource to lock.
:type resource_name: str
:param lock_name: The name of lock. The lock name can be a maximum of 260 characters. It cannot
        contain <, >, %, &, :, \, ?, /, or any control characters.
:type lock_name: str
:param parameters: Parameters for creating or updating a management lock.
:type parameters: ~azure.mgmt.resource.locks.v2016_09_01.models.ManagementLockObject
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ManagementLockObject, or the result of cls(response)
:rtype: ~azure.mgmt.resource.locks.v2016_09_01.models.ManagementLockObject
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ManagementLockObject"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(parameters, 'ManagementLockObject')
request = build_create_or_update_at_resource_level_request(
resource_group_name=resource_group_name,
resource_provider_namespace=resource_provider_namespace,
parent_resource_path=parent_resource_path,
resource_type=resource_type,
resource_name=resource_name,
lock_name=lock_name,
subscription_id=self._config.subscription_id,
content_type=content_type,
json=_json,
template_url=self.create_or_update_at_resource_level.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('ManagementLockObject', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('ManagementLockObject', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_or_update_at_resource_level.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}/providers/Microsoft.Authorization/locks/{lockName}'} # type: ignore
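    # --- Hedged usage sketch (editor's illustration, not generated code) ---
    # Creating a lock one level below a resource group; every value below is a
    # placeholder, and ManagementLockObject is assumed to be imported from the
    # models namespace referenced in the docstrings above.
    #
    #     lock = await client.management_locks.create_or_update_at_resource_level(
    #         resource_group_name="my-rg",
    #         resource_provider_namespace="Microsoft.Storage",
    #         parent_resource_path="",
    #         resource_type="storageAccounts",
    #         resource_name="mystorageaccount",
    #         lock_name="no-delete",
    #         parameters=ManagementLockObject(level="CanNotDelete"),
    #     )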
@distributed_trace_async
async def delete_at_resource_level(
self,
resource_group_name: str,
resource_provider_namespace: str,
parent_resource_path: str,
resource_type: str,
resource_name: str,
lock_name: str,
**kwargs: Any
) -> None:
"""Deletes the management lock of a resource or any level below the resource.
        To delete management locks, you must have access to Microsoft.Authorization/* or
Microsoft.Authorization/locks/* actions. Of the built-in roles, only Owner and User Access
Administrator are granted those actions.
:param resource_group_name: The name of the resource group containing the resource with the
lock to delete.
:type resource_group_name: str
:param resource_provider_namespace: The resource provider namespace of the resource with the
lock to delete.
:type resource_provider_namespace: str
:param parent_resource_path: The parent resource identity.
:type parent_resource_path: str
:param resource_type: The resource type of the resource with the lock to delete.
:type resource_type: str
:param resource_name: The name of the resource with the lock to delete.
:type resource_name: str
:param lock_name: The name of the lock to delete.
:type lock_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_delete_at_resource_level_request(
resource_group_name=resource_group_name,
resource_provider_namespace=resource_provider_namespace,
parent_resource_path=parent_resource_path,
resource_type=resource_type,
resource_name=resource_name,
lock_name=lock_name,
subscription_id=self._config.subscription_id,
template_url=self.delete_at_resource_level.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
delete_at_resource_level.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}/providers/Microsoft.Authorization/locks/{lockName}'} # type: ignore
@distributed_trace_async
async def get_at_resource_level(
self,
resource_group_name: str,
resource_provider_namespace: str,
parent_resource_path: str,
resource_type: str,
resource_name: str,
lock_name: str,
**kwargs: Any
) -> "_models.ManagementLockObject":
"""Get the management lock of a resource or any level below resource.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param resource_provider_namespace: The namespace of the resource provider.
:type resource_provider_namespace: str
:param parent_resource_path: An extra path parameter needed in some services, like SQL
Databases.
:type parent_resource_path: str
:param resource_type: The type of the resource.
:type resource_type: str
:param resource_name: The name of the resource.
:type resource_name: str
        :param lock_name: The name of the lock.
:type lock_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ManagementLockObject, or the result of cls(response)
:rtype: ~azure.mgmt.resource.locks.v2016_09_01.models.ManagementLockObject
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ManagementLockObject"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_at_resource_level_request(
resource_group_name=resource_group_name,
resource_provider_namespace=resource_provider_namespace,
parent_resource_path=parent_resource_path,
resource_type=resource_type,
resource_name=resource_name,
lock_name=lock_name,
subscription_id=self._config.subscription_id,
template_url=self.get_at_resource_level.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ManagementLockObject', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_at_resource_level.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}/providers/Microsoft.Authorization/locks/{lockName}'} # type: ignore
@distributed_trace_async
async def create_or_update_at_subscription_level(
self,
lock_name: str,
parameters: "_models.ManagementLockObject",
**kwargs: Any
) -> "_models.ManagementLockObject":
"""Creates or updates a management lock at the subscription level.
When you apply a lock at a parent scope, all child resources inherit the same lock. To create
        management locks, you must have access to Microsoft.Authorization/* or
Microsoft.Authorization/locks/* actions. Of the built-in roles, only Owner and User Access
Administrator are granted those actions.
        :param lock_name: The name of the lock. The lock name can be a maximum of 260 characters.
         It cannot contain <, >, %, &, :, \\, ?, /, or any control characters.
:type lock_name: str
:param parameters: The management lock parameters.
:type parameters: ~azure.mgmt.resource.locks.v2016_09_01.models.ManagementLockObject
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ManagementLockObject, or the result of cls(response)
:rtype: ~azure.mgmt.resource.locks.v2016_09_01.models.ManagementLockObject
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ManagementLockObject"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(parameters, 'ManagementLockObject')
request = build_create_or_update_at_subscription_level_request(
lock_name=lock_name,
subscription_id=self._config.subscription_id,
content_type=content_type,
json=_json,
template_url=self.create_or_update_at_subscription_level.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('ManagementLockObject', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('ManagementLockObject', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_or_update_at_subscription_level.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/locks/{lockName}'} # type: ignore
@distributed_trace_async
async def delete_at_subscription_level(
self,
lock_name: str,
**kwargs: Any
) -> None:
"""Deletes the management lock at the subscription level.
        To delete management locks, you must have access to Microsoft.Authorization/* or
Microsoft.Authorization/locks/* actions. Of the built-in roles, only Owner and User Access
Administrator are granted those actions.
        :param lock_name: The name of the lock to delete.
:type lock_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_delete_at_subscription_level_request(
lock_name=lock_name,
subscription_id=self._config.subscription_id,
template_url=self.delete_at_subscription_level.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
delete_at_subscription_level.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/locks/{lockName}'} # type: ignore
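    # --- Hedged usage sketch (editor's illustration, not generated code) ---
    # Deleting a subscription-level lock; on success (HTTP 200/204) the call
    # returns None:
    #
    #     await client.management_locks.delete_at_subscription_level("my-lock")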
@distributed_trace_async
async def get_at_subscription_level(
self,
lock_name: str,
**kwargs: Any
) -> "_models.ManagementLockObject":
"""Gets a management lock at the subscription level.
:param lock_name: The name of the lock to get.
:type lock_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ManagementLockObject, or the result of cls(response)
:rtype: ~azure.mgmt.resource.locks.v2016_09_01.models.ManagementLockObject
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ManagementLockObject"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_at_subscription_level_request(
lock_name=lock_name,
subscription_id=self._config.subscription_id,
template_url=self.get_at_subscription_level.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ManagementLockObject', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_at_subscription_level.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/locks/{lockName}'} # type: ignore
@distributed_trace
def list_at_resource_group_level(
self,
resource_group_name: str,
filter: Optional[str] = None,
**kwargs: Any
) -> AsyncIterable["_models.ManagementLockListResult"]:
"""Gets all the management locks for a resource group.
:param resource_group_name: The name of the resource group containing the locks to get.
:type resource_group_name: str
:param filter: The filter to apply on the operation.
:type filter: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ManagementLockListResult or the result of
cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.resource.locks.v2016_09_01.models.ManagementLockListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ManagementLockListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_list_at_resource_group_level_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
filter=filter,
template_url=self.list_at_resource_group_level.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_at_resource_group_level_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
filter=filter,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("ManagementLockListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_at_resource_group_level.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Authorization/locks'} # type: ignore
@distributed_trace
def list_at_resource_level(
self,
resource_group_name: str,
resource_provider_namespace: str,
parent_resource_path: str,
resource_type: str,
resource_name: str,
filter: Optional[str] = None,
**kwargs: Any
) -> AsyncIterable["_models.ManagementLockListResult"]:
"""Gets all the management locks for a resource or any level below resource.
:param resource_group_name: The name of the resource group containing the locked resource. The
name is case insensitive.
:type resource_group_name: str
:param resource_provider_namespace: The namespace of the resource provider.
:type resource_provider_namespace: str
:param parent_resource_path: The parent resource identity.
:type parent_resource_path: str
:param resource_type: The resource type of the locked resource.
:type resource_type: str
:param resource_name: The name of the locked resource.
:type resource_name: str
:param filter: The filter to apply on the operation.
:type filter: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ManagementLockListResult or the result of
cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.resource.locks.v2016_09_01.models.ManagementLockListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ManagementLockListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_list_at_resource_level_request(
resource_group_name=resource_group_name,
resource_provider_namespace=resource_provider_namespace,
parent_resource_path=parent_resource_path,
resource_type=resource_type,
resource_name=resource_name,
subscription_id=self._config.subscription_id,
filter=filter,
template_url=self.list_at_resource_level.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_at_resource_level_request(
resource_group_name=resource_group_name,
resource_provider_namespace=resource_provider_namespace,
parent_resource_path=parent_resource_path,
resource_type=resource_type,
resource_name=resource_name,
subscription_id=self._config.subscription_id,
filter=filter,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("ManagementLockListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_at_resource_level.metadata = {'url': '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePath}/{resourceType}/{resourceName}/providers/Microsoft.Authorization/locks'} # type: ignore
@distributed_trace
def list_at_subscription_level(
self,
filter: Optional[str] = None,
**kwargs: Any
) -> AsyncIterable["_models.ManagementLockListResult"]:
"""Gets all the management locks for a subscription.
:param filter: The filter to apply on the operation.
:type filter: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ManagementLockListResult or the result of
cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.resource.locks.v2016_09_01.models.ManagementLockListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ManagementLockListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_list_at_subscription_level_request(
subscription_id=self._config.subscription_id,
filter=filter,
template_url=self.list_at_subscription_level.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_at_subscription_level_request(
subscription_id=self._config.subscription_id,
filter=filter,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("ManagementLockListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_at_subscription_level.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/locks'} # type: ignore
@distributed_trace
def list_by_scope(
self,
scope: str,
filter: Optional[str] = None,
**kwargs: Any
) -> AsyncIterable["_models.ManagementLockListResult"]:
"""Gets all the management locks for a scope.
:param scope: The scope for the lock. When providing a scope for the assignment, use
'/subscriptions/{subscriptionId}' for subscriptions,
'/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}' for resource groups, and
'/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePathIfPresent}/{resourceType}/{resourceName}'
for resources.
:type scope: str
:param filter: The filter to apply on the operation.
:type filter: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ManagementLockListResult or the result of
cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.resource.locks.v2016_09_01.models.ManagementLockListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ManagementLockListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_list_by_scope_request(
scope=scope,
filter=filter,
template_url=self.list_by_scope.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_by_scope_request(
scope=scope,
filter=filter,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("ManagementLockListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_by_scope.metadata = {'url': '/{scope}/providers/Microsoft.Authorization/locks'} # type: ignore
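    # --- Hedged usage sketch (editor's illustration, not generated code) ---
    # The list_* methods return an AsyncItemPaged driven by the paging closures
    # above; iterate it with `async for` rather than awaiting the call itself:
    #
    #     async for lock in client.management_locks.list_by_scope(
    #             scope="/subscriptions/<subscription-id>"):
    #         print(lock.name, lock.level)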
| Azure/azure-sdk-for-python | sdk/resources/azure-mgmt-resource/azure/mgmt/resource/locks/v2016_09_01/aio/operations/_management_locks_operations.py | Python | mit | 49,288 | 0.005011 |
#coding: UTF-8
'''This script guides the seafile admin through setting up seafile with MySQL'''
import sys
import os
import time
import re
import shutil
import glob
import subprocess
import hashlib
import getpass
import uuid
import warnings
import MySQLdb
from ConfigParser import ConfigParser
try:
import readline # pylint: disable=W0611
except ImportError:
pass
SERVER_MANUAL_HTTP = 'https://github.com/haiwen/seafile/wiki'
class Utils(object):
'''Groups all helper functions here'''
@staticmethod
def welcome():
'''Show welcome message'''
welcome_msg = '''\
-----------------------------------------------------------------
This script will guide you to setup your seafile server using MySQL.
Make sure you have read seafile server manual at
%s
Press ENTER to continue
-----------------------------------------------------------------''' % SERVER_MANUAL_HTTP
print welcome_msg
raw_input()
@staticmethod
def highlight(content):
'''Add ANSI color to content to get it highlighted on terminal'''
return '\x1b[33m%s\x1b[m' % content
@staticmethod
def info(msg):
print msg
@staticmethod
def error(msg):
'''Print error and exit'''
print
print 'Error: ' + msg
sys.exit(1)
@staticmethod
def run_argv(argv, cwd=None, env=None, suppress_stdout=False, suppress_stderr=False):
        '''Run a program and wait for it to finish, and return its exit code. The
        standard output of this program is suppressed.
        '''
with open(os.devnull, 'w') as devnull:
if suppress_stdout:
stdout = devnull
else:
stdout = sys.stdout
if suppress_stderr:
stderr = devnull
else:
stderr = sys.stderr
proc = subprocess.Popen(argv,
cwd=cwd,
stdout=stdout,
stderr=stderr,
env=env)
return proc.wait()
@staticmethod
def run(cmdline, cwd=None, env=None, suppress_stdout=False, suppress_stderr=False):
'''Like run_argv but specify a command line string instead of argv'''
with open(os.devnull, 'w') as devnull:
if suppress_stdout:
stdout = devnull
else:
stdout = sys.stdout
if suppress_stderr:
stderr = devnull
else:
stderr = sys.stderr
proc = subprocess.Popen(cmdline,
cwd=cwd,
stdout=stdout,
stderr=stderr,
env=env,
shell=True)
return proc.wait()
@staticmethod
    def prepend_env_value(name, value, env=None, separator=':'):
        '''Prepend a new value to a PATH-like environment variable'''
        if env is None:
            env = os.environ
        try:
            current_value = env[name]
        except KeyError:
            current_value = ''
        new_value = value
        if current_value:
            new_value += separator + current_value
        env[name] = new_value
@staticmethod
def must_mkdir(path):
'''Create a directory, exit on failure'''
try:
os.mkdir(path)
except OSError, e:
Utils.error('failed to create directory %s:%s' % (path, e))
@staticmethod
def must_copy(src, dst):
'''Copy src to dst, exit on failure'''
try:
shutil.copy(src, dst)
except Exception, e:
Utils.error('failed to copy %s to %s: %s' % (src, dst, e))
@staticmethod
def find_in_path(prog):
if 'win32' in sys.platform:
sep = ';'
else:
sep = ':'
dirs = os.environ['PATH'].split(sep)
for d in dirs:
d = d.strip()
if d == '':
continue
path = os.path.join(d, prog)
if os.path.exists(path):
return path
return None
@staticmethod
def get_python_executable():
'''Return the python executable. This should be the PYTHON environment
variable which is set in setup-seafile-mysql.sh
'''
return os.environ['PYTHON']
@staticmethod
def read_config(fn):
'''Return a case sensitive ConfigParser by reading the file "fn"'''
cp = ConfigParser()
cp.optionxform = str
cp.read(fn)
return cp
@staticmethod
def write_config(cp, fn):
        '''Write the ConfigParser object "cp" to the file "fn"'''
with open(fn, 'w') as fp:
cp.write(fp)
@staticmethod
def ask_question(desc,
key=None,
note=None,
default=None,
validate=None,
yes_or_no=False,
password=False):
'''Ask a question, return the answer.
@desc description, e.g. "What is the port of ccnet?"
@key a name to represent the target of the question, e.g. "port for
ccnet server"
@note additional information for the question, e.g. "Must be a valid
port number"
@default the default value of the question. If the default value is
not None, when the user enter nothing and press [ENTER], the default
value would be returned
@validate a function that takes the user input as the only parameter
and validate it. It should return a validated value, or throws an
"InvalidAnswer" exception if the input is not valid.
@yes_or_no If true, the user must answer "yes" or "no", and a boolean
value would be returned
@password If true, the user input would not be echoed to the
console
'''
assert key or yes_or_no
# Format description
print
if note:
desc += '\n' + note
desc += '\n'
if yes_or_no:
desc += '[ yes or no ]'
else:
if default:
desc += '[ default "%s" ]' % default
else:
desc += '[ %s ]' % key
desc += ' '
while True:
# prompt for user input
if password:
answer = getpass.getpass(desc).strip()
else:
answer = raw_input(desc).strip()
# No user input: use default
if not answer:
if default:
answer = default
else:
continue
# Have user input: validate answer
if yes_or_no:
if answer not in ['yes', 'no']:
print Utils.highlight('\nPlease answer yes or no\n')
continue
else:
return answer == 'yes'
else:
if validate:
try:
return validate(answer)
except InvalidAnswer, e:
print Utils.highlight('\n%s\n' % e)
continue
else:
return answer
@staticmethod
def validate_port(port):
try:
port = int(port)
except ValueError:
raise InvalidAnswer('%s is not a valid port' % Utils.highlight(port))
if port <= 0 or port > 65535:
raise InvalidAnswer('%s is not a valid port' % Utils.highlight(port))
return port
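# --- Hedged usage sketch (editor's illustration) ---
# A typical call combining ask_question with a validator; the prompt text and
# default below are invented for illustration:
#
#     port = Utils.ask_question('What port should the web server use?',
#                               key='web server port',
#                               default='8000',
#                               validate=Utils.validate_port)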
class InvalidAnswer(Exception):
def __init__(self, msg):
Exception.__init__(self)
self.msg = msg
def __str__(self):
return self.msg
### END of Utils
####################
class EnvManager(object):
'''System environment and directory layout'''
def __init__(self):
self.install_path = os.path.dirname(os.path.abspath(__file__))
self.top_dir = os.path.dirname(self.install_path)
self.bin_dir = os.path.join(self.install_path, 'seafile', 'bin')
    def check_pre_condition(self):
def error_if_not_exists(path):
if not os.path.exists(path):
Utils.error('"%s" not found' % path)
paths = [
os.path.join(self.install_path, 'seafile'),
os.path.join(self.install_path, 'seahub'),
os.path.join(self.install_path, 'runtime'),
]
for path in paths:
error_if_not_exists(path)
if os.path.exists(ccnet_config.ccnet_dir):
Utils.error('Ccnet config dir \"%s\" already exists.' % ccnet_config.ccnet_dir)
def get_seahub_env(self):
'''Prepare for seahub syncdb'''
env = dict(os.environ)
env['CCNET_CONF_DIR'] = ccnet_config.ccnet_dir
env['SEAFILE_CONF_DIR'] = seafile_config.seafile_dir
self.setup_python_path(env)
return env
def setup_python_path(self, env):
        '''Add PYTHONPATH and CCNET_CONF_DIR/SEAFILE_CONF_DIR to env, which is
needed by seahub
'''
install_path = self.install_path
pro_pylibs_dir = os.path.join(install_path, 'pro', 'python')
extra_python_path = [
pro_pylibs_dir,
os.path.join(install_path, 'seahub', 'thirdpart'),
os.path.join(install_path, 'seafile/lib/python2.6/site-packages'),
os.path.join(install_path, 'seafile/lib64/python2.6/site-packages'),
os.path.join(install_path, 'seafile/lib/python2.7/site-packages'),
os.path.join(install_path, 'seafile/lib64/python2.7/site-packages'),
]
for path in extra_python_path:
Utils.prepend_env_value('PYTHONPATH', path, env=env)
def get_binary_env(self):
'''Set LD_LIBRARY_PATH for seafile server executables'''
env = dict(os.environ)
lib_dir = os.path.join(self.install_path, 'seafile', 'lib')
lib64_dir = os.path.join(self.install_path, 'seafile', 'lib64')
Utils.prepend_env_value('LD_LIBRARY_PATH', lib_dir, env=env)
Utils.prepend_env_value('LD_LIBRARY_PATH', lib64_dir, env=env)
return env
class AbstractConfigurator(object):
'''Abstract Base class for ccnet/seafile/seahub/db configurator'''
def __init__(self):
pass
def ask_questions(self):
raise NotImplementedError
def generate(self):
raise NotImplementedError
class AbstractDBConfigurator(AbstractConfigurator):
'''Abstract class for database related configuration'''
def __init__(self):
AbstractConfigurator.__init__(self)
self.mysql_host = 'localhost'
self.mysql_port = 3306
self.use_existing_db = False
self.seafile_mysql_user = ''
self.seafile_mysql_password = ''
self.ccnet_db_name = ''
self.seafile_db_name = ''
self.seahub_db_name = ''
self.seahub_admin_email = ''
self.seahub_admin_password = ''
@staticmethod
def ask_use_existing_db():
def validate(choice):
if choice not in ['1', '2']:
raise InvalidAnswer('Please choose 1 or 2')
return choice == '2'
question = '''\
-------------------------------------------------------
Please choose a way to initialize seafile databases:
-------------------------------------------------------
'''
note = '''\
[1] Create new ccnet/seafile/seahub databases
[2] Use existing ccnet/seafile/seahub databases
'''
return Utils.ask_question(question,
key='1 or 2',
note=note,
validate=validate)
def ask_mysql_host_port(self):
def validate(host):
if not re.match(r'^[a-zA-Z0-9_\-\.]+$', host):
raise InvalidAnswer('%s is not a valid host' % Utils.highlight(host))
if host == 'localhost':
host = '127.0.0.1'
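            # Note: the port prompt is nested inside the host validator so that
            # both values are collected together before any connection check.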
question = 'What is the port of mysql server?'
key = 'mysql server port'
default = '3306'
port = Utils.ask_question(question,
key=key,
default=default,
validate=Utils.validate_port)
# self.check_mysql_server(host, port)
self.mysql_port = port
return host
question = 'What is the host of mysql server?'
key = 'mysql server host'
default = 'localhost'
self.mysql_host = Utils.ask_question(question,
key=key,
default=default,
validate=validate)
def check_mysql_server(self, host, port):
print '\nverifying mysql server running ... ',
try:
dummy = MySQLdb.connect(host=host, port=port)
except Exception:
print
raise InvalidAnswer('Failed to connect to mysql server at "%s:%s"' \
% (host, port))
print 'done'
def check_mysql_user(self, user, password):
print '\nverifying password of user %s ... ' % user,
kwargs = dict(host=self.mysql_host,
port=self.mysql_port,
user=user,
passwd=password)
try:
conn = MySQLdb.connect(**kwargs)
except Exception, e:
if isinstance(e, MySQLdb.OperationalError):
raise InvalidAnswer('Failed to connect to mysql server using user "%s" and password "***": %s' \
% (user, e.args[1]))
else:
raise InvalidAnswer('Failed to connect to mysql server using user "%s" and password "***": %s' \
% (user, e))
print 'done'
return conn
def create_seahub_admin(self):
try:
conn = MySQLdb.connect(host=self.mysql_host,
port=self.mysql_port,
user=self.seafile_mysql_user,
passwd=self.seafile_mysql_password,
db=self.ccnet_db_name)
except Exception, e:
if isinstance(e, MySQLdb.OperationalError):
Utils.error('Failed to connect to mysql database %s: %s' % (self.ccnet_db_name, e.args[1]))
else:
Utils.error('Failed to connect to mysql database %s: %s' % (self.ccnet_db_name, e))
cursor = conn.cursor()
sql = '''\
CREATE TABLE IF NOT EXISTS EmailUser (id INTEGER NOT NULL PRIMARY KEY AUTO_INCREMENT, email VARCHAR(255), passwd CHAR(64), is_staff BOOL NOT NULL, is_active BOOL NOT NULL, ctime BIGINT, UNIQUE INDEX (email)) ENGINE=INNODB'''
try:
cursor.execute(sql)
except Exception, e:
if isinstance(e, MySQLdb.OperationalError):
Utils.error('Failed to create ccnet user table: %s' % e.args[1])
else:
Utils.error('Failed to create ccnet user table: %s' % e)
sql = '''REPLACE INTO EmailUser(email, passwd, is_staff, is_active, ctime) VALUES ('%s', '%s', 1, 1, 0)''' \
% (seahub_config.admin_email, seahub_config.hashed_admin_password())
try:
cursor.execute(sql)
except Exception, e:
if isinstance(e, MySQLdb.OperationalError):
Utils.error('Failed to create admin user: %s' % e.args[1])
else:
Utils.error('Failed to create admin user: %s' % e)
conn.commit()
def ask_questions(self):
'''Ask questions and do database operations'''
raise NotImplementedError
class NewDBConfigurator(AbstractDBConfigurator):
'''Handles the case of creating new mysql databases for ccnet/seafile/seahub'''
def __init__(self):
AbstractDBConfigurator.__init__(self)
self.root_password = ''
self.root_conn = ''
def ask_questions(self):
self.ask_mysql_host_port()
self.ask_root_password()
self.ask_seafile_mysql_user_password()
self.ask_db_names()
def generate(self):
if not self.mysql_user_exists(self.seafile_mysql_user):
self.create_user()
self.create_databases()
def ask_root_password(self):
def validate(password):
self.root_conn = self.check_mysql_user('root', password)
return password
question = 'What is the password of the mysql root user?'
key = 'root password'
self.root_password = Utils.ask_question(question,
key=key,
validate=validate,
password=True)
def mysql_user_exists(self, user):
cursor = self.root_conn.cursor()
sql = '''SELECT EXISTS(SELECT 1 FROM mysql.user WHERE user = '%s')''' % user
try:
cursor.execute(sql)
return cursor.fetchall()[0][0]
except Exception, e:
if isinstance(e, MySQLdb.OperationalError):
Utils.error('Failed to check mysql user %s: %s' % (user, e.args[1]))
else:
Utils.error('Failed to check mysql user %s: %s' % (user, e))
finally:
cursor.close()
def ask_seafile_mysql_user_password(self):
def validate(user):
if user == 'root':
self.seafile_mysql_password = self.root_password
else:
question = 'Enter the password for mysql user "%s":' % Utils.highlight(user)
key = 'password for %s' % user
password = Utils.ask_question(question, key=key, password=True)
# If the user already exists, check the password here
if self.mysql_user_exists(user):
self.check_mysql_user(user, password)
self.seafile_mysql_password = password
return user
question = 'Enter the name for mysql user of seafile. It would be created if not exists.'
key = 'mysql user for seafile'
default = 'root'
self.seafile_mysql_user = Utils.ask_question(question,
key=key,
default=default,
validate=validate)
def ask_db_name(self, program, default):
question = 'Enter the database name for %s:' % program
key = '%s database' % program
return Utils.ask_question(question,
key=key,
default=default,
validate=self.validate_db_name)
def ask_db_names(self):
self.ccnet_db_name = self.ask_db_name('ccnet-server', 'ccnet-db')
self.seafile_db_name = self.ask_db_name('seafile-server', 'seafile-db')
self.seahub_db_name = self.ask_db_name('seahub', 'seahub-db')
def validate_db_name(self, db_name):
return db_name
def create_user(self):
cursor = self.root_conn.cursor()
sql = '''CREATE USER '%s'@'localhost' IDENTIFIED BY '%s' ''' \
% (self.seafile_mysql_user, self.seafile_mysql_password)
try:
cursor.execute(sql)
except Exception, e:
if isinstance(e, MySQLdb.OperationalError):
Utils.error('Failed to create mysql user %s: %s' % (self.seafile_mysql_user, e.args[1]))
else:
Utils.error('Failed to create mysql user %s: %s' % (self.seafile_mysql_user, e))
finally:
cursor.close()
def create_db(self, db_name):
cursor = self.root_conn.cursor()
sql = '''CREATE DATABASE IF NOT EXISTS `%s` CHARACTER SET UTF8''' \
% db_name
try:
cursor.execute(sql)
except Exception, e:
if isinstance(e, MySQLdb.OperationalError):
Utils.error('Failed to create database %s: %s' % (db_name, e.args[1]))
else:
Utils.error('Failed to create database %s: %s' % (db_name, e))
finally:
cursor.close()
def grant_db_permission(self, db_name):
cursor = self.root_conn.cursor()
sql = '''GRANT ALL PRIVILEGES ON `%s`.* to `%s` ''' \
% (db_name, self.seafile_mysql_user)
try:
cursor.execute(sql)
except Exception, e:
if isinstance(e, MySQLdb.OperationalError):
Utils.error('Failed to grant permission of database %s: %s' % (db_name, e.args[1]))
else:
Utils.error('Failed to grant permission of database %s: %s' % (db_name, e))
finally:
cursor.close()
def create_databases(self):
self.create_db(self.ccnet_db_name)
self.create_db(self.seafile_db_name)
self.create_db(self.seahub_db_name)
if self.seafile_mysql_user != 'root':
self.grant_db_permission(self.ccnet_db_name)
self.grant_db_permission(self.seafile_db_name)
self.grant_db_permission(self.seahub_db_name)
class ExistingDBConfigurator(AbstractDBConfigurator):
'''Handles the case of use existing mysql databases for ccnet/seafile/seahub'''
def __init__(self):
AbstractDBConfigurator.__init__(self)
self.use_existing_db = True
def ask_questions(self):
self.ask_mysql_host_port()
self.ask_existing_mysql_user_password()
self.ccnet_db_name = self.ask_db_name('ccnet')
self.seafile_db_name = self.ask_db_name('seafile')
self.seahub_db_name = self.ask_db_name('seahub')
def generate(self):
pass
def ask_existing_mysql_user_password(self):
def validate(user):
question = 'What is the password for mysql user "%s"?' % Utils.highlight(user)
key = 'password for %s' % user
password = Utils.ask_question(question, key=key, password=True)
self.check_mysql_user(user, password)
self.seafile_mysql_password = password
return user
question = 'Which mysql user to use for seafile?'
key = 'mysql user for seafile'
self.seafile_mysql_user = Utils.ask_question(question,
key=key,
validate=validate)
def ask_db_name(self, program):
def validate(db_name):
if self.seafile_mysql_user != 'root':
self.check_user_db_access(db_name)
return db_name
question = 'Enter the existing database name for %s:' % program
key = '%s database' % program
return Utils.ask_question(question,
key=key,
validate=validate)
def check_user_db_access(self, db_name):
user = self.seafile_mysql_user
password = self.seafile_mysql_password
print '\nverifying user "%s" access to database %s ... ' % (user, db_name),
try:
conn = MySQLdb.connect(host=self.mysql_host,
port=self.mysql_port,
user=user,
passwd=password,
db=db_name)
except Exception, e:
if isinstance(e, MySQLdb.OperationalError):
raise InvalidAnswer('Failed to access database %s using user "%s" and password "***": %s' \
% (db_name, user, e.args[1]))
else:
raise InvalidAnswer('Failed to access database %s using user "%s" and password "***": %s' \
% (db_name, user, e))
print 'done'
return conn
class CcnetConfigurator(AbstractConfigurator):
SERVER_NAME_REGEX = r'^[a-zA-Z0-9_\-]{3,14}$'
    SERVER_IP_OR_DOMAIN_REGEX = r'^[^.].+\..+[^.]$'
def __init__(self):
'''Initialize default values of ccnet configuration'''
AbstractConfigurator.__init__(self)
self.ccnet_dir = os.path.join(env_mgr.top_dir, 'ccnet')
self.port = 10001
self.server_name = 'my-seafile'
self.ip_or_domain = None
def ask_questions(self):
self.ask_server_name()
self.ask_server_ip_or_domain()
self.ask_port()
def generate(self):
print 'Generating ccnet configuration ...\n'
ccnet_init = os.path.join(env_mgr.bin_dir, 'ccnet-init')
argv = [
ccnet_init,
'--config-dir', self.ccnet_dir,
'--name', self.server_name,
'--host', self.ip_or_domain,
'--port', str(self.port),
]
if Utils.run_argv(argv, env=env_mgr.get_binary_env()) != 0:
Utils.error('Failed to generate ccnet configuration')
time.sleep(1)
self.generate_db_conf()
def generate_db_conf(self):
ccnet_conf = os.path.join(self.ccnet_dir, 'ccnet.conf')
config = Utils.read_config(ccnet_conf)
# [Database]
# ENGINE=
# HOST=
# USER=
# PASSWD=
# DB=
db_section = 'Database'
if not config.has_section(db_section):
config.add_section(db_section)
config.set(db_section, 'ENGINE', 'mysql')
config.set(db_section, 'HOST', db_config.mysql_host)
config.set(db_section, 'PORT', db_config.mysql_port)
config.set(db_section, 'USER', db_config.seafile_mysql_user)
config.set(db_section, 'PASSWD', db_config.seafile_mysql_password)
config.set(db_section, 'DB', db_config.ccnet_db_name)
config.set(db_section, 'CONNECTION_CHARSET', 'utf8')
Utils.write_config(config, ccnet_conf)
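        # For illustration, the [Database] section written above renders
        # roughly as follows (values are placeholders):
        #
        #     [Database]
        #     ENGINE = mysql
        #     HOST = 127.0.0.1
        #     PORT = 3306
        #     USER = seafile
        #     PASSWD = ***
        #     DB = ccnet-db
        #     CONNECTION_CHARSET = utf8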
def ask_server_name(self):
def validate(name):
if not re.match(self.SERVER_NAME_REGEX, name):
raise InvalidAnswer('%s is not a valid name' % Utils.highlight(name))
return name
question = 'What is the name of the server? It will be displayed on the client.'
key = 'server name'
        note = '3 - 14 letters, digits, "-" or "_"'
self.server_name = Utils.ask_question(question,
key=key,
note=note,
validate=validate)
def ask_server_ip_or_domain(self):
def validate(ip_or_domain):
if not re.match(self.SERVER_IP_OR_DOMAIN_REGEX, ip_or_domain):
raise InvalidAnswer('%s is not a valid ip or domain' % ip_or_domain)
return ip_or_domain
question = 'What is the ip or domain of the server?'
key = 'This server\'s ip or domain'
note = 'For example: www.mycompany.com, 192.168.1.101'
self.ip_or_domain = Utils.ask_question(question,
key=key,
note=note,
validate=validate)
def ask_port(self):
def validate(port):
return Utils.validate_port(port)
question = 'Which port do you want to use for the ccnet server?'
key = 'ccnet server port'
default = 10001
self.port = Utils.ask_question(question,
key=key,
default=default,
validate=validate)
class SeafileConfigurator(AbstractConfigurator):
def __init__(self):
AbstractConfigurator.__init__(self)
self.seafile_dir = os.path.join(env_mgr.top_dir, 'seafile-data')
self.port = 12001
self.httpserver_port = 8082
def ask_questions(self):
self.ask_seafile_dir()
self.ask_port()
self.ask_httpserver_port()
def generate(self):
print 'Generating seafile configuration ...\n'
seafserv_init = os.path.join(env_mgr.bin_dir, 'seaf-server-init')
argv = [
seafserv_init,
'--seafile-dir', self.seafile_dir,
'--port', str(self.port),
'--httpserver-port', str(self.httpserver_port),
]
if Utils.run_argv(argv, env=env_mgr.get_binary_env()) != 0:
            Utils.error('Failed to generate seafile configuration')
time.sleep(1)
self.generate_db_conf()
self.write_seafile_ini()
print 'done'
def generate_db_conf(self):
seafile_conf = os.path.join(self.seafile_dir, 'seafile.conf')
config = Utils.read_config(seafile_conf)
# [database]
# type=
# host=
# user=
# password=
# db_name=
# unix_socket=
db_section = 'database'
if not config.has_section(db_section):
config.add_section(db_section)
config.set(db_section, 'type', 'mysql')
config.set(db_section, 'host', db_config.mysql_host)
config.set(db_section, 'port', db_config.mysql_port)
config.set(db_section, 'user', db_config.seafile_mysql_user)
config.set(db_section, 'password', db_config.seafile_mysql_password)
config.set(db_section, 'db_name', db_config.seafile_db_name)
config.set(db_section, 'connection_charset', 'utf8')
Utils.write_config(config, seafile_conf)
def ask_seafile_dir(self):
def validate(path):
if os.path.exists(path):
raise InvalidAnswer('%s already exists' % Utils.highlight(path))
return path
question = 'Where do you want to put your seafile data?'
key = 'seafile-data'
note = 'Please use a volume with enough free space'
default = os.path.join(env_mgr.top_dir, 'seafile-data')
self.seafile_dir = Utils.ask_question(question,
key=key,
note=note,
default=default,
validate=validate)
def ask_port(self):
def validate(port):
port = Utils.validate_port(port)
if port == ccnet_config.port:
raise InvalidAnswer('%s is used by ccnet server, choose another one' \
% Utils.highlight(port))
return port
question = 'Which port do you want to use for the seafile server?'
key = 'seafile server port'
default = 12001
self.port = Utils.ask_question(question,
key=key,
default=default,
validate=validate)
def ask_httpserver_port(self):
def validate(port):
port = Utils.validate_port(port)
if port == ccnet_config.port:
raise InvalidAnswer('%s is used by ccnet server, choose another one' \
% Utils.highlight(port))
if port == seafile_config.port:
raise InvalidAnswer('%s is used by seafile server, choose another one' \
% Utils.highlight(port))
return port
question = 'Which port do you want to use for the seafile httpserver?'
key = 'seafile httpserver port'
default = 8082
self.httpserver_port = Utils.ask_question(question,
key=key,
default=default,
validate=validate)
def write_seafile_ini(self):
seafile_ini = os.path.join(ccnet_config.ccnet_dir, 'seafile.ini')
with open(seafile_ini, 'w') as fp:
fp.write(self.seafile_dir)
class SeahubConfigurator(AbstractConfigurator):
def __init__(self):
AbstractConfigurator.__init__(self)
self.admin_email = ''
self.admin_password = ''
def hashed_admin_password(self):
return hashlib.sha1(self.admin_password).hexdigest() # pylint: disable=E1101
def ask_questions(self):
pass
# self.ask_admin_email()
# self.ask_admin_password()
def generate(self):
'''Generating seahub_settings.py'''
print 'Generating seahub configuration ...\n'
time.sleep(1)
seahub_settings_py = os.path.join(env_mgr.top_dir, 'seahub_settings.py')
with open(seahub_settings_py, 'w') as fp:
self.write_secret_key(fp)
self.write_database_config(fp)
def write_secret_key(self, fp):
text = 'SECRET_KEY = "%s"\n\n' % self.gen_secret_key()
fp.write(text)
def write_database_config(self, fp):
template = '''\
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': '%(name)s',
'USER': '%(username)s',
'PASSWORD': '%(password)s',
'HOST': '%(host)s',
'PORT': '%(port)s',
'OPTIONS': {
'init_command': 'SET storage_engine=INNODB',
}
}
}
'''
text = template % dict(name=db_config.seahub_db_name,
username=db_config.seafile_mysql_user,
password=db_config.seafile_mysql_password,
host=db_config.mysql_host,
port=db_config.mysql_port)
fp.write(text)
def gen_secret_key(self):
data = str(uuid.uuid4()) + str(uuid.uuid4())
return data[:40]
def ask_admin_email(self):
print
print '----------------------------------------'
print 'Now let\'s create the admin account'
print '----------------------------------------'
def validate(email):
            # whitespace is not allowed anywhere in the address
            if re.search(r'\s', email):
raise InvalidAnswer('%s is not a valid email address' % Utils.highlight(email))
# must be a valid email address
if not re.match(r'^.+@.*\..+$', email):
raise InvalidAnswer('%s is not a valid email address' % Utils.highlight(email))
return email
key = 'admin email'
question = 'What is the ' + Utils.highlight('email') + ' for the admin account?'
self.admin_email = Utils.ask_question(question,
key=key,
validate=validate)
def ask_admin_password(self):
def validate(password):
key = 'admin password again'
question = 'Enter the ' + Utils.highlight('password again:')
password_again = Utils.ask_question(question,
key=key,
password=True)
if password_again != password:
raise InvalidAnswer('password mismatch')
return password
key = 'admin password'
question = 'What is the ' + Utils.highlight('password') + ' for the admin account?'
self.admin_password = Utils.ask_question(question,
key=key,
password=True,
validate=validate)
def do_syncdb(self):
print '----------------------------------------'
print 'Now creating seahub database tables ...\n'
print '----------------------------------------'
env = env_mgr.get_seahub_env()
cwd = os.path.join(env_mgr.install_path, 'seahub')
argv = [
Utils.get_python_executable(),
'manage.py',
'syncdb',
]
if Utils.run_argv(argv, cwd=cwd, env=env) != 0:
Utils.error("Failed to create seahub databases")
def prepare_avatar_dir(self):
# media_dir=${INSTALLPATH}/seahub/media
# orig_avatar_dir=${INSTALLPATH}/seahub/media/avatars
# dest_avatar_dir=${TOPDIR}/seahub-data/avatars
# if [[ ! -d ${dest_avatar_dir} ]]; then
# mkdir -p "${TOPDIR}/seahub-data"
# mv "${orig_avatar_dir}" "${dest_avatar_dir}"
# ln -s ../../../seahub-data/avatars ${media_dir}
# fi
try:
media_dir = os.path.join(env_mgr.install_path, 'seahub', 'media')
orig_avatar_dir = os.path.join(media_dir, 'avatars')
seahub_data_dir = os.path.join(env_mgr.top_dir, 'seahub-data')
dest_avatar_dir = os.path.join(seahub_data_dir, 'avatars')
if os.path.exists(dest_avatar_dir):
return
if not os.path.exists(seahub_data_dir):
os.mkdir(seahub_data_dir)
shutil.move(orig_avatar_dir, dest_avatar_dir)
os.symlink('../../../seahub-data/avatars', orig_avatar_dir)
except Exception, e:
Utils.error('Failed to prepare seahub avatars dir: %s' % e)
class SeafDavConfigurator(AbstractConfigurator):
def __init__(self):
AbstractConfigurator.__init__(self)
self.conf_dir = None
self.seafdav_conf = None
def ask_questions(self):
pass
def generate(self):
self.conf_dir = os.path.join(env_mgr.top_dir, 'conf')
        if not os.path.exists(self.conf_dir):
Utils.must_mkdir(self.conf_dir)
self.seafdav_conf = os.path.join(self.conf_dir, 'seafdav.conf')
text = '''
[WEBDAV]
enabled = false
port = 8080
fastcgi = false
share_name = /
'''
with open(self.seafdav_conf, 'w') as fp:
fp.write(text)
class UserManualHandler(object):
def __init__(self):
self.src_docs_dir = os.path.join(env_mgr.install_path, 'seafile', 'docs')
self.library_template_dir = None
def copy_user_manuals(self):
self.library_template_dir = os.path.join(seafile_config.seafile_dir, 'library-template')
Utils.must_mkdir(self.library_template_dir)
pattern = os.path.join(self.src_docs_dir, '*.doc')
for doc in glob.glob(pattern):
Utils.must_copy(doc, self.library_template_dir)
def report_config():
print
print '---------------------------------'
print 'This is your configuration'
print '---------------------------------'
print
template = '''\
server name: %(server_name)s
server ip/domain: %(ip_or_domain)s
ccnet port: %(ccnet_port)s
seafile data dir: %(seafile_dir)s
seafile port: %(seafile_port)s
httpserver port: %(httpserver_port)s
database: %(use_existing_db)s
ccnet database: %(ccnet_db_name)s
seafile database: %(seafile_db_name)s
seahub database: %(seahub_db_name)s
database user: %(db_user)s
'''
config = {
'server_name' : ccnet_config.server_name,
'ip_or_domain' : ccnet_config.ip_or_domain,
'ccnet_port' : ccnet_config.port,
'seafile_dir' : seafile_config.seafile_dir,
'seafile_port' : seafile_config.port,
'httpserver_port' : seafile_config.httpserver_port,
'admin_email' : seahub_config.admin_email,
        'use_existing_db': 'use existing' if db_config.use_existing_db else 'create new',
'ccnet_db_name': db_config.ccnet_db_name,
'seafile_db_name': db_config.seafile_db_name,
'seahub_db_name': db_config.seahub_db_name,
'db_user': db_config.seafile_mysql_user
}
print template % config
print
print '---------------------------------'
print 'Press ENTER to continue, or Ctrl-C to abort'
print '---------------------------------'
raw_input()
def create_seafile_server_symlink():
print '\ncreating seafile-server-latest symbolic link ... ',
seafile_server_symlink = os.path.join(env_mgr.top_dir, 'seafile-server-latest')
try:
os.symlink(os.path.basename(env_mgr.install_path), seafile_server_symlink)
except Exception, e:
print '\n'
Utils.error('Failed to create symbolic link %s: %s' % (seafile_server_symlink, e))
else:
print 'done\n\n'
env_mgr = EnvManager()
ccnet_config = CcnetConfigurator()
seafile_config = SeafileConfigurator()
seafdav_config = SeafDavConfigurator()
seahub_config = SeahubConfigurator()
user_manuals_handler = UserManualHandler()
# Would be created after AbstractDBConfigurator.ask_use_existing_db()
db_config = None
def main():
global db_config
Utils.welcome()
warnings.filterwarnings('ignore', category=MySQLdb.Warning)
    env_mgr.check_pre_condition()
# Part 1: collect configuration
ccnet_config.ask_questions()
seafile_config.ask_questions()
seahub_config.ask_questions()
if AbstractDBConfigurator.ask_use_existing_db():
db_config = ExistingDBConfigurator()
else:
db_config = NewDBConfigurator()
db_config.ask_questions()
report_config()
# Part 2: generate configuration
db_config.generate()
ccnet_config.generate()
seafile_config.generate()
seafdav_config.generate()
seahub_config.generate()
seahub_config.do_syncdb()
seahub_config.prepare_avatar_dir()
# db_config.create_seahub_admin()
user_manuals_handler.copy_user_manuals()
create_seafile_server_symlink()
report_success()
def report_success():
message = '''\
-----------------------------------------------------------------
Your seafile server configuration has been finished successfully.
-----------------------------------------------------------------
run seafile server: ./seafile.sh { start | stop | restart }
run seahub server: ./seahub.sh { start <port> | stop | restart <port> }
-----------------------------------------------------------------
If you are behind a firewall, remember to allow input/output of these TCP ports:
-----------------------------------------------------------------
port of ccnet server: %(ccnet_port)s
port of seafile server: %(seafile_port)s
port of seafile httpserver: %(httpserver_port)s
port of seahub: 8000
When problems occur, refer to
%(server_manual_http)s
for information.
'''
print message % dict(ccnet_port=ccnet_config.port,
seafile_port=seafile_config.port,
httpserver_port=seafile_config.httpserver_port,
server_manual_http=SERVER_MANUAL_HTTP)
if __name__ == '__main__':
try:
main()
except KeyboardInterrupt:
print
print Utils.highlight('The setup process is aborted')
print
| Chilledheart/seafile | scripts/setup-seafile-mysql.py | Python | gpl-3.0 | 43,261 | 0.002104 |
from django.conf.urls import include, url
from django.contrib.auth.decorators import login_required
from .views import alquiler_nuevo, home, details
urlpatterns = [
]
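# A hedged sketch (editor's illustration, kept commented out): one way the
# imported views might be wired up; the URL regexes and route names below are
# assumptions, not part of the original project.
#
#     urlpatterns = [
#         url(r'^$', home, name='home'),
#         url(r'^nuevo/$', login_required(alquiler_nuevo), name='alquiler_nuevo'),
#         url(r'^(?P<pk>\d+)/$', details, name='details'),
#     ]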
| acs-um/deptos | deptos/departamentos/urls.py | Python | apache-2.0 | 209 | 0.004785 |
"""Tests for UIAWrapper"""
from __future__ import print_function
from __future__ import unicode_literals
import time
import os
import sys
import unittest
import mock
import six
sys.path.append(".")
from pywinauto.application import Application, WindowSpecification # noqa: E402
from pywinauto.sysinfo import is_x64_Python, UIA_support # noqa: E402
from pywinauto.timings import Timings # noqa: E402
from pywinauto.actionlogger import ActionLogger # noqa: E402
from pywinauto import Desktop  # noqa: E402
from pywinauto import mouse # noqa: E402
if UIA_support:
import comtypes
import pywinauto.uia_defines as uia_defs
import pywinauto.controls.uia_controls as uia_ctls
wpf_samples_folder = os.path.join(
os.path.dirname(__file__), r"..\..\apps\WPF_samples")
if is_x64_Python():
wpf_samples_folder = os.path.join(wpf_samples_folder, 'x64')
wpf_app_1 = os.path.join(wpf_samples_folder, u"WpfApplication1.exe")
mfc_samples_folder = os.path.join(
os.path.dirname(__file__), r"..\..\apps\MFC_samples")
if is_x64_Python():
mfc_samples_folder = os.path.join(mfc_samples_folder, 'x64')
mfc_app_rebar_test = os.path.join(mfc_samples_folder, u"RebarTest.exe")
winforms_folder = os.path.join(
os.path.dirname(__file__), r"..\..\apps\WinForms_samples")
if is_x64_Python():
winforms_folder = os.path.join(winforms_folder, 'x64')
winfoms_app_grid = os.path.join(winforms_folder, u"DataGridView_TestApp.exe")
if UIA_support:
def _set_timings():
"""Setup timings for UIA related tests"""
Timings.defaults()
Timings.window_find_timeout = 20
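        # Other Timings attributes can be tuned per suite in the same way,
        # e.g. (a sketch, not used by these tests):
        #     Timings.after_clickinput_wait = 0.2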
class UIAWrapperTests(unittest.TestCase):
"""Unit tests for the UIAWrapper class"""
def setUp(self):
"""Set some data and ensure the application is in the state we want"""
_set_timings()
mouse.move((-500, 500)) # remove the mouse from the screen to avoid side effects
# start the application
self.app = Application(backend='uia')
self.app = self.app.start(wpf_app_1)
self.dlg = self.app.WPFSampleApplication
def tearDown(self):
"""Close the application after tests"""
self.app.kill()
def test_issue_296(self):
"""Test handling of disappered descendants"""
wrp = self.dlg.wrapper_object()
orig = wrp.element_info._element.FindAll
wrp.element_info._element.FindAll = mock.Mock(side_effect=ValueError("Mocked value error"),
return_value=[]) # empty list
self.assertEqual([], wrp.descendants())
exception_err = comtypes.COMError(-2147220991, "Mocked COM error", ())
wrp.element_info._element.FindAll = mock.Mock(side_effect=exception_err,
return_value=[]) # empty list
self.assertEqual([], wrp.descendants())
wrp.element_info._element.FindAll = orig # restore the original method
def test_issue_278(self):
"""Test that statement menu = app.MainWindow.Menu works for 'uia' backend"""
menu_spec = self.dlg.Menu
self.assertTrue(isinstance(menu_spec, WindowSpecification))
def test_find_nontop_ctl_by_class_name_and_title(self):
"""Test getting a non-top control by a class name and a title"""
# Look up for a non-top button control with 'Apply' caption
self.dlg.wait('ready')
caption = 'Apply'
wins = self.app.windows(top_level_only=False,
class_name='Button',
title=caption)
# Verify the number of found wrappers
self.assertEqual(len(wins), 1)
# Verify the caption of the found wrapper
self.assertEqual(wins[0].texts()[0], caption)
def test_find_top_win_by_class_name_and_title(self):
"""Test getting a top window by a class name and a title"""
# Since the top_level_only is True by default
# we don't specify it as a criteria argument
self.dlg.wait('ready')
caption = 'WPF Sample Application'
wins = self.app.windows(class_name='Window', title=caption)
# Verify the number of found wrappers
self.assertEqual(len(wins), 1)
# Verify the caption of the found wrapper
self.assertEqual(wins[0].texts()[0], caption)
def test_class(self):
"""Test getting the classname of the dialog"""
button = self.dlg.child_window(class_name="Button",
title="OK").wrapper_object()
self.assertEqual(button.class_name(), "Button")
def test_window_text(self):
"""Test getting the window Text of the dialog"""
label = self.dlg.TestLabel.wrapper_object()
self.assertEqual(label.window_text(), u"TestLabel")
self.assertEqual(label.can_be_label, True)
def test_control_id(self):
"""Test getting control ID"""
button = self.dlg.child_window(class_name="Button",
title="OK").wrapper_object()
self.assertEqual(button.control_id(), None)
def test_runtime_id(self):
"""Test getting runtime ID"""
button = self.dlg.child_window(class_name="Button",
title="OK").wrapper_object()
self.assertNotEqual(button.__hash__(), 0)
orig = button.element_info._element.GetRuntimeId
exception_err = comtypes.COMError(-2147220991, 'An event was unable to invoke any of the subscribers', ())
button.element_info._element.GetRuntimeId = mock.Mock(side_effect=exception_err)
self.assertEqual(button.__hash__(), 0)
button.element_info._element.GetRuntimeId = orig # restore the original method
def test_automation_id(self):
"""Test getting automation ID"""
alpha_toolbar = self.dlg.child_window(title="Alpha", control_type="ToolBar")
button = alpha_toolbar.child_window(control_type="Button",
auto_id="OverflowButton").wrapper_object()
self.assertEqual(button.automation_id(), "OverflowButton")
def test_is_visible(self):
"""Test is_visible method of a control"""
button = self.dlg.child_window(class_name="Button",
title="OK").wrapper_object()
self.assertEqual(button.is_visible(), True)
def test_is_enabled(self):
"""Test is_enabled method of a control"""
button = self.dlg.child_window(class_name="Button",
title="OK").wrapper_object()
self.assertEqual(button.is_enabled(), True)
def test_process_id(self):
"""Test process_id method of a control"""
button = self.dlg.child_window(class_name="Button",
title="OK").wrapper_object()
self.assertEqual(button.process_id(), self.dlg.process_id())
self.assertNotEqual(button.process_id(), 0)
def test_is_dialog(self):
"""Test is_dialog method of a control"""
button = self.dlg.child_window(class_name="Button",
title="OK").wrapper_object()
self.assertEqual(button.is_dialog(), False)
def test_parent(self):
"""Test getting a parent of a control"""
button = self.dlg.Alpha.wrapper_object()
self.assertEqual(button.parent(), self.dlg.wrapper_object())
def test_top_level_parent(self):
"""Test getting a top-level parent of a control"""
button = self.dlg.child_window(class_name="Button",
title="OK").wrapper_object()
self.assertEqual(button.top_level_parent(), self.dlg.wrapper_object())
def test_texts(self):
"""Test getting texts of a control"""
self.assertEqual(self.dlg.texts(), ['WPF Sample Application'])
def test_children(self):
"""Test getting children of a control"""
button = self.dlg.child_window(class_name="Button",
title="OK").wrapper_object()
self.assertEqual(len(button.children()), 1)
self.assertEqual(button.children()[0].class_name(), "TextBlock")
def test_children_generator(self):
"""Test iterating children of a control"""
button = self.dlg.child_window(class_name="Button", title="OK").wrapper_object()
children = [child for child in button.iter_children()]
self.assertEqual(len(children), 1)
self.assertEqual(children[0].class_name(), "TextBlock")
def test_descendants(self):
"""Test iterating descendants of a control"""
toolbar = self.dlg.child_window(title="Alpha", control_type="ToolBar").wrapper_object()
descendants = toolbar.descendants()
self.assertEqual(len(descendants), 7)
        def test_descendants_generator(self):
            """Test iterating descendants of a control via a generator"""
toolbar = self.dlg.child_window(title="Alpha", control_type="ToolBar").wrapper_object()
descendants = [desc for desc in toolbar.iter_descendants()]
self.assertSequenceEqual(toolbar.descendants(), descendants)
def test_is_child(self):
"""Test is_child method of a control"""
button = self.dlg.Alpha.wrapper_object()
self.assertEqual(button.is_child(self.dlg.wrapper_object()), True)
def test_equals(self):
"""Test controls comparisons"""
button = self.dlg.child_window(class_name="Button",
title="OK").wrapper_object()
self.assertNotEqual(button, self.dlg.wrapper_object())
self.assertEqual(button, button.element_info)
self.assertEqual(button, button)
def test_scroll(self):
"""Test scroll"""
# Check an exception on a non-scrollable control
button = self.dlg.child_window(class_name="Button",
title="OK").wrapper_object()
six.assertRaisesRegex(self, AttributeError, "not scrollable",
button.scroll, "left", "page")
# Check an exception on a control without horizontal scroll bar
tab = self.dlg.Tree_and_List_Views.set_focus()
listview = tab.children(class_name=u"ListView")[0]
six.assertRaisesRegex(self, AttributeError, "not horizontally scrollable",
listview.scroll, "right", "line")
# Check exceptions on wrong arguments
self.assertRaises(ValueError, listview.scroll, "bbbb", "line")
self.assertRaises(ValueError, listview.scroll, "up", "aaaa")
# Store a cell position
cell = listview.cell(3, 0)
orig_rect = cell.rectangle()
self.assertEqual(orig_rect.left > 0, True)
# Trigger a horizontal scroll bar on the control
hdr = listview.get_header_control()
hdr_itm = hdr.children()[1]
trf = hdr_itm.iface_transform
trf.resize(1000, 20)
listview.scroll("right", "page", 2)
self.assertEqual(cell.rectangle().left < 0, True)
# Check an exception on a control without vertical scroll bar
tab = self.dlg.ListBox_and_Grid.set_focus()
datagrid = tab.children(class_name=u"DataGrid")[0]
six.assertRaisesRegex(self, AttributeError, "not vertically scrollable",
datagrid.scroll, "down", "page")
# def testVerifyActionable(self):
# self.assertRaises()
# def testVerifyEnabled(self):
# self.assertRaises()
# def testVerifyVisible(self):
# self.assertRaises()
def test_is_keyboard_focusable(self):
"""Test is_keyboard focusable method of several controls"""
edit = self.dlg.TestLabelEdit.wrapper_object()
label = self.dlg.TestLabel.wrapper_object()
button = self.dlg.child_window(class_name="Button",
title="OK").wrapper_object()
self.assertEqual(button.is_keyboard_focusable(), True)
self.assertEqual(edit.is_keyboard_focusable(), True)
self.assertEqual(label.is_keyboard_focusable(), False)
def test_set_focus(self):
"""Test setting a keyboard focus on a control"""
edit = self.dlg.TestLabelEdit.wrapper_object()
edit.set_focus()
self.assertEqual(edit.has_keyboard_focus(), True)
def test_type_keys(self):
"""Test sending key types to a control"""
edit = self.dlg.TestLabelEdit.wrapper_object()
edit.type_keys("t")
self.assertEqual(edit.window_text(), "t")
edit.type_keys("e")
self.assertEqual(edit.window_text(), "te")
edit.type_keys("s")
self.assertEqual(edit.window_text(), "tes")
edit.type_keys("t")
self.assertEqual(edit.window_text(), "test")
edit.type_keys("T")
self.assertEqual(edit.window_text(), "testT")
edit.type_keys("y")
self.assertEqual(edit.window_text(), "testTy")
def test_no_pattern_interface_error(self):
"""Test a query interface exception handling"""
button = self.dlg.child_window(class_name="Button",
title="OK").wrapper_object()
elem = button.element_info.element
self.assertRaises(
uia_defs.NoPatternInterfaceError,
uia_defs.get_elem_interface,
elem,
"Selection",
)
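            # For contrast (a sketch, not asserted here): a pattern the control
            # does implement can be fetched and used the same way, e.g.
            #     iface = uia_defs.get_elem_interface(elem, "Invoke")
            #     iface.Invoke()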
def test_minimize_maximize(self):
"""Test window minimize/maximize operations"""
wrp = self.dlg.minimize()
self.dlg.wait_not('active')
self.assertEqual(wrp.is_minimized(), True)
wrp.maximize()
self.dlg.wait('active')
self.assertEqual(wrp.is_maximized(), True)
wrp.minimize()
self.dlg.wait_not('active')
wrp.restore()
self.dlg.wait('active')
self.assertEqual(wrp.is_normal(), True)
def test_get_properties(self):
"""Test getting writeble properties of a control"""
uia_props = set(['class_name',
'friendly_class_name',
'texts',
'control_id',
'rectangle',
'is_visible',
'is_enabled',
'control_count',
'is_keyboard_focusable',
'has_keyboard_focus',
'selection_indices',
'automation_id',
])
edit = self.dlg.TestLabelEdit.wrapper_object()
props = set(edit.get_properties().keys())
self.assertEqual(props, uia_props)
# def test_draw_outline(self):
# """Test the outline was drawn."""
# # not sure why, but this extra call makes the test stable
# self.dlg.draw_outline()
#
# # outline control
# button = self.dlg.OK.wrapper_object()
# button.draw_outline()
# img1 = button.capture_as_image()
# self.assertEqual(img1.getpixel((0, 0)), (0, 255, 0)) # green
#
# # outline window
# self.dlg.draw_outline(colour="blue")
# img2 = self.dlg.capture_as_image()
# self.assertEqual(img2.getpixel((0, 0)), (0, 0, 255)) # blue
def test_get_legacy_properties(self):
"""Test getting legacy properties of a control"""
expected_properties = {'Value': '',
'DefaultAction': 'Press',
'Description': '',
'Name': 'OK',
'Help': '',
'ChildId': 0,
'KeyboardShortcut': '',
'State': 1048576,
'Role': 43}
button_wrp = self.dlg.child_window(class_name="Button",
title="OK").wrapper_object()
actual_properties = button_wrp.legacy_properties()
self.assertEqual(actual_properties, expected_properties)
        def test_capture_as_image_multi_monitor(self):
            """Test capture_as_image in a mocked multi-monitor setup"""
with mock.patch('win32api.EnumDisplayMonitors') as mon_device:
mon_device.return_value = (1, 2)
rect = self.dlg.rectangle()
expected = (rect.width(), rect.height())
result = self.dlg.capture_as_image().size
self.assertEqual(expected, result)
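    # A runnable sketch (assumption, not part of the suite) for executing just
    # this class with the stock unittest machinery:
    #     suite = unittest.TestLoader().loadTestsFromTestCase(UIAWrapperTests)
    #     unittest.TextTestRunner(verbosity=2).run(suite)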
class UIAWrapperMouseTests(unittest.TestCase):
"""Unit tests for mouse actions of the UIAWrapper class"""
def setUp(self):
"""Set some data and ensure the application is in the state we want"""
_set_timings()
self.app = Application(backend='uia')
self.app = self.app.start(wpf_app_1)
dlg = self.app.WPFSampleApplication
self.button = dlg.child_window(class_name="Button",
title="OK").wrapper_object()
self.label = dlg.child_window(class_name="Text", title="TestLabel").wrapper_object()
def tearDown(self):
"""Close the application after tests"""
self.app.kill()
# def test_click(self):
# pass
def test_click_input(self):
"""Test click_input method of a control"""
time.sleep(0.5)
self.button.click_input()
self.assertEqual(self.label.window_text(), "LeftClick")
# def test_double_click(self):
# pass
def test_double_click_input(self):
"""Test double_click_input method of a control"""
self.button.double_click_input()
self.assertEqual(self.label.window_text(), "DoubleClick")
# def test_right_click(self):
# pass
def test_right_click_input(self):
"""Test right_click_input method of a control"""
time.sleep(0.5)
self.button.right_click_input()
self.assertEqual(self.label.window_text(), "RightClick")
# def test_press_move_release(self):
# pass
class UiaControlsTests(unittest.TestCase):
"""Unit tests for the UIA control wrappers"""
def setUp(self):
"""Set some data and ensure the application is in the state we want"""
_set_timings()
# start the application
app = Application(backend='uia')
self.app = app.start(wpf_app_1)
self.dlg = self.app.WPFSampleApplication
def tearDown(self):
"""Close the application after tests"""
self.app.kill()
def test_pretty_print(self):
"""Test __str__ and __repr__ methods for UIA based controls"""
if six.PY3:
assert_regex = self.assertRegex
else:
assert_regex = self.assertRegexpMatches
wrp = self.dlg.OK.wrapper_object()
assert_regex(wrp.__str__(), "^uia_controls\.ButtonWrapper - 'OK', Button$")
assert_regex(wrp.__repr__(), "^<uia_controls\.ButtonWrapper - 'OK', Button, [0-9-]+>$")
wrp = self.dlg.CheckBox.wrapper_object()
assert_regex(wrp.__str__(), "^uia_controls\.ButtonWrapper - 'CheckBox', CheckBox$", )
assert_regex(wrp.__repr__(), "^<uia_controls\.ButtonWrapper - 'CheckBox', CheckBox, [0-9-]+>$", )
wrp = self.dlg.child_window(class_name="TextBox").wrapper_object()
assert_regex(wrp.__str__(), "^uia_controls\.EditWrapper - '', Edit$")
assert_regex(wrp.__repr__(), "^<uia_controls\.EditWrapper - '', Edit, [0-9-]+>$")
assert_regex(wrp.element_info.__str__(), "^uia_element_info.UIAElementInfo - '', TextBox$")
assert_regex(wrp.element_info.__repr__(), "^<uia_element_info.UIAElementInfo - '', TextBox, None>$")
wrp = self.dlg.TabControl.wrapper_object()
assert_regex(wrp.__str__(), "^uia_controls\.TabControlWrapper - '', TabControl$")
assert_regex(wrp.__repr__(), "^<uia_controls\.TabControlWrapper - '', TabControl, [0-9-]+>$")
wrp = self.dlg.MenuBar.wrapper_object()
assert_regex(wrp.__str__(), "^uia_controls\.MenuWrapper - 'System', Menu$")
assert_regex(wrp.__repr__(), "^<uia_controls\.MenuWrapper - 'System', Menu, [0-9-]+>$")
wrp = self.dlg.Slider.wrapper_object()
assert_regex(wrp.__str__(), "^uia_controls\.SliderWrapper - '', Slider$")
assert_regex(wrp.__repr__(), "^<uia_controls\.SliderWrapper - '', Slider, [0-9-]+>$")
wrp = self.dlg.TestLabel.wrapper_object()
assert_regex(wrp.__str__(),
"^uia_controls.StaticWrapper - 'TestLabel', Static$")
assert_regex(wrp.__repr__(),
"^<uia_controls.StaticWrapper - 'TestLabel', Static, [0-9-]+>$")
wrp = self.dlg.wrapper_object()
assert_regex(wrp.__str__(), "^uiawrapper\.UIAWrapper - 'WPF Sample Application', Dialog$")
assert_regex(wrp.__repr__(), "^<uiawrapper\.UIAWrapper - 'WPF Sample Application', Dialog, [0-9-]+>$")
# ElementInfo.__str__
assert_regex(wrp.element_info.__str__(),
"^uia_element_info.UIAElementInfo - 'WPF Sample Application', Window$")
assert_regex(wrp.element_info.__repr__(),
"^<uia_element_info.UIAElementInfo - 'WPF Sample Application', Window, [0-9-]+>$")
# mock a failure in window_text() method
orig = wrp.window_text
wrp.window_text = mock.Mock(return_value="") # empty text
assert_regex(wrp.__str__(), "^uiawrapper\.UIAWrapper - '', Dialog$")
assert_regex(wrp.__repr__(), "^<uiawrapper\.UIAWrapper - '', Dialog, [0-9-]+>$")
wrp.window_text.return_value = u'\xd1\xc1\\\xa1\xb1\ua000' # unicode string
assert_regex(wrp.__str__(), "^uiawrapper\.UIAWrapper - '.+', Dialog$")
wrp.window_text = orig # restore the original method
# mock a failure in element_info.name property (it's based on _get_name())
orig = wrp.element_info._get_name
wrp.element_info._get_name = mock.Mock(return_value=None)
assert_regex(wrp.element_info.__str__(), "^uia_element_info\.UIAElementInfo - 'None', Window$")
assert_regex(wrp.element_info.__repr__(), "^<uia_element_info\.UIAElementInfo - 'None', Window, [0-9-]+>$")
wrp.element_info._get_name = orig
def test_pretty_print_encode_error(self):
"""Test __repr__ method for BaseWrapper with specific Unicode text (issue #594)"""
wrp = self.dlg.wrapper_object()
wrp.window_text = mock.Mock(return_value=u'\xb7')
print(wrp)
print(repr(wrp))
def test_friendly_class_names(self):
"""Test getting friendly class names of common controls"""
button = self.dlg.OK.wrapper_object()
self.assertEqual(button.friendly_class_name(), "Button")
friendly_name = self.dlg.CheckBox.friendly_class_name()
self.assertEqual(friendly_name, "CheckBox")
friendly_name = self.dlg.Apply.friendly_class_name()
self.assertEqual(friendly_name, "Button")
friendly_name = self.dlg.ToggleMe.friendly_class_name()
self.assertEqual(friendly_name, "Button")
friendly_name = self.dlg.Yes.friendly_class_name()
self.assertEqual(friendly_name, "RadioButton")
friendly_name = self.dlg.TabControl.friendly_class_name()
self.assertEqual(friendly_name, "TabControl")
edit = self.dlg.child_window(class_name="TextBox").wrapper_object()
self.assertEqual(edit.friendly_class_name(), "Edit")
slider = self.dlg.Slider.wrapper_object()
self.assertEqual(slider.friendly_class_name(), "Slider")
self.assertEqual(self.dlg.MenuBar.friendly_class_name(), "Menu")
self.assertEqual(self.dlg.Toolbar.friendly_class_name(), "Toolbar")
# Switch tab view
tab_item_wrp = self.dlg.TreeAndListViews.set_focus()
ctrl = tab_item_wrp.children(control_type="DataGrid")[0]
self.assertEqual(ctrl.friendly_class_name(), "ListView")
i = ctrl.get_item(1)
self.assertEqual(i.friendly_class_name(), "DataItem")
ctrl = tab_item_wrp.children(control_type="Tree")[0]
self.assertEqual(ctrl.friendly_class_name(), "TreeView")
ti = self.dlg.Tree_and_List_ViewsTabItem.DateElements
self.assertEqual(ti.friendly_class_name(), "TreeItem")
def test_check_box(self):
"""Test 'toggle' and 'toggle_state' for the check box control"""
# Get a current state of the check box control
check_box = self.dlg.CheckBox.wrapper_object()
cur_state = check_box.get_toggle_state()
self.assertEqual(cur_state, uia_defs.toggle_state_inderteminate)
# Toggle the next state
cur_state = check_box.toggle().get_toggle_state()
# Get a new state of the check box control
self.assertEqual(cur_state, uia_defs.toggle_state_off)
def test_toggle_button(self):
"""Test 'toggle' and 'toggle_state' for the toggle button control"""
# Get a current state of the check box control
button = self.dlg.ToggleMe.wrapper_object()
cur_state = button.get_toggle_state()
self.assertEqual(cur_state, uia_defs.toggle_state_on)
# Toggle the next state
cur_state = button.toggle().get_toggle_state()
# Get a new state of the check box control
self.assertEqual(cur_state, uia_defs.toggle_state_off)
# Toggle the next state
cur_state = button.toggle().get_toggle_state()
self.assertEqual(cur_state, uia_defs.toggle_state_on)
def test_button_click(self):
"""Test the click method for the Button control"""
label = self.dlg.child_window(class_name="Text",
title="TestLabel").wrapper_object()
self.dlg.Apply.click()
self.assertEqual(label.window_text(), "ApplyClick")
def test_radio_button(self):
"""Test 'select' and 'is_selected' for the radio button control"""
yes = self.dlg.Yes.wrapper_object()
cur_state = yes.is_selected()
self.assertEqual(cur_state, False)
cur_state = yes.select().is_selected()
self.assertEqual(cur_state, True)
no = self.dlg.No.wrapper_object()
cur_state = no.click().is_selected()
self.assertEqual(cur_state, True)
def test_combobox_texts(self):
"""Test items texts for the combo box control"""
# The ComboBox on the sample app has following items:
# 0. Combo Item 1
# 1. Combo Item 2
ref_texts = ['Combo Item 1', 'Combo Item 2']
combo_box = self.dlg.ComboBox.wrapper_object()
self.assertEqual(combo_box.item_count(), len(ref_texts))
for t in combo_box.texts():
self.assertEqual((t in ref_texts), True)
# Mock a 0 pointer to COM element
combo_box.iface_item_container.FindItemByProperty = mock.Mock(return_value=0)
self.assertEqual(combo_box.texts(), ref_texts)
# Mock a combobox without "ItemContainer" pattern
combo_box.iface_item_container.FindItemByProperty = mock.Mock(side_effect=uia_defs.NoPatternInterfaceError())
self.assertEqual(combo_box.texts(), ref_texts)
# Mock a combobox without "ExpandCollapse" pattern
# Expect empty texts
combo_box.iface_expand_collapse.Expand = mock.Mock(side_effect=uia_defs.NoPatternInterfaceError())
self.assertEqual(combo_box.texts(), [])
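            # The mocks above exercise the texts() fallback chain: when the
            # ItemContainer pattern fails, items are enumerated by expanding
            # the list; when ExpandCollapse is also unavailable, texts()
            # returns an empty list.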
def test_combobox_select(self):
"""Test select related methods for the combo box control"""
combo_box = self.dlg.ComboBox.wrapper_object()
# Verify combobox properties and an initial state
self.assertEqual(combo_box.can_select_multiple(), 0)
self.assertEqual(combo_box.is_selection_required(), False)
self.assertEqual(len(combo_box.get_selection()), 0)
# The ComboBox on the sample app has following items:
# 0. Combo Item 1
# 1. Combo Item 2
combo_box.select(0)
self.assertEqual(combo_box.selected_text(), 'Combo Item 1')
self.assertEqual(combo_box.selected_index(), 0)
collapsed = combo_box.is_collapsed()
self.assertEqual(collapsed, True)
combo_box.select(1)
self.assertEqual(combo_box.selected_text(), 'Combo Item 2')
self.assertEqual(combo_box.selected_index(), 1)
combo_box.select('Combo Item 1')
self.assertEqual(combo_box.selected_text(), 'Combo Item 1')
# Try to use unsupported item type as a parameter for select
self.assertRaises(ValueError, combo_box.select, 1.2)
# Try to select a non-existing item,
# verify the selected item didn't change
self.assertRaises(IndexError, combo_box.select, 'Combo Item 23455')
self.assertEqual(combo_box.selected_text(), 'Combo Item 1')
def test_combobox_expand_collapse(self):
"""Test 'expand' and 'collapse' for the combo box control"""
combo_box = self.dlg.ComboBox.wrapper_object()
collapsed = combo_box.is_collapsed()
self.assertEqual(collapsed, True)
expanded = combo_box.expand().is_expanded()
self.assertEqual(expanded, True)
collapsed = combo_box.collapse().is_collapsed()
self.assertEqual(collapsed, True)
class TabControlWrapperTests(unittest.TestCase):
"""Unit tests for the TabControlWrapper class"""
def setUp(self):
"""Set some data and ensure the application is in the state we want"""
_set_timings()
# start the application
app = Application(backend='uia')
app = app.start(wpf_app_1)
dlg = app.WPFSampleApplication
self.app = app
self.ctrl = dlg.child_window(class_name="TabControl").wrapper_object()
self.texts = [u"General", u"Tree and List Views", u"ListBox and Grid"]
def tearDown(self):
"""Close the application after tests"""
self.app.kill()
def test_tab_count(self):
"""Test the tab count in the Tab control"""
self.assertEqual(self.ctrl.tab_count(), len(self.texts))
def test_get_selected_tab(self):
"""Test selecting a tab by index or by name and getting an index of the selected tab"""
# Select a tab by name, use chaining to get the index of the selected tab
idx = self.ctrl.select(u"Tree and List Views").get_selected_tab()
self.assertEqual(idx, 1)
# Select a tab by index
self.ctrl.select(0)
self.assertEqual(self.ctrl.get_selected_tab(), 0)
def test_texts(self):
"""Make sure the tabs captions are read correctly"""
self.assertEqual(self.ctrl.texts(), self.texts)
class EditWrapperTests(unittest.TestCase):
"""Unit tests for the EditWrapper class"""
def setUp(self):
"""Set some data and ensure the application is in the state we want"""
_set_timings()
# start the application
app = Application(backend='uia')
app = app.start(wpf_app_1)
self.app = app
self.dlg = app.WPFSampleApplication
self.edit = self.dlg.child_window(class_name="TextBox").wrapper_object()
def tearDown(self):
"""Close the application after tests"""
self.app.kill()
def test_set_window_text(self):
"""Test setting text value of control (the text in textbox itself)"""
text_to_set = "This test"
self.assertRaises(UserWarning, self.edit.set_window_text, text_to_set)
self.assertEqual(self.edit.text_block(), text_to_set)
self.assertRaises(UserWarning, self.edit.set_window_text, " is done", True)
self.assertEqual(self.edit.text_block(), text_to_set + " is done")
def test_set_text(self):
"""Test setting the text of the edit control"""
self.edit.set_edit_text("Some text")
self.assertEqual(self.edit.text_block(), "Some text")
self.edit.set_edit_text(579)
self.assertEqual(self.edit.text_block(), "579")
self.edit.set_edit_text(333, pos_start=1, pos_end=2)
self.assertEqual(self.edit.text_block(), "53339")
def test_line_count(self):
"""Test getting the line count of the edit control"""
self.edit.set_edit_text("Here is some text")
self.assertEqual(self.edit.line_count(), 1)
        def test_get_line(self):
"""Test getting each line of the edit control"""
test_data = "Here is some text"
self.edit.set_edit_text(test_data)
self.assertEqual(self.edit.get_line(0), test_data)
def test_get_value(self):
"""Test getting value of the edit control"""
test_data = "Some value"
self.edit.set_edit_text(test_data)
self.assertEqual(self.edit.get_value(), test_data)
def test_text_block(self):
"""Test getting the text block of the edit control"""
test_data = "Here is some text"
self.edit.set_edit_text(test_data)
self.assertEqual(self.edit.text_block(), test_data)
def test_select(self):
"""Test selecting text in the edit control in various ways"""
self.edit.set_edit_text("Some text")
self.edit.select(0, 0)
self.assertEqual((0, 0), self.edit.selection_indices())
self.edit.select()
self.assertEqual((0, 9), self.edit.selection_indices())
self.edit.select(1, 7)
self.assertEqual((1, 7), self.edit.selection_indices())
self.edit.select(5, 2)
self.assertEqual((2, 5), self.edit.selection_indices())
self.edit.select("me t")
self.assertEqual((2, 6), self.edit.selection_indices())
self.assertRaises(RuntimeError, self.edit.select, "123")
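            # Behavior exercised above: select(start, end) normalizes reversed
            # indices, select(<str>) selects the first occurrence of the given
            # substring, and a missing substring raises RuntimeError.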
class SliderWrapperTests(unittest.TestCase):
"""Unit tests for the EditWrapper class"""
def setUp(self):
"""Set some data and ensure the application is in the state we want"""
_set_timings()
# start the application
app = Application(backend='uia')
app = app.start(wpf_app_1)
self.app = app
self.dlg = app.WPFSampleApplication
self.slider = self.dlg.child_window(class_name="Slider").wrapper_object()
def tearDown(self):
"""Close the application after tests"""
self.app.kill()
def test_min_value(self):
"""Test getting minimum value of the Slider"""
self.assertEqual(self.slider.min_value(), 0.0)
def test_max_value(self):
"""Test getting maximum value of the Slider"""
self.assertEqual(self.slider.max_value(), 100.0)
def test_small_change(self):
"""Test Getting small change of slider's thumb"""
self.assertEqual(self.slider.small_change(), 0.1)
def test_large_change(self):
"""Test Getting large change of slider's thumb"""
self.assertEqual(self.slider.large_change(), 1.0)
def test_value(self):
"""Test getting current position of slider's thumb"""
self.assertEqual(self.slider.value(), 70.0)
def test_set_value(self):
"""Test setting position of slider's thumb"""
self.slider.set_value(24)
self.assertEqual(self.slider.value(), 24.0)
self.slider.set_value(33.3)
self.assertEqual(self.slider.value(), 33.3)
self.slider.set_value("75.4")
self.assertEqual(self.slider.value(), 75.4)
self.assertRaises(ValueError, self.slider.set_value, -1)
self.assertRaises(ValueError, self.slider.set_value, 102)
self.assertRaises(ValueError, self.slider.set_value, [50, ])
class ListViewWrapperTests(unittest.TestCase):
"""Unit tests for the ListViewWrapper class"""
def setUp(self):
"""Set some data and ensure the application is in the state we want"""
_set_timings()
# start the application
app = Application(backend='uia')
app = app.start(wpf_app_1)
dlg = app.WPFSampleApplication
self.app = app
self.listview_tab = dlg.Tree_and_List_Views
self.listbox_datagrid_tab = dlg.ListBox_and_Grid
self.listview_texts = [
[u"1", u"Tomatoe", u"Red"],
[u"2", u"Cucumber", u"Green", ],
[u"3", u"Reddish", u"Purple", ],
[u"4", u"Cauliflower", u"White", ],
[u"5", u"Cupsicum", u"Yellow", ],
[u"6", u"Cupsicum", u"Red", ],
[u"7", u"Cupsicum", u"Green", ],
]
self.listbox_texts = [
[u"TextItem 1", ],
[u"TextItem 2", ],
[u"ButtonItem", ],
[u"CheckItem", ],
[u"TextItem 3", ],
[u"TextItem 4", ],
[u"TextItem 5", ],
[u"TextItem 6", ],
[u"TextItem 7", ],
[u"TextItem 8", ],
]
self.datagrid_texts = [
[u"0", u"A0", u"B0", u"C0", u"D0", u"E0", u"", ],
[u"1", u"A1", u"B1", u"C1", u"D1", u"E1", u"", ],
[u"2", u"A2", u"B2", u"C2", u"D2", u"E2", u"", ],
[u"3", u"A3", u"B3", u"C3", u"D3", u"E3", u"", ],
]
def tearDown(self):
"""Close the application after tests"""
self.app.kill()
def test_friendly_class_name(self):
"""Test friendly class name of the ListView controls"""
# ListView
self.listview_tab.set_focus()
listview = self.listview_tab.children(class_name=u"ListView")[0]
self.assertEqual(listview.friendly_class_name(), u"ListView")
# ListBox
self.listbox_datagrid_tab.set_focus()
listbox = self.listbox_datagrid_tab.children(class_name=u"ListBox")[0]
self.assertEqual(listbox.friendly_class_name(), u"ListBox")
# DataGrid
datagrid = self.listbox_datagrid_tab.children(class_name=u"DataGrid")[0]
self.assertEqual(datagrid.friendly_class_name(), u"ListView")
def test_item_count(self):
"""Test the items count in the ListView controls"""
# ListView
self.listview_tab.set_focus()
listview = self.listview_tab.children(class_name=u"ListView")[0]
self.assertEqual(listview.item_count(), len(self.listview_texts))
# ListBox
self.listbox_datagrid_tab.set_focus()
#listbox = self.listbox_datagrid_tab.children(class_name=u"ListBox")[0]
# self.assertEqual(listbox.item_count(), len(self.listbox_texts))
# DataGrid
datagrid = self.listbox_datagrid_tab.children(class_name=u"DataGrid")[0]
self.assertEqual(datagrid.item_count(), len(self.datagrid_texts))
def test_column_count(self):
"""Test the columns count in the ListView controls"""
# ListView
self.listview_tab.set_focus()
listview = self.listview_tab.children(class_name=u"ListView")[0]
self.assertEqual(listview.column_count(), len(self.listview_texts[0]))
# ListBox
self.listbox_datagrid_tab.set_focus()
listbox = self.listbox_datagrid_tab.children(class_name=u"ListBox")[0]
self.assertEqual(listbox.column_count(), 0)
# DataGrid
datagrid = self.listbox_datagrid_tab.children(class_name=u"DataGrid")[0]
self.assertEqual(datagrid.column_count(), len(self.datagrid_texts[0]) - 1)
def test_get_header_control(self):
"""Test getting a Header control and Header Item control of ListView controls"""
# ListView
self.listview_tab.set_focus()
listview = self.listview_tab.children(class_name=u"ListView")[0]
hdr_ctl = listview.get_header_control()
self.assertTrue(isinstance(hdr_ctl, uia_ctls.HeaderWrapper))
# HeaderItem of ListView
hdr_itm = hdr_ctl.children()[2]
self.assertTrue(isinstance(hdr_itm, uia_ctls.HeaderItemWrapper))
self.assertTrue(hdr_itm.iface_transform.CurrentCanResize, True)
# ListBox
self.listbox_datagrid_tab.set_focus()
listbox = self.listbox_datagrid_tab.children(class_name=u"ListBox")[0]
self.assertEqual(listbox.get_header_control(), None)
# DataGrid
datagrid = self.listbox_datagrid_tab.children(class_name=u"DataGrid")[0]
self.assertTrue(isinstance(datagrid.get_header_control(), uia_ctls.HeaderWrapper))
def test_get_column(self):
"""Test get_column() method for the ListView controls"""
# ListView
self.listview_tab.set_focus()
listview = self.listview_tab.children(class_name=u"ListView")[0]
listview_col = listview.get_column(1)
self.assertEqual(listview_col.texts()[0], u"Name")
# ListBox
self.listbox_datagrid_tab.set_focus()
listbox = self.listbox_datagrid_tab.children(class_name=u"ListBox")[0]
self.assertRaises(IndexError, listbox.get_column, 0)
# DataGrid
datagrid = self.listbox_datagrid_tab.children(class_name=u"DataGrid")[0]
datagrid_col = datagrid.get_column(2)
self.assertEqual(datagrid_col.texts()[0], u"B")
self.assertRaises(IndexError, datagrid.get_column, 10)
def test_cell(self):
"""Test getting a cell of the ListView controls"""
# ListView
self.listview_tab.set_focus()
listview = self.listview_tab.children(class_name=u"ListView")[0]
cell = listview.cell(3, 2)
self.assertEqual(cell.window_text(), self.listview_texts[3][2])
# ListBox
self.listbox_datagrid_tab.set_focus()
listbox = self.listbox_datagrid_tab.children(class_name=u"ListBox")[0]
cell = listbox.cell(10, 10)
self.assertEqual(cell, None)
# DataGrid
datagrid = self.listbox_datagrid_tab.children(class_name=u"DataGrid")[0]
cell = datagrid.cell(2, 0)
self.assertEqual(cell.window_text(), self.datagrid_texts[2][0])
self.assertRaises(TypeError, datagrid.cell, 1.5, 1)
self.assertRaises(IndexError, datagrid.cell, 10, 10)
def test_cells(self):
"""Test getting a cells of the ListView controls"""
def compare_cells(cells, control):
for i in range(0, control.item_count()):
for j in range(0, control.column_count()):
self.assertEqual(cells[i][j], control.cell(i, j))
# ListView
self.listview_tab.set_focus()
listview = self.listview_tab.children(class_name=u"ListView")[0]
compare_cells(listview.cells(), listview)
# DataGrid
self.listbox_datagrid_tab.set_focus()
datagrid = self.listbox_datagrid_tab.children(class_name=u"DataGrid")[0]
compare_cells(datagrid.cells(), datagrid)
# ListBox
self.listbox_datagrid_tab.set_focus()
listbox = self.listbox_datagrid_tab.children(class_name=u"ListBox")[0]
cells = listbox.cells()
self.assertEqual(cells[listbox.item_count() - 1][0].window_text(), "TextItem 7")
self.assertEqual(cells[3][0].window_text(), "CheckItem")
def test_get_item(self):
"""Test getting an item of ListView controls"""
# ListView
self.listview_tab.set_focus()
listview = self.listview_tab.children(class_name=u"ListView")[0]
item = listview.get_item(u"Reddish")
self.assertEqual(item.texts(), self.listview_texts[2])
self.assertRaises(ValueError, listview.get_item, u"Apple")
# ListBox
self.listbox_datagrid_tab.set_focus()
listbox = self.listbox_datagrid_tab.children(class_name=u"ListBox")[0]
item = listbox.get_item(u"TextItem 2")
self.assertEqual(item.texts(), self.listbox_texts[1])
item = listbox.get_item(3)
self.assertEqual(item.texts(), self.listbox_texts[3])
item = listbox.get_item(u"TextItem 8")
self.assertEqual(item.texts(), self.listbox_texts[9])
# DataGrid
datagrid = self.listbox_datagrid_tab.children(class_name=u"DataGrid")[0]
item = datagrid.get_item(u"B2")
self.assertEqual(item.texts(), self.datagrid_texts[2])
item = datagrid.get_item(3)
self.assertEqual(item.texts(), self.datagrid_texts[3])
self.assertRaises(TypeError, datagrid.get_item, 12.3)
def test_get_items(self):
"""Test getting all items of ListView controls"""
self.listview_tab.set_focus()
listview = self.listview_tab.children(class_name=u"ListView")[0]
content = [item.texts() for item in listview.get_items()]
self.assertEqual(content, self.listview_texts)
# ListBox
self.listbox_datagrid_tab.set_focus()
listbox = self.listbox_datagrid_tab.children(class_name=u"ListBox")[0]
content = [item.texts() for item in listbox.get_items()]
# self.assertEqual(content, self.listbox_texts)
# DataGrid
datagrid = self.listbox_datagrid_tab.children(class_name=u"DataGrid")[0]
content = [item.texts() for item in datagrid.get_items()]
self.assertEqual(content, self.datagrid_texts)
def test_texts(self):
"""Test getting all items of ListView controls"""
self.listview_tab.set_focus()
listview = self.listview_tab.children(class_name=u"ListView")[0]
self.assertEqual(listview.texts(), self.listview_texts)
# ListBox
self.listbox_datagrid_tab.set_focus()
#listbox = self.listbox_datagrid_tab.children(class_name=u"ListBox")[0]
# self.assertEqual(listbox.texts(), self.listbox_texts)
# DataGrid
datagrid = self.listbox_datagrid_tab.children(class_name=u"DataGrid")[0]
self.assertEqual(datagrid.texts(), self.datagrid_texts)
def test_select_and_get_item(self):
"""Test selecting an item of the ListView control"""
self.listview_tab.set_focus()
self.ctrl = self.listview_tab.children(class_name=u"ListView")[0]
# Verify get_selected_count
self.assertEqual(self.ctrl.get_selected_count(), 0)
# Select by an index
row = 1
i = self.ctrl.get_item(row)
self.assertEqual(i.is_selected(), False)
self.assertRaises(uia_defs.NoPatternInterfaceError, i.is_checked)
i.select()
self.assertEqual(i.is_selected(), True)
cnt = self.ctrl.get_selected_count()
self.assertEqual(cnt, 1)
rect = self.ctrl.get_item_rect(row)
self.assertEqual(rect, i.rectangle())
# Select by text
row = '3'
i = self.ctrl.get_item(row)
i.select()
self.assertEqual(i.is_selected(), True)
row = 'White'
i = self.ctrl.get_item(row)
i.select()
i = self.ctrl.get_item(3) # re-get the item by a row index
self.assertEqual(i.is_selected(), True)
row = None
self.assertRaises(TypeError, self.ctrl.get_item, row)
class ListViewWrapperTestsWinForms(unittest.TestCase):
"""Unit tests for the ListViewWrapper class"""
def setUp(self):
"""Set some data and ensure the application is in the state we want"""
_set_timings()
self.app = Application(backend='uia').start(winfoms_app_grid)
self.dlg = self.app.Dialog
self.add_col_button = self.dlg.AddCol
self.add_row_button = self.dlg.AddRow
self.row_header_button = self.dlg.RowHeader
self.col_header_button = self.dlg.ColHeader
self.list_box = self.dlg.ListBox
def tearDown(self):
"""Close the application after tests"""
self.app.kill()
def test_list_box_item_selection(self):
"""Test get_item method"""
self.list_box.set_focus()
list_box_item = self.list_box.get_item('item (2)')
self.assertFalse(list_box_item.is_selected())
list_box_item.select()
self.assertTrue(list_box_item.is_selected())
def test_list_box_getitem_overload(self):
"""Test __getitem__ method"""
self.list_box.set_focus()
list_box_item = self.list_box['item (2)']
self.assertFalse(list_box_item.is_selected())
list_box_item.select()
self.assertTrue(list_box_item.is_selected())
def test_empty_grid(self):
"""Test some error cases handling"""
self.dlg.set_focus()
table = self.dlg.Table
self.assertEqual(len(table.cells()), 0)
self.assertRaises(IndexError, table.cell, 0, 0)
self.assertRaises(IndexError, table.get_item, 0)
def test_skip_headers(self):
"""Test some error cases handling"""
self.dlg.set_focus()
self.add_col_button.click()
table = self.dlg.Table
cells = table.cells()
self.assertEqual(len(cells), 1)
self.assertEqual(len(cells[0]), 1)
self.assertFalse(isinstance(cells[0][0], uia_ctls.HeaderWrapper))
def test_cell_and_cells_equals(self):
"""Test equivalence of cell and cells methods"""
def compare_cells():
table = self.dlg.Table
cells = table.cells()
self.assertEqual(len(cells), 3)
self.assertEqual(len(cells[0]), 2)
for row_ind in range(0, 3):
for col_ind in range(0, 2):
self.assertEqual(cells[row_ind][col_ind], table.cell(row_ind, col_ind))
self.add_col_button.click()
self.add_col_button.click()
self.add_row_button.click()
self.add_row_button.click()
compare_cells()
self.row_header_button.click()
compare_cells()
self.row_header_button.click()
self.col_header_button.click()
compare_cells()
def test_unsupported_columns(self):
"""Test raise NotImplemented errors for columns methods"""
self.dlg.set_focus()
table = self.dlg.Table
self.assertRaises(NotImplementedError, table.column_count)
self.assertRaises(NotImplementedError, table.get_column, 0)
def test_get_header_controls(self):
"""Test get header controls method"""
self.add_col_button.click()
table = self.dlg.Table
headers = table.get_header_controls()
self.assertEqual(len(headers), 3)
self.col_header_button.click()
headers = table.get_header_controls()
self.assertEqual(len(headers), 1)
self.row_header_button.click()
headers = table.get_header_controls()
self.assertEqual(len(headers), 0)
class MenuBarTestsWinForms(unittest.TestCase):
"""Unit tests for the MenuBar class"""
def setUp(self):
"""Set some data and ensure the application is in the state we want"""
_set_timings()
self.app = Application(backend='uia').start(winfoms_app_grid)
self.dlg = self.app.Dialog
def tearDown(self):
"""Close the application after tests"""
self.app.kill()
def test_can_select_multiple_items(self):
"""Test menu_select multimple items with action"""
table = self.dlg.Table
cells = table.cells()
self.assertEqual(len(table.cells()), 0)
self.dlg.menu_select('#0 -> #1 -> #1 -> #0 -> #0 -> #4 ->#0')
cells = table.cells()
self.assertEqual(len(cells), 1)
self.assertEqual(len(cells[0]), 1)
def test_can_select_top_menu(self):
"""Test menu_select with single item"""
first_menu_item = self.dlg['menuStrip1'].children()[0]
point = first_menu_item.rectangle().mid_point()
child_from_point = self.dlg.from_point(point.x, point.y + 20)
self.assertEqual(child_from_point.element_info.name, 'Form1')
self.dlg.menu_select('tem1')
time.sleep(0.1)
child_from_point = self.dlg.from_point(point.x, point.y + 20)
self.assertEqual(child_from_point.element_info.name, 'tem1DropDown')
class EditTestsWinForms(unittest.TestCase):
"""Unit tests for the WinFormEdit class"""
def setUp(self):
"""Set some data and ensure the application is in the state we want"""
_set_timings()
self.app = Application(backend='uia').start(winfoms_app_grid)
self.dlg = self.app.Dialog
def tearDown(self):
"""Close the application after tests"""
self.app.kill()
def test_readonly_and_editable_edits(self):
"""Test editable method for editable edit"""
self.assertEqual(self.dlg.Edit2.get_value(), "Editable")
self.assertTrue(self.dlg.Edit2.is_editable())
self.assertEqual(self.dlg.Edit1.get_value(), "ReadOnly")
self.assertFalse(self.dlg.Edit1.is_editable())
class ComboBoxTestsWinForms(unittest.TestCase):
"""Unit tests for the ComboBoxWrapper class with WinForms app"""
def setUp(self):
"""Set some data and ensure the application is in the state we want"""
_set_timings()
# start the application
app = Application(backend='uia')
self.app = app.start(winfoms_app_grid)
self.dlg = dlg = app.Form1
self.combo_editable = dlg.child_window(auto_id="comboRowType", control_type="ComboBox").wrapper_object()
self.combo_fixed = dlg.child_window(auto_id="comboBoxReadOnly", control_type="ComboBox").wrapper_object()
self.combo_simple = dlg.child_window(auto_id="comboBoxSimple", control_type="ComboBox").wrapper_object()
def tearDown(self):
"""Close the application after tests"""
self.app.kill()
def test_expand_collapse(self):
"""Test methods .expand() and .collapse() for WinForms combo box"""
self.dlg.set_focus()
test_data = [(self.combo_editable, 'editable'), (self.combo_fixed, 'fixed'), (self.combo_simple, 'simple')]
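            # Note (an assumption about the WinForms "simple" combo box style):
            # its list is always visible, so the collapsed-state checks below
            # are skipped for combo_simple.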
for combo, combo_name in test_data:
if combo != self.combo_simple:
self.assertFalse(combo.is_expanded(),
msg='{} combo box must be collapsed initially'.format(combo_name))
# test that method allows chaining
self.assertEqual(combo.expand(), combo,
msg='Method .expand() for {} combo box must return self'.format(combo_name))
self.assertTrue(combo.is_expanded(),
msg='{} combo box has not been expanded!'.format(combo_name))
# .expand() keeps already expanded state (and still allows chaining)
self.assertEqual(combo.expand(), combo,
msg='Method .expand() for {} combo box must return self, always!'.format(combo_name))
self.assertTrue(combo.is_expanded(),
msg='{} combo box does NOT keep expanded state!'.format(combo_name))
# collapse
self.assertEqual(combo.collapse(), combo,
msg='Method .collapse() for {} combo box must return self'.format(combo_name))
if combo != self.combo_simple:
self.assertFalse(combo.is_expanded(),
msg='{} combo box has not been collapsed!'.format(combo_name))
# collapse already collapsed should keep collapsed state
self.assertEqual(combo.collapse(), combo,
msg='Method .collapse() for {} combo box must return self, always!'.format(combo_name))
if combo != self.combo_simple:
self.assertFalse(combo.is_expanded(),
msg='{} combo box does NOT keep collapsed state!'.format(combo_name))
def test_texts(self):
"""Test method .texts() for WinForms combo box"""
self.dlg.set_focus()
editable_texts = [u'Numbers', u'Letters', u'Special symbols']
fixed_texts = [u'Item 1', u'Item 2', u'Last Item']
simple_texts = [u'Simple 1', u'Simple Two', u'The Simplest']
self.assertEqual(self.combo_editable.texts(), editable_texts)
self.assertEqual(self.combo_editable.expand().texts(), editable_texts)
self.assertTrue(self.combo_editable.is_expanded())
self.combo_editable.collapse()
self.assertEqual(self.combo_fixed.texts(), fixed_texts)
self.assertEqual(self.combo_fixed.expand().texts(), fixed_texts)
self.assertTrue(self.combo_fixed.is_expanded())
self.combo_fixed.collapse()
self.assertEqual(self.combo_simple.texts(), simple_texts)
self.assertEqual(self.combo_simple.expand().texts(), simple_texts)
self.assertTrue(self.combo_simple.is_expanded())
self.combo_simple.collapse()
def test_select(self):
"""Test method .select() for WinForms combo box"""
self.dlg.set_focus()
self.combo_editable.select(u'Letters')
self.assertEqual(self.combo_editable.selected_text(), u'Letters')
self.assertEqual(self.combo_editable.selected_index(), 1)
self.combo_editable.select(2)
self.assertEqual(self.combo_editable.selected_text(), u'Special symbols')
self.assertEqual(self.combo_editable.selected_index(), 2)
self.combo_fixed.select(u'Last Item')
self.assertEqual(self.combo_fixed.selected_text(), u'Last Item')
self.assertEqual(self.combo_fixed.selected_index(), 2)
self.combo_fixed.select(1)
self.assertEqual(self.combo_fixed.selected_text(), u'Item 2')
self.assertEqual(self.combo_fixed.selected_index(), 1)
self.combo_simple.select(u'The Simplest')
self.assertEqual(self.combo_simple.selected_text(), u'The Simplest')
self.assertEqual(self.combo_simple.selected_index(), 2)
self.combo_simple.select(0)
self.assertEqual(self.combo_simple.selected_text(), u'Simple 1')
self.assertEqual(self.combo_simple.selected_index(), 0)
def test_select_errors(self):
"""Test errors in method .select() for WinForms combo box"""
self.dlg.set_focus()
for combo in [self.combo_editable, self.combo_fixed, self.combo_simple]:
self.assertRaises(IndexError, combo.select, u'FFFF')
self.assertRaises(IndexError, combo.select, 50)
def test_item_count(self):
"""Test method .item_count() for WinForms combo box"""
self.dlg.set_focus()
self.assertEqual(self.combo_editable.item_count(), 3)
self.assertEqual(self.combo_fixed.item_count(), 3)
self.assertEqual(self.combo_simple.item_count(), 3)
def test_from_point(self):
"""Test method .from_point() for WinForms combo box"""
self.dlg.set_focus()
x, y = self.combo_fixed.rectangle().mid_point()
combo_from_point = self.dlg.from_point(x, y)
self.assertEqual(combo_from_point, self.combo_fixed)
combo2_from_point = Desktop(backend="uia").from_point(x, y)
self.assertEqual(combo2_from_point, self.combo_fixed)
def test_top_from_point(self):
"""Test method .top_from_point() for WinForms combo box"""
dlg_wrapper = self.dlg.set_focus()
x, y = self.combo_fixed.rectangle().mid_point()
dlg_from_point = self.dlg.top_from_point(x, y)
self.assertEqual(dlg_from_point, dlg_wrapper)
dlg2_from_point = Desktop(backend="uia").top_from_point(x, y)
self.assertEqual(dlg2_from_point, dlg_wrapper)
class ListItemWrapperTests(unittest.TestCase):
"""Unit tests for the ListItemWrapper class"""
def setUp(self):
"""Set some data and ensure the application is in the state we want"""
_set_timings()
# start the application
app = Application(backend='uia')
app = app.start(wpf_app_1)
dlg = app.WPFSampleApplication
self.app = app
self.listview_tab = dlg.Tree_and_List_Views
self.listbox_datagrid_tab = dlg.ListBox_and_Grid
def tearDown(self):
"""Close the application after tests"""
self.app.kill()
def test_friendly_class_name(self):
"""Test getting friendly class name"""
# DataItem
self.listview_tab.set_focus()
listview_item = self.listview_tab.children(class_name=u"ListView")[0].get_item(2)
self.assertEqual(listview_item.friendly_class_name(), u"DataItem")
# ListBoxItem
self.listbox_datagrid_tab.set_focus()
listbox_item = self.listbox_datagrid_tab.children(class_name=u"ListBox")[0].get_item(3)
self.assertEqual(listbox_item.friendly_class_name(), u"ListItem")
# DataGridRow
datagrid_row = self.listbox_datagrid_tab.children(class_name=u"DataGrid")[0].get_item(1)
self.assertEqual(datagrid_row.friendly_class_name(), u"DataItem")
def test_selection(self):
"""Test selection of ListItem"""
self.listview_tab.set_focus()
listview_item = self.listview_tab.children(class_name=u"ListView")[0].get_item(2)
self.assertFalse(listview_item.is_selected())
listview_item.select()
self.assertTrue(listview_item.is_selected())
def test_is_checked(self):
"""Test is_checked() method of ListItemWrapper"""
self.listbox_datagrid_tab.set_focus()
listbox_item = self.listbox_datagrid_tab.children(class_name=u"ListBox")[0].get_item(u"CheckItem")
self.assertRaises(uia_defs.NoPatternInterfaceError, listbox_item.is_checked)
def test_texts(self):
"""Test getting texts of ListItem"""
self.listview_tab.set_focus()
listview_item = self.listview_tab.children(class_name=u"ListView")[0].get_item(1)
texts = [u"2", u"Cucumber", u"Green"]
self.assertEqual(listview_item.texts(), texts)
class MenuWrapperWpfTests(unittest.TestCase):
"""Unit tests for the MenuWrapper class on WPF demo"""
def setUp(self):
"""Set some data and ensure the application is in the state we want"""
_set_timings()
# start the application
self.app = Application(backend='uia')
self.app = self.app.start(wpf_app_1)
self.dlg = self.app.WPFSampleApplication
def tearDown(self):
"""Close the application after tests"""
self.app.kill()
def test_menu_by_index(self):
"""Test selecting a WPF menu item by index"""
path = "#0->#1->#1" # "File->Close->Later"
self.dlg.menu_select(path)
label = self.dlg.MenuLaterClickLabel.wrapper_object()
self.assertEqual(label.window_text(), u"MenuLaterClick")
# Non-existing paths
path = "#5->#1"
self.assertRaises(IndexError, self.dlg.menu_select, path)
path = "#0->#1->#1->#2->#3"
self.assertRaises(IndexError, self.dlg.menu_select, path)
def test_menu_by_exact_text(self):
"""Test selecting a WPF menu item by exact text match"""
path = "File->Close->Later"
self.dlg.menu_select(path, True)
label = self.dlg.MenuLaterClickLabel.wrapper_object()
self.assertEqual(label.window_text(), u"MenuLaterClick")
# A non-exact menu name
path = "File->About"
self.assertRaises(IndexError, self.dlg.menu_select, path, True)
def test_menu_by_best_match_text(self):
"""Test selecting a WPF menu item by best match text"""
path = "file-> close -> later"
self.dlg.menu_select(path, False)
label = self.dlg.MenuLaterClickLabel.wrapper_object()
self.assertEqual(label.window_text(), u"MenuLaterClick")
def test_menu_by_mixed_match(self):
"""Test selecting a WPF menu item by a path with mixed specifiers"""
path = "file-> #1 -> later"
self.dlg.menu_select(path, False)
label = self.dlg.MenuLaterClickLabel.wrapper_object()
self.assertEqual(label.window_text(), u"MenuLaterClick")
# Bad specifiers
path = "file-> 1 -> later"
self.assertRaises(IndexError, self.dlg.menu_select, path)
path = "#0->#1->1"
self.assertRaises(IndexError, self.dlg.menu_select, path)
path = "0->#1->1"
self.assertRaises(IndexError, self.dlg.menu_select, path)
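            # Path grammar used above: each "->"-separated segment is either
            # "#<index>" or an item title; the second menu_select() argument
            # toggles exact title matching (best match when it is False).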
class MenuWrapperNotepadTests(unittest.TestCase):
"""Unit tests for the MenuWrapper class on Notepad"""
def setUp(self):
"""Set some data and ensure the application is in the state we want"""
Timings.defaults()
# start the application
self.app = Application(backend='uia')
self.app = self.app.start("notepad.exe")
self.dlg = self.app.UntitledNotepad
ActionLogger().log("MenuWrapperNotepadTests::setUp, wait till Notepad dialog is ready")
self.dlg.wait("ready")
def tearDown(self):
"""Close the application after tests"""
self.app.kill()
def test_friendly_class_name(self):
"""Test getting the friendly class name of the menu"""
menu = self.dlg.descendants(control_type="MenuBar")[0]
self.assertEqual(menu.friendly_class_name(), "Menu")
def test_menu_by_index(self):
"""Test selecting a menu item by index"""
path = "#4->#1" # "Help->About Notepad"
self.dlg.menu_select(path)
# 'About Notepad' dialog showed upon execution of menu_select
self.assertEqual(self.dlg.AboutNotepad.is_active(), True)
            # menu_select raises an AttributeError when a dialog doesn't have menus
self.assertRaises(AttributeError, self.dlg.AboutNotepad.menu_select, "#10->#2")
self.dlg.AboutNotepad.close()
# A non-existing path
path = "#5->#1"
self.assertRaises(IndexError, self.dlg.menu_select, path)
# Get a menu item by index
menu = self.dlg.children(control_type="MenuBar")[0]
item = menu.item_by_index(4)
self.assertEqual(isinstance(item, uia_ctls.MenuItemWrapper), True)
self.assertEqual(item.window_text(), 'Help')
item.select()
item.close()
def test_is_dialog(self):
"""Test that method is_dialog() works as expected"""
self.assertEqual(self.dlg.is_dialog(), True)
self.assertEqual(self.dlg.Edit.is_dialog(), False)
def test_issue_532(self):
"""Test selecting a combobox item when it's wrapped in ListView"""
path = "Format -> Font"
self.dlg.menu_select(path)
combo_box = self.app.top_window().Font.ScriptComboBox.wrapper_object()
combo_box.select('Greek')
self.assertEqual(combo_box.selected_text(), 'Greek')
self.assertRaises(IndexError, combo_box.select, 'NonExistingScript')
def test_menu_by_exact_text(self):
"""Test selecting a menu item by exact text match"""
path = "Help->About Notepad"
self.dlg.menu_select(path, True)
self.assertEqual(self.dlg.AboutNotepad.is_dialog(), True)
self.dlg.AboutNotepad.close()
# A non-exact menu name
path = "help ->About Notepad"
self.assertRaises(IndexError, self.dlg.menu_select, path, True)
def test_menu_by_best_match_text(self):
"""Test selecting a Win32 menu item by best match text"""
path = "help->aboutnotepad"
self.dlg.menu_select(path, False)
self.dlg.AboutNotepad.close()
path = "Help ->about notepad "
self.dlg.menu_select(path, False)
self.dlg.AboutNotepad.close()
# Bad match
path = "HELP -> About Notepad"
self.assertRaises(IndexError, self.dlg.menu_select, path)
path = "help -> ABOUT NOTEPAD"
self.assertRaises(IndexError, self.dlg.menu_select, path)
path = "help -> # 2"
self.assertRaises(IndexError, self.dlg.menu_select, path)
def test_menu_by_mixed_match(self):
"""Test selecting a menu item by a path with mixed specifiers"""
path = "#4->aboutnotepad"
self.dlg.menu_select(path, False)
self.dlg.AboutNotepad.close()
# An index and the exact text match
path = "Help->#1"
self.dlg.menu_select(path, True)
self.dlg.AboutNotepad.close()
# An index and non-exact text match
path = "#4 -> about notepad "
self.dlg.menu_select(path, False)
self.dlg.AboutNotepad.close()
# Bad specifiers
path = "#0->#1->1"
self.assertRaises(IndexError, self.dlg.menu_select, path)
path = "0->#1->1"
self.assertRaises(IndexError, self.dlg.menu_select, path)
path = " -> #1 -> #2"
self.assertRaises(IndexError, self.dlg.menu_select, path)
class ToolbarWpfTests(unittest.TestCase):
"""Unit tests for ToolbarWrapper class on WPF demo"""
def setUp(self):
"""Set some data and ensure the application is in the state we want"""
_set_timings()
# start the application
self.app = Application(backend='uia')
self.app = self.app.start(wpf_app_1)
self.dlg = self.app.WPFSampleApplication
def tearDown(self):
"""Close the application after tests"""
self.app.kill()
def test_button_access(self):
"""Test getting access to buttons on Toolbar of WPF demo"""
            # Get the second toolbar; its buttons include "button 1" and "button 2"
tb = self.dlg.Toolbar2.wrapper_object()
self.assertEqual(tb.button_count(), 5)
self.assertEqual(len(tb.texts()), 5)
# Test if it's in writable properties
props = set(tb.get_properties().keys())
self.assertEqual('button_count' in props, True)
expect_txt = "button 1"
self.assertEqual(tb.button(3).window_text(), expect_txt)
found_txt = tb.button(expect_txt, exact=True).window_text()
self.assertEqual(found_txt, expect_txt)
found_txt = tb.button("b 1", exact=False).window_text()
self.assertEqual(found_txt, expect_txt)
expect_txt = "button 2"
found_txt = tb.button(expect_txt, exact=True).window_text()
self.assertEqual(found_txt, expect_txt)
expect_txt = ""
btn = tb.button(expect_txt, exact=True)
found_txt = btn.window_text()
self.assertEqual(found_txt, expect_txt)
# Notice that findbestmatch.MatchError is subclassed from IndexError
self.assertRaises(IndexError, tb.button, "BaD n_$E ", exact=False)
class ToolbarNativeTests(unittest.TestCase):
"""Unit tests for ToolbarWrapper class on a native application"""
def setUp(self):
"""Set some data and ensure the application is in the state we want"""
Timings.defaults()
self.app = Application(backend='uia')
self.app.start(os.path.join(mfc_samples_folder, u"RowList.exe"))
self.dlg = self.app.RowListSampleApplication
self.ctrl = self.dlg.ToolBar.wrapper_object()
def tearDown(self):
"""Close the application after tests"""
self.app.kill()
def test_tooltips(self):
"""Test working with tooltips"""
self.ctrl.set_focus()
self.ctrl.move_mouse_input(coords=(10, 10), absolute=False)
# Find a tooltip by class name
tt = self.app.window(top_level_only=False,
class_name="tooltips_class32").wait('visible')
self.assertEqual(isinstance(tt, uia_ctls.TooltipWrapper), True)
self.assertEqual(tt.window_text(), "Large Icons")
# Find a tooltip window by control type
tt = self.app.top_window().children(control_type='ToolTip')[0]
self.assertEqual(isinstance(tt, uia_ctls.TooltipWrapper), True)
self.assertEqual(tt.window_text(), "Large Icons")
def test_button_click(self):
"""Test button click"""
# Check the "Full Row Details" button
self.ctrl.check_button("Full Row Details", True)
lst_ctl = self.dlg.ListBox
itm = lst_ctl.children()[1]
self.assertEqual(itm.texts()[0], u'Yellow')
# Check the second time it shouldn't change
self.ctrl.check_button("Full Row Details", True)
self.assertEqual(itm.texts()[0], u'Yellow')
# Switch to another view
self.ctrl.check_button("Small Icons", True)
itm = lst_ctl.children()[1]
self.assertEqual(itm.texts()[0], u'Red')
class ToolbarMfcTests(unittest.TestCase):
"""Unit tests for ToolbarWrapper class on MFC demo"""
def setUp(self):
"""Set some data and ensure the application is in the state we want"""
_set_timings()
# start the application
self.app = Application(backend='uia').start(mfc_app_rebar_test)
self.dlg = self.app.RebarTest
self.menu_bar = self.dlg.MenuBar.wrapper_object()
self.toolbar = self.dlg.StandardToolbar.wrapper_object()
self.window_edge_point = (self.dlg.rectangle().width() + 50, self.dlg.rectangle().height() + 50)
def tearDown(self):
"""Close the application after tests"""
self.menu_bar.move_mouse_input(coords=self.window_edge_point, absolute=False)
self.app.kill()
def test_button_access(self):
"""Test getting access to buttons on Toolbar for MFC demo"""
# Read a first toolbar with buttons: "File, View, Help"
self.assertEqual(self.menu_bar.button_count(), 4)
self.assertEqual(self.toolbar.button_count(), 11)
# Test if it's in writable properties
props = set(self.menu_bar.get_properties().keys())
self.assertEqual('button_count' in props, True)
self.assertEqual("File", self.menu_bar.button(0).window_text())
self.assertEqual("View", self.menu_bar.button(1).window_text())
self.assertEqual("Help", self.menu_bar.button(2).window_text())
found_txt = self.menu_bar.button("File", exact=True).window_text()
self.assertEqual("File", found_txt)
found_txt = self.menu_bar.button("File", exact=False).window_text()
self.assertEqual("File", found_txt)
def test_texts(self):
"""Test method .texts() for MFC Toolbar"""
self.assertEqual(self.menu_bar.texts(), [u'File', u'View', u'Help', u'Help'])
self.assertEqual(self.toolbar.texts(), [u'New', u'Open', u'Save', u'Save',
u'Cut', u'Copy', u'Paste', u'Paste', u'Print', u'About', u'About'])
class TreeViewWpfTests(unittest.TestCase):
"""Unit tests for TreeViewWrapper class on WPF demo"""
def setUp(self):
"""Set some data and ensure the application is in the state we want"""
_set_timings()
# start the application
self.app = Application(backend='uia')
self.app = self.app.start(wpf_app_1)
self.dlg = self.app.WPFSampleApplication
tab_itm = self.dlg.TreeAndListViews.set_focus()
self.ctrl = tab_itm.children(control_type="Tree")[0]
def tearDown(self):
"""Close the application after tests"""
self.app.kill()
def test_tv_item_count_and_roots(self):
"""Test getting roots and a total number of items in TreeView"""
# By default the tree view on WPF demo is partially expanded
# with only 12 visible nodes
self.assertEqual(self.ctrl.item_count(), 12)
# Test if it's in writable properties
props = set(self.ctrl.get_properties().keys())
self.assertEqual('item_count' in props, True)
roots = self.ctrl.roots()
self.assertEqual(len(roots), 1)
self.assertEqual(roots[0].texts()[0], u'Date Elements')
sub_items = roots[0].sub_elements()
self.assertEqual(len(sub_items), 11)
self.assertEqual(sub_items[0].window_text(), u'Empty Date')
self.assertEqual(sub_items[-1].window_text(), u'Years')
expected_str = "Date Elements\n Empty Date\n Week\n Monday\n Tuesday\n Wednsday\n"
expected_str += " Thursday\n Friday\n Saturday\n Sunday\n Months\n Years\n"
self.assertEqual(self.ctrl.print_items(), expected_str)
def test_tv_item_select(self):
"""Test selecting an item from TreeView"""
# Find by a path with indexes
itm = self.ctrl.get_item((0, 2, 3))
self.assertEqual(itm.is_selected(), False)
# Select
itm.select()
self.assertEqual(itm.is_selected(), True)
# A second call to Select doesn't remove selection
itm.select()
self.assertEqual(itm.is_selected(), True)
itm = self.ctrl.get_item((0, 3, 2))
itm.ensure_visible()
self.assertEqual(itm.is_selected(), False)
coords = itm.children(control_type='Text')[0].rectangle().mid_point()
itm.click_input(coords=coords, absolute=True)
self.assertEqual(itm.is_selected(), True)
def test_tv_get_item(self):
"""Test getting an item from TreeView"""
# Find by a path with indexes
itm = self.ctrl.get_item((0, 2, 3))
self.assertEqual(isinstance(itm, uia_ctls.TreeItemWrapper), True)
self.assertEqual(itm.window_text(), u'April')
# Find by a path with strings
itm = self.ctrl.get_item('\\Date Elements\\Months\\April', exact=True)
self.assertEqual(isinstance(itm, uia_ctls.TreeItemWrapper), True)
self.assertEqual(itm.window_text(), u'April')
itm = self.ctrl.get_item('\\ Date Elements \\ months \\ april', exact=False)
self.assertEqual(isinstance(itm, uia_ctls.TreeItemWrapper), True)
self.assertEqual(itm.window_text(), u'April')
itm = self.ctrl.get_item('\\Date Elements', exact=False)
self.assertEqual(isinstance(itm, uia_ctls.TreeItemWrapper), True)
self.assertEqual(itm.window_text(), u'Date Elements')
# Try to find the last item in the tree hierarchy
itm = self.ctrl.get_item('\\Date Elements\\Years\\2018', exact=False)
self.assertEqual(isinstance(itm, uia_ctls.TreeItemWrapper), True)
self.assertEqual(itm.window_text(), u'2018')
itm = self.ctrl.get_item((0, 3, 3))
self.assertEqual(isinstance(itm, uia_ctls.TreeItemWrapper), True)
self.assertEqual(itm.window_text(), u'2018')
# Verify errors handling
self.assertRaises(uia_defs.NoPatternInterfaceError, itm.is_checked)
self.assertRaises(RuntimeError,
self.ctrl.get_item,
'Date Elements\\months',
exact=False)
self.assertRaises(IndexError,
self.ctrl.get_item,
'\\_X_- \\months',
exact=False)
self.assertRaises(IndexError,
self.ctrl.get_item,
'\\_X_- \\ months',
exact=True)
self.assertRaises(IndexError,
self.ctrl.get_item,
'\\Date Elements\\ months \\ aprel',
exact=False)
self.assertRaises(IndexError,
self.ctrl.get_item,
'\\Date Elements\\ months \\ april\\',
exact=False)
self.assertRaises(IndexError,
self.ctrl.get_item,
'\\Date Elements\\ months \\ aprel',
exact=True)
self.assertRaises(IndexError, self.ctrl.get_item, (0, 200, 1))
self.assertRaises(IndexError, self.ctrl.get_item, (130, 2, 1))
def test_tv_drag_n_drop(self):
"""Test moving an item with mouse over TreeView"""
        # Make sure both nodes are visible
self.ctrl.get_item('\\Date Elements\\weeks').collapse()
itm_from = self.ctrl.get_item('\\Date Elements\\Years')
itm_to = self.ctrl.get_item('\\Date Elements\\Empty Date')
itm_from.drag_mouse_input(itm_to)
# Verify that the item and its sub-items are attached to the new node
itm = self.ctrl.get_item('\\Date Elements\\Empty Date\\Years')
self.assertEqual(itm.window_text(), 'Years')
itm = self.ctrl.get_item((0, 0, 0, 0))
self.assertEqual(itm.window_text(), '2015')
itm = self.ctrl.get_item('\\Date Elements\\Empty Date\\Years')
itm.collapse()
itm_from = self.ctrl.get_item('\\Date Elements\\Empty Date\\Years')
itm_to = self.ctrl.get_item(r'\Date Elements\Months')
self.ctrl.drag_mouse_input(itm_to, itm_from)
itm = self.ctrl.get_item(r'\Date Elements\Months\Years')
self.assertEqual(itm.window_text(), 'Years')
# Error handling: drop on itself
self.assertRaises(AttributeError,
self.ctrl.drag_mouse_input,
itm_from, itm_from)
# Drag-n-drop by manually calculated absolute coordinates
itm_from = self.ctrl.get_item(r'\Date Elements\Months')
itm_from.collapse()
r = itm_from.rectangle()
coords_from = (int(r.left + (r.width() / 4.0)),
int(r.top + (r.height() / 2.0)))
r = self.ctrl.get_item(r'\Date Elements\Weeks').rectangle()
coords_to = (int(r.left + (r.width() / 4.0)),
int(r.top + (r.height() / 2.0)))
self.ctrl.drag_mouse_input(coords_to, coords_from)
itm = self.ctrl.get_item(r'\Date Elements\Weeks\Months')
self.assertEqual(itm.window_text(), 'Months')
class WindowWrapperTests(unittest.TestCase):
"""Unit tests for the UIAWrapper class for Window elements"""
def setUp(self):
"""Set some data and ensure the application is in the state we want"""
_set_timings()
test_folder = os.path.join(os.path.dirname(os.path.dirname(
os.path.dirname(os.path.abspath(__file__)))), r"apps/MouseTester")
self.qt5_app = os.path.join(test_folder, "mousebuttons.exe")
# start the application
self.app = Application(backend='uia')
self.app = self.app.start(self.qt5_app)
self.dlg = self.app.MouseButtonTester.wrapper_object()
self.another_app = None
def tearDown(self):
"""Close the application after tests"""
self.app.kill()
if self.another_app:
self.another_app.kill()
self.another_app = None
def test_issue_443(self):
"""Test .set_focus() for window that is not keyboard focusable"""
self.dlg.minimize()
self.assertEqual(self.dlg.is_minimized(), True)
self.dlg.set_focus()
self.assertEqual(self.dlg.is_minimized(), False)
self.assertEqual(self.dlg.is_normal(), True)
# run another app instance (in focus now)
self.another_app = Application(backend="win32").start(self.qt5_app)
# eliminate clickable point at original app by maximizing second window
self.another_app.MouseButtonTester.maximize()
self.another_app.MouseButtonTester.set_focus()
self.assertEqual(self.another_app.MouseButtonTester.has_focus(), True)
self.dlg.set_focus()
# another app instance has lost focus
self.assertEqual(self.another_app.MouseButtonTester.has_focus(), False)
# our window has been brought to the focus (clickable point exists)
self.assertEqual(self.dlg.element_info.element.GetClickablePoint()[-1], 1)
if __name__ == "__main__":
if UIA_support:
unittest.main()
| airelil/pywinauto | pywinauto/unittests/test_uiawrapper.py | Python | bsd-3-clause | 89,050 | 0.002223 |
# PyeNalysis
__author__ = "Edwin Dalmaijer"
import copy
import numpy
from scipy.interpolate import interp1d
# DEBUG #
#from matplotlib import pyplot
# # # # #
def interpolate_blink(signal, mode='auto', velthresh=5, maxdur=500, margin=10, invalid=-1, edfonly=False):
"""Returns signal with interpolated results, based on a cubic or linear
interpolation of all blinks detected in the signal; based on:
https://github.com/smathot/exparser/blob/master/exparser/TraceKit.py
arguments
signal -- a vector (i.e. a NumPy array) containing a single
trace of your signal; alternatively a trial gaze data
dict as is returned by edfreader can be passed; in this
case the blink ending events will be used to find blinks
before the pupil size velocity algorithm will be used
(NOTE: this means both will be used successively!)
keyword arguments
    mode        --    string indicating what kind of interpolation to use:
                    'linear' for a linear interpolation
                    'cubic' for a cubic interpolation
                    'auto' for a cubic interpolation if possible (i.e.
                    when at least four data points are available)
                    and linear when this is not the case
                    (default = 'auto')
    velthresh   --    pupil size change velocity threshold in arbitrary
                    units per sample (default = 5)
    maxdur      --    maximal duration of the blink in samples
                    (default = 500)
    margin      --    margin (in samples) to compensate for blink duration
                    underestimation; the blink is extended from the detected
                    start minus margin to the detected end plus margin
                    (default = 10)
    invalid     --    a single value coding for invalid data, e.g. -1 or 0.0;
                    anchor samples holding this value are replaced by the
                    trial average before the interpolation function is
                    fitted (default = -1)
    edfonly     --    Boolean indicating whether blinks should ONLY be
                    detected using the EDF logs and NOT algorithmically
                    (default = False)
    returns
    signal      --    a NumPy array containing the interpolated signal
    """
# # # # #
# input errors
# wrong interpolation method
    if mode not in ['auto','linear','cubic']:
        raise Exception("Error in pyenalysis.interpolate_blink: mode '%s' is not supported, please use one of the following: 'auto','linear','cubic'" % mode)
    # wrong signal dimension
    if type(signal) != dict:
        if signal.ndim != 1:
            raise Exception("Error in pyenalysis.interpolate_blink: input is not a single signal trace, but has %d dimensions; please provide a 1-dimension array" % signal.ndim)
# # # # #
# find blinks
# empty lists, to store blink starts and endings
starts = []
ends = []
# edfreader data
if type(signal) == dict:
# loop through blinks
for st, et, dur in signal['events']['Eblk']: # Eblk - list of lists, each containing [starttime, endtime, duration]
# edf time to sample number
st = numpy.where(signal['edftime']==st)[0]
et = numpy.where(signal['edftime']==et)[0]
# if the starting or ending time did not appear in the trial,
# correct the blink starting or ending point to the first or
# last sample, respectively
if len(st) == 0:
st = 0
else:
st = st[0]
if len(et) == 0:
et = len(signal['edftime'])
else:
et = et[0]
# compensate for underestimation of blink duration
if st-margin >= 0:
st -= margin
if et+margin < len(signal['size']):
et += margin
            # do not accept blinks that exceed the maximal blink duration
if et-st <= maxdur:
# append start time and ending time
starts.append(st)
ends.append(et)
# extract pupil size data from signal
signal = signal['size']
if not edfonly:
# signal in NumPy array
# create a velocity profile of the signal
vprof = signal[1:]-signal[:-1]
# start detection
ifrom = 0
while True:
# blink onset is detected when pupil size change velocity exceeds
# threshold
l = numpy.where(vprof[ifrom:] < -velthresh)[0]
# break when no blink start is detected
if len(l) == 0:
break
# blink start index
istart = l[0]+ifrom
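            # guard: stop if detection makes no progress since the previous
            # iteration, to avoid an infinite loop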
if ifrom == istart:
break
# reversal (opening of the eye) is detected when pupil size
# starts to increase with a super-threshold velocity
l = numpy.where(vprof[istart:] > velthresh)[0]
# if no reversal is detected, start detection process at istart
# next run
if len(l) == 0:
ifrom = istart
# reloop
continue
# index number of somewhat halfway blink process
imid = l[0] + istart
# a blink ending is detected when pupil size increase velocity
# falls back to zero
l = numpy.where(vprof[imid:] < 0)[0]
# if no ending is detected, start detection process from imid
# next run
if len(l) == 0:
ifrom = imid
# reloop
continue
# blink end index
iend = l[0]+imid
# start detection process from current blink ending next run
ifrom = iend
# compensate for underestimation of blink duration
if istart-margin >= 0:
istart -= margin
if iend+margin < len(signal):
iend += margin
            # do not accept blinks that exceed the maximal blink duration
if iend-istart > maxdur:
# reloop
continue
# if all is well, we append start and ending to their respective
# lists
starts.append(istart)
ends.append(iend)
# # DEBUG #
# pyplot.figure()
# pyplot.title("" % ())
# pyplot.plot(signal,'ko')
# pyplot.plot(vprof,'b')
# # # # # #
# # # # #
# interpolate
# loop through all starting and ending positions
for i in range(len(starts)):
# empty list to store data points for interpolation
pl = []
# duration in samples
duration = ends[i]-starts[i]
# starting point
if starts[i] - duration >= 0:
pl.extend([starts[i]-duration])
# central points (data between these points will be replaced)
pl.extend([starts[i],ends[i]])
# ending point
if ends[i] + duration < len(signal):
pl.extend([ends[i]+duration])
# choose interpolation type
if mode == 'auto':
# if our range is wide enough, we can interpolate cubicly
if len(pl) >= 4:
kind = 'cubic'
# if not, we use a linear interpolation
else:
kind = 'linear'
else:
kind = mode[:]
# select values for interpolation function
x = numpy.array(pl)
y = signal[x]
# replace any invalid values with trial average
y[y==invalid] = numpy.mean(signal[signal!=invalid])
# create interpolation function
intfunc = interp1d(x,y,kind=kind)
# do interpolation
xint = numpy.arange(starts[i],ends[i])
yint = intfunc(xint)
# insert interpolated values into signal
signal[xint] = yint
# # DEBUG #
# y = numpy.zeros(len(pl)) + max(signal)
# pyplot.plot(pl,y,'ro')
# pyplot.plot(signal,'r')
# # # # # #
return signal
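# Usage sketch (illustrative only; not part of the original module). Builds a
# synthetic pupil trace with one blink-like dip (fast decrease, then fast
# recovery) on a slightly sloping baseline; all values are made up.
def _demo_interpolate_blink():
    # sloping baseline, so the velocity profile dips below zero again after
    # the eye has reopened (which is what ends the detected blink)
    trace = numpy.linspace(1000.0, 994.0, 300)
    trace[100:120] = numpy.linspace(trace[100], 200.0, 20)    # eyelid closing
    trace[120:140] = numpy.linspace(200.0, 1000.0, 20)        # eyelid opening
    # velocities of roughly 40 units/sample exceed velthresh, so the dip is
    # detected as a blink and interpolated over
    return interpolate_blink(trace, mode='auto', velthresh=5, maxdur=500, margin=10)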
def interpolate_missing(signal, mode='auto', mindur=5, margin=10, invalid=-1):
"""Returns signal with interpolated results, based on a cubic or linear
interpolation of the invalid data in the signal
arguments
signal -- a vector (i.e. a NumPy array) containing a single
trace of your signal
keyword arguments
    mode        --    string indicating what kind of interpolation to use:
                    'linear' for a linear interpolation
                    'cubic' for a cubic interpolation
                    'auto' for a cubic interpolation if possible (i.e.
                    when at least four data points are available)
                    and linear when this is not the case
                    (default = 'auto')
    mindur      --    minimal amount of consecutive samples to interpolate
                    cubically; below this a linear interpolation is used,
                    to prevent weird results when interpolating very
                    short strings of missing data (default = 5)
    margin      --    margin (in samples) to compensate for missing duration
                    underestimation; the missing stretch is extended from
                    the detected start minus margin to the detected end
                    plus margin; this helps in reducing errors in blink
                    interpolation that has not been done by
                    interpolate_blink (default = 10)
invalid -- a single value coding for invalid data, e.g. -1 or 0.0
(default = -1)
returns
signal -- a NumPy array containing the interpolated signal
"""
# # # # #
# input errors
# wrong interpolation method
if mode not in ['auto','linear','cubic']:
raise Exception("Error in pyenalysis.interpolate_missing: mode '%s' is not supported, please use one of the following: 'auto','linear','cubic'" % mode)
# wrong signal dimension
if signal.ndim != 1:
raise Exception("Error in pyenalysis.interpolate_missing: input is not a single signal trace, but has %d dimensions; please provide a 1-dimension array" % signal.ndim)
# # # # #
# find successive strings of missing data
# empty lists for starting and ending indexes
starts = []
ends = []
    # check if the beginning sample is missing, and add it to the starting
    # indexes if needed (the algorithm does not pick up changes before the
    # start or after the end of the signal)
if signal[0] == invalid:
starts.append(0)
si = 1
else:
si = 0
# find invalid data
inval = signal == invalid
    # find the transitions between valid and invalid data
    # (numpy.diff flags every position where the boolean mask flips, so
    # the resulting index numbers mark the starts and ends of strings of
    # successive missing samples)
    diff = numpy.diff(inval)
    # find out what the index numbers of the changes are
    changes = numpy.where(diff==True)[0]
    # loop through the changes, finding the start and end index numbers of
    # strings of successive missing samples
for i in range(si,len(changes),2):
ns = changes[i]-margin
if ns < 0:
ns = 0
starts.append(ns)
for i in range(1-si,len(changes),2):
ne = changes[i]+1+margin
if ne >= len(signal):
ne = len(signal)-1
ends.append(ne)
# if the signal ended on an invalid sample, add the ending index number
if signal[-1] == invalid:
ends.append(len(signal)-1)
# # # # #
# interpolate
# correct start and end point if these are invalid, by replacing them
# with the trial average
if signal[0] == invalid:
signal[0] = numpy.mean(signal[signal != invalid])
if signal[-1] == invalid:
signal[-1] = numpy.mean(signal[signal != invalid])
# loop through all starting and ending positions
for i in range(len(starts)):
# empty list to store data points for interpolation
pl = []
# duration in samples
duration = ends[i]-starts[i]
# starting point
if starts[i] - duration >= 0 and signal[starts[i]-duration] != invalid:
pl.extend([starts[i]-duration])
# central points (data between these points will be replaced)
pl.extend([starts[i],ends[i]])
# ending point
if ends[i] + duration < len(signal) and signal[ends[i]+duration] != invalid:
pl.extend([ends[i]+duration])
# if the duration is too low, use linear interpolation
if duration < mindur:
kind = 'linear'
# if the duration is long enough, choose interpolation type
else:
if mode == 'auto':
# if our range is wide enough, we can interpolate cubicly
if len(pl) >= 4:
kind = 'cubic'
# if not, we use a linear interpolation
else:
kind = 'linear'
else:
kind = mode[:]
# create interpolation function
x = numpy.array(pl)
y = signal[x]
intfunc = interp1d(x,y,kind=kind)
# do interpolation
xint = numpy.arange(starts[i],ends[i])
yint = intfunc(xint)
# insert interpolated values into signal
signal[xint] = yint
return signal
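# Usage sketch (illustrative only; not part of the original module): repair a
# stretch of samples that carry the invalid-data marker.
def _demo_interpolate_missing():
    a = numpy.sin(numpy.linspace(0.0, 4.0, 100)) + 10.0
    a[40:60] = -1.0    # a string of invalid samples
    # with only two anchor points in range, a linear interpolation is used
    return interpolate_missing(a, mode='auto', mindur=5, margin=10, invalid=-1)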
def remove_outliers(signal, maxdev=2.5, invalid=-1, interpolate=True, mode='auto', allowp=0.1):
"""Replaces every outlier with a missing value, then interpolates
missing values using pyenalysis.interpolate_missing
arguments
signal -- a vector (i.e. a NumPy array) containing a single
trace of your signal
keyword arguments
maxdev -- maximal distance between a single sample and the
signal average in standard deviations (default = 2.5)
invalid -- a single value coding for invalid data, e.g. -1 or 0.0;
outliers will be replaced by this value (default = -1)
interpolate -- Boolean indicating whether outliers should be
should be interpolated (True) or replaced by missing
values (False) (default = True)
mode -- string indicating what kind of interpolation to use:
'linear' for a linear interpolation
'cubic' for a cubic interpolation
'auto' for a cubic interpolation is possible (i.e.
when more than four data points are available)
and linear when this is not the case
(default = 'auto')
    allowp      --    if the standard deviation is below this proportion of
                    the mean, outliers will not be removed; this is to
                    prevent erroneous removal of outliers in a very steady
                    signal (default = 0.1)
returns
signal -- signal with outliers replaced by missing or
interpolated (depending on interpolate keyword
argument)
"""
# # # # #
# input errors
# wrong interpolation method
    if mode not in ['auto','linear','cubic']:
        raise Exception("Error in pyenalysis.remove_outliers: mode '%s' is not supported, please use one of the following: 'auto','linear','cubic'" % mode)
    # wrong signal dimension
    if signal.ndim != 1:
        raise Exception("Error in pyenalysis.remove_outliers: input is not a single signal trace, but has %d dimensions; please provide a 1-dimension array" % signal.ndim)
# # # # #
# outlier removal
# calculate signal average and standard deviation
mean = numpy.mean(signal)
sd = numpy.std(signal)
# stop if SD is too low
if sd < mean*allowp:
return signal
# calculate bounds
lower = mean - maxdev*sd
upper = mean + maxdev*sd
# find outliers
outlier = (signal > upper) | (signal < lower)
# replace outliers by invalid code
signal[outlier] = invalid
# interpolate
if interpolate:
signal = interpolate_missing(signal, mode=mode, invalid=invalid)
return signal
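# Usage sketch (illustrative only; not part of the original module): flag one
# extreme sample as invalid and patch it by interpolation.
def _demo_remove_outliers():
    a = numpy.random.random_sample(200) * 2.0 + 10.0
    a[50] = 40.0    # far beyond maxdev standard deviations from the mean
    return remove_outliers(a, maxdev=2.5, invalid=-1, interpolate=True, mode='auto')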
def hampel(signal, winlen=12, T=3, focus='centre'):
"""Performs a Hampel filtering, a median based outlier rejection in which
outliers are detected based on a local median, and are replaced by that
median (local median is determined in a moving window)
arguments
signal -- a vector (i.e. a NumPy array) containing a single
trace of your signal
keyword arguments
winlen -- integer indicating window length (default = 12)
T -- floating point or integer indicating the maximal
distance from the surrounding signal that defines
outliers; distance is measured in a standard deviation
like measure (S0), based on the local median; a T of 3
means that any point outside of the range -3*S0 to 3*S0
is considered an outlier (default = 3)
    focus       --    string indicating where the focus (i.e. the point that
                    is being corrected) of the window should be; one of:
                    'centre' (window = winlen/2 + i + winlen/2)
                    'left' (window = i + winlen)
                    'right' (window = winlen + i)
                    (default = 'centre')
    returns
    signal      --    signal with outliers replaced by the local window
                    median
    """
    if focus == 'centre':
        # half a window length (integer division keeps the indexes integral)
        hampwinlen = winlen // 2
        for i in range(hampwinlen, len(signal)-hampwinlen+1):
            # median for this window
            med = numpy.median(signal[i-hampwinlen:i+hampwinlen])
            # check S0 (standard deviation like measure, based on the
            # median absolute deviation)
            s0 = 1.4826 * numpy.median(numpy.abs(signal[i-hampwinlen:i+hampwinlen] - med))
            # a sample is an outlier when it deviates from the local
            # median by more than T*s0
            if abs(signal[i] - med) > T*s0:
                # replace outliers by the median
                signal[i] = med
# if the focus is not the centre
else:
# determine the starting position
if focus == 'left':
start = 0
stop = len(signal) - winlen
elif focus == 'right':
start = winlen
stop = len(signal)
else:
            start = winlen // 2
            stop = len(signal) - winlen // 2 + 1
# loop through samples
for i in range(start, stop):
# determine window start and stop
if focus == 'left':
wstart = i
wstop = i + winlen
elif focus == 'right':
wstart = i - winlen
wstop = i
else:
                wstart = i - winlen // 2
                wstop = i + winlen // 2
# median for this window
med = numpy.median(signal[wstart:wstop])
# check S0 (standard deviation like measure)
s0 = 1.4826 * numpy.median(numpy.abs(signal[wstart:wstop] - med))
            # a sample is an outlier when it deviates from the local
            # median by more than T*s0
            if abs(signal[i] - med) > T*s0:
                # replace outliers by the median
                signal[i] = copy.deepcopy(med)
return signal
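# Usage sketch (illustrative only; not part of the original module): reject a
# single spike with the Hampel criterion, using the default window settings.
def _demo_hampel():
    a = numpy.sin(numpy.linspace(0.0, 6.0, 120)) * 5.0 + 10.0
    a[30] = 60.0    # spike, far from the local median
    return hampel(a, winlen=12, T=3, focus='centre')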
def smooth(signal, winlen=11, window='hanning', lencorrect=True):
"""Smooth a trace, based on: http://wiki.scipy.org/Cookbook/SignalSmooth
arguments
signal -- a vector (i.e. a NumPy array) containing a single
trace of your signal
keyword arguments
winlen -- integer indicating window length (default = 11)
window -- smoothing type, should be one of the following:
'flat', 'hanning', 'hamming', 'bartlett', or 'blackman'
(default = 'hanning')
lencorrect -- Boolean indicating if the output (the smoothed signal)
should have the same length as the input (the raw
signal); this is not necessarily so because of data
convolution (default = True)
returns
signal -- smoothed signal
"""
# # # # #
# input errors
# really small window
if winlen < 3:
return signal
# non-integer window length
if type(winlen) != int:
try:
winlen = int(winlen)
except:
raise Exception("Error in pyenalysis.smooth: provided window length ('%s') is not compatible; please provide an integer window length" % winlen)
# wrong type of window
if window not in ['flat', 'hanning', 'hamming', 'bartlett', 'blackman']:
raise Exception("Error in pyenalysis.smooth: windowtype '%s' is not supported; please use one of the following: 'flat', 'hanning', 'hamming', 'bartlett', or 'blackman'" % window)
# wrong signal dimension
if signal.ndim != 1:
raise Exception("Error in pyenalysis.smooth: input is not a single signal trace, but has %d dimensions; please provide a 1-dimension array" % signal.ndim)
# too small a trace
if signal.size < winlen:
raise Exception("Error in pyenalysis.smooth: input signal has too few datapoints (%d) for provided window length (%d)" % (signal.size,winlen))
# # # # #
# smoothing
# slice to concatenation
s = numpy.r_[signal[winlen-1:0:-1],signal,signal[-1:-winlen:-1]]
# this is equivalent to:
# p1 = signal[winlen-1:0:-1].tolist() # first part of signal reversed
# p2 = signal.tolist()
# p3 = signal[-1:-winlen:-1].tolist() # last part of signal reversed
# s = p1 + p2 + p3
# moving average
if window == 'flat':
w = numpy.ones(winlen, 'd')
    # bit more sophisticated smoothing windows
    else:
        # look the requested window function up on the numpy module
        # (numpy.hanning, numpy.hamming, etc.) instead of using eval()
        w = getattr(numpy, window)(winlen)
# convolve signal, according to chosen smoothing type
smoothed = numpy.convolve(w/w.sum(), s, mode='valid')
# correct length if necessary
if lencorrect:
        smoothed = smoothed[(winlen // 2 - 1):-(winlen // 2)]
try:
smoothed = smoothed[:len(signal)]
except:
raise Exception("Error in pyenalysis.smooth: output array is too short (len(output)=%d, len(signal)=%d)" % (len(smoothed),len(signal)))
return smoothed
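# Usage sketch (illustrative only; not part of the original module; the DEBUG
# block below chains the cleaning steps on fake data in the same way).
def _demo_smooth():
    a = numpy.random.random_sample(100) + 5.0
    return smooth(a, winlen=11, window='hanning', lencorrect=True)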
# DEBUG #
if __name__ == '__main__':
from matplotlib import pyplot
# constants
N = 200
INVAL = -1
# create random data
a = numpy.random.random_sample(N)
    # manipulate the random data to look somewhat realistic
a = 10 + a*5
# introduce some missing values
a[0:10] = INVAL
a[50:66] = INVAL
a[150:190] = INVAL
a[-1] = INVAL
    # introduce outliers
for i in [15,16,18,40,197]:
a[i] = a[i] + numpy.random.random()*30
# plot the raw data
pyplot.figure()
pyplot.plot(a,'ko', label='raw')
# smooth the data
# a = smooth(a,winlen=5,lencorrect=True)
# plot the smoothed data
# pyplot.plot(a,'y', label='pre-smooth')
# interpolate 'blinks' (artificial, due to smoothing of fake data and missing)
# a = interpolate_blink(a, mode='auto', velthresh=5, maxdur=500, margin=10)
# plot interpolated data
# pyplot.plot(a,'b', label='blinks_interpolated')
# interpolate missing data
a = interpolate_missing(a,mode='linear',invalid=INVAL)
# plot interpolated data
pyplot.plot(a,'g', label='missing_interpolated')
# remove outliers
a = remove_outliers(a, maxdev=5, invalid=INVAL, interpolate=True, mode='auto')
# plot data without outliers
pyplot.plot(a,'m', label='outliers_removed')
# smooth the data
a = smooth(a,winlen=5,window='hanning',lencorrect=True)
# plot the smoothed data
pyplot.plot(a,'r', label='smooth')
# finish the plot
pyplot.legend(loc='upper right')
pyplot.show()
# # # # # | esdalmaijer/PyGazeAnalyser | pygazeanalyser/traces.py | Python | gpl-3.0 | 19,865 | 0.037352 |
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
import miscClin
import tsvIO
import sys
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
remapDict = {}
DAYS_PER_YEAR = 365.2425
# ------------------------------------------------------------------------- ##
# DEPENDING ON WHICH TUMOR TYPE IS BEING PROCESSED, THESE SWITCHES MAY
# NEED TO BE CHANGED ...
remapDict["anatomic_organ_subdivision"] = {}
if (1):
remapDict["anatomic_organ_subdivision"]["na"] = "NA"
remapDict["anatomic_organ_subdivision"]["rectum"] = 0
remapDict["anatomic_organ_subdivision"]["rectosigmoid_junction"] = 1
remapDict["anatomic_organ_subdivision"]["sigmoid_colon"] = 2
remapDict["anatomic_organ_subdivision"]["descending_colon"] = 3
remapDict["anatomic_organ_subdivision"]["splenic_flexure"] = 4
remapDict["anatomic_organ_subdivision"]["transverse_colon"] = 5
remapDict["anatomic_organ_subdivision"]["hepatic_flexure"] = 6
remapDict["anatomic_organ_subdivision"]["ascending_colon"] = 7
remapDict["anatomic_organ_subdivision"]["cecum"] = 8
if (0):
remapDict["anatomic_organ_subdivision"]["na"] = "NA"
remapDict["anatomic_organ_subdivision"]["bilateral"] = "bilateral"
remapDict["anatomic_organ_subdivision"]["left"] = "left"
remapDict["anatomic_organ_subdivision"]["right"] = "right"
if (0):
remapDict["anatomic_organ_subdivision"][""] = "NA"
remapDict["anatomic_organ_subdivision"]["na"] = "NA"
remapDict["anatomic_organ_subdivision"]["brain"] = "brain"
remapDict["histological_type"] = {}
if (0):
remapDict["histological_type"]["na"] = "NA"
remapDict["histological_type"]["colon_adenocarcinoma"] = 0
remapDict["histological_type"]["rectal_adenocarcinoma"] = 0
remapDict["histological_type"]["colon_mucinous_adenocarcinoma"] = 1
remapDict["histological_type"]["rectal_mucinous_adenocarcinoma"] = 1
if (0):
remapDict["histological_type"]["na"] = "NA"
remapDict["histological_type"][
"untreated_primary_(de_novo)_gbm"] = "de_novo"
remapDict["histological_type"]["treated_primary_gbm"] = "primary"
remapDict["ethnicity"] = {}
remapDict["ethnicity"]["hispanic_or_latino"] = "hispanic"
remapDict["ethnicity"]["not_hispanic_or_latino"] = "not_hispanic"
# ------------------------------------------------------------------------- ##
remapDict["tumor_grade"] = {}
remapDict["tumor_grade"]["na"] = "NA"
remapDict["tumor_grade"]["gx"] = "NA"
remapDict["tumor_grade"]["gb"] = "NA"
remapDict["tumor_grade"]["g1"] = 1
remapDict["tumor_grade"]["g2"] = 2
remapDict["tumor_grade"]["g3"] = 3
remapDict["tumor_grade"]["g4"] = 4
remapDict["tumor_grade"]["high grade"] = 3 # ???
remapDict["tumor_grade"]["high_grade"] = 3 # ???
if (0):
remapDict["tumor_stage"] = {}
remapDict["tumor_stage"]["na"] = "NA"
remapDict["tumor_stage"]["i"] = 1
remapDict["tumor_stage"]["ia"] = 1.2
remapDict["tumor_stage"]["ib"] = 1.4
remapDict["tumor_stage"]["ic"] = 1.6
remapDict["tumor_stage"]["ii"] = 2
remapDict["tumor_stage"]["iia"] = 2.2
remapDict["tumor_stage"]["iib"] = 2.4
remapDict["tumor_stage"]["iic"] = 2.6
remapDict["tumor_stage"]["iii"] = 3
remapDict["tumor_stage"]["iiia"] = 3.2
remapDict["tumor_stage"]["iiib"] = 3.4
remapDict["tumor_stage"]["iiic"] = 3.6
remapDict["tumor_stage"]["iv"] = 4
remapDict["tumor_stage"]["iva"] = 4.2
remapDict["tumor_stage"]["ivb"] = 4.4
remapDict["tumor_stage"]["ivc"] = 4.6
remapDict["breast_tumor_pathologic_grouping_stage"] = {}
remapDict["breast_tumor_pathologic_grouping_stage"]["na"] = "NA"
remapDict["breast_tumor_pathologic_grouping_stage"]["x"] = "NA"
remapDict["breast_tumor_pathologic_grouping_stage"]["tis"] = 0.5
remapDict["breast_tumor_pathologic_grouping_stage"]["i"] = 1
remapDict["breast_tumor_pathologic_grouping_stage"]["ia"] = 1.2
remapDict["breast_tumor_pathologic_grouping_stage"]["ib"] = 1.4
remapDict["breast_tumor_pathologic_grouping_stage"]["ii"] = 2
remapDict["breast_tumor_pathologic_grouping_stage"]["iia"] = 2.2
remapDict["breast_tumor_pathologic_grouping_stage"]["iib"] = 2.4
remapDict["breast_tumor_pathologic_grouping_stage"]["iic"] = 2.6
remapDict["breast_tumor_pathologic_grouping_stage"]["iii"] = 3
remapDict["breast_tumor_pathologic_grouping_stage"]["iiia"] = 3.2
remapDict["breast_tumor_pathologic_grouping_stage"]["iiib"] = 3.4
remapDict["breast_tumor_pathologic_grouping_stage"]["iiic"] = 3.6
remapDict["breast_tumor_pathologic_grouping_stage"]["iv"] = 4
remapDict["primary_tumor_pathologic_spread"] = {}
remapDict["primary_tumor_pathologic_spread"]["na"] = "NA"
remapDict["primary_tumor_pathologic_spread"]["tx"] = "NA"
remapDict["primary_tumor_pathologic_spread"]["t0"] = 0
remapDict["primary_tumor_pathologic_spread"]["tis"] = 0.5
remapDict["primary_tumor_pathologic_spread"]["t1"] = 1
remapDict["primary_tumor_pathologic_spread"]["t1a"] = 1.2
remapDict["primary_tumor_pathologic_spread"]["t1b"] = 1.4
remapDict["primary_tumor_pathologic_spread"]["t2"] = 2
remapDict["primary_tumor_pathologic_spread"]["t2a"] = 2.2
remapDict["primary_tumor_pathologic_spread"]["t2b"] = 2.4
remapDict["primary_tumor_pathologic_spread"]["t3"] = 3
remapDict["primary_tumor_pathologic_spread"]["t3a"] = 3.2
remapDict["primary_tumor_pathologic_spread"]["t3b"] = 3.4
remapDict["primary_tumor_pathologic_spread"]["t3c"] = 3.6
remapDict["primary_tumor_pathologic_spread"]["t4"] = 4
remapDict["primary_tumor_pathologic_spread"]["t4a"] = 4.2
remapDict["primary_tumor_pathologic_spread"]["t4b"] = 4.4
remapDict["breast_tumor_pathologic_t_stage"] = {}
remapDict["breast_tumor_pathologic_t_stage"]["na"] = "NA"
remapDict["breast_tumor_pathologic_t_stage"]["tx"] = "NA"
remapDict["breast_tumor_pathologic_t_stage"]["t1"] = 1
remapDict["breast_tumor_pathologic_t_stage"]["t1a"] = 1.2
remapDict["breast_tumor_pathologic_t_stage"]["t1b"] = 1.4
remapDict["breast_tumor_pathologic_t_stage"]["t1c"] = 1.6
remapDict["breast_tumor_pathologic_t_stage"]["t2"] = 2
remapDict["breast_tumor_pathologic_t_stage"]["t2a"] = 2.2
remapDict["breast_tumor_pathologic_t_stage"]["t2b"] = 2.4
remapDict["breast_tumor_pathologic_t_stage"]["t2c"] = 2.6
remapDict["breast_tumor_pathologic_t_stage"]["t3"] = 3
remapDict["breast_tumor_pathologic_t_stage"]["t3a"] = 3.4
remapDict["breast_tumor_pathologic_t_stage"]["t3b"] = 3.4
remapDict["breast_tumor_pathologic_t_stage"]["t3c"] = 3.6
remapDict["breast_tumor_pathologic_t_stage"]["t4"] = 4
remapDict["breast_tumor_pathologic_t_stage"]["t4a"] = 4.2
remapDict["breast_tumor_pathologic_t_stage"]["t4b"] = 4.4
remapDict["breast_tumor_pathologic_t_stage"]["t4c"] = 4.6
remapDict["breast_tumor_pathologic_t_stage"]["t4d"] = 4.8
remapDict["breast_carcinoma_estrogen_receptor_status"] = {}
remapDict["breast_carcinoma_estrogen_receptor_status"]["na"] = "NA"
remapDict["breast_carcinoma_estrogen_receptor_status"]["not_performed"] = "NA"
remapDict["breast_carcinoma_estrogen_receptor_status"][
"performed_but_not_available"] = "NA"
remapDict["breast_carcinoma_estrogen_receptor_status"][
"indeterminate"] = "indeterminate"
remapDict["breast_carcinoma_estrogen_receptor_status"]["positive"] = "positive"
remapDict["breast_carcinoma_estrogen_receptor_status"]["negative"] = "negative"
remapDict["lymphnode_pathologic_spread"] = {}
remapDict["lymphnode_pathologic_spread"]["na"] = "NA"
remapDict["lymphnode_pathologic_spread"]["nx"] = "NA"
remapDict["lymphnode_pathologic_spread"]["n0"] = 0
remapDict["lymphnode_pathologic_spread"]["n1"] = 1
remapDict["lymphnode_pathologic_spread"]["n1a"] = 1.2
remapDict["lymphnode_pathologic_spread"]["n1b"] = 1.4
remapDict["lymphnode_pathologic_spread"]["n1c"] = 1.6
remapDict["lymphnode_pathologic_spread"]["n2"] = 2
remapDict["lymphnode_pathologic_spread"]["n2a"] = 2.2
remapDict["lymphnode_pathologic_spread"]["n2b"] = 2.4
remapDict["lymphnode_pathologic_spread"]["n2c"] = 2.6
remapDict["lymphnode_pathologic_spread"]["n3"] = 3
remapDict["lymphnode_pathologic_spread"]["n3a"] = 3.2
remapDict["breast_tumor_pathologic_n_stage"] = {}
remapDict["breast_tumor_pathologic_n_stage"]["na"] = "NA"
remapDict["breast_tumor_pathologic_n_stage"]["pnx"] = "NA"
remapDict["breast_tumor_pathologic_n_stage"]["pn0"] = 0
remapDict["breast_tumor_pathologic_n_stage"]["pn0(i-)"] = 0.2
remapDict["breast_tumor_pathologic_n_stage"]["pn0(i+)"] = 0.4
remapDict["breast_tumor_pathologic_n_stage"]["pn1"] = 1
remapDict["breast_tumor_pathologic_n_stage"]["pn1mi"] = 1.1
remapDict["breast_tumor_pathologic_n_stage"]["pn1a"] = 1.2
remapDict["breast_tumor_pathologic_n_stage"]["pn1b"] = 1.4
remapDict["breast_tumor_pathologic_n_stage"]["pn1c"] = 1.6
remapDict["breast_tumor_pathologic_n_stage"]["pn2"] = 2
remapDict["breast_tumor_pathologic_n_stage"]["pn2a"] = 2.2
remapDict["breast_tumor_pathologic_n_stage"]["pn2b"] = 2.4
remapDict["breast_tumor_pathologic_n_stage"]["pn3"] = 3
remapDict["breast_tumor_pathologic_n_stage"]["pn3a"] = 3.2
remapDict["breast_tumor_pathologic_n_stage"]["pn3b"] = 3.4
remapDict["breast_tumor_pathologic_n_stage"]["pn3c"] = 3.6
remapDict["breast_tumor_pathologic_n_stage"] = {}
remapDict["breast_tumor_pathologic_n_stage"]["na"] = "NA"
remapDict["breast_tumor_pathologic_n_stage"]["nx"] = "NA"
remapDict["breast_tumor_pathologic_n_stage"]["n0"] = 0
remapDict["breast_tumor_pathologic_n_stage"]["n0(i-)"] = 0.2
remapDict["breast_tumor_pathologic_n_stage"]["n0_(i-)"] = 0.2
remapDict["breast_tumor_pathologic_n_stage"]["n0(i+)"] = 0.4
remapDict["breast_tumor_pathologic_n_stage"]["n0_(i+)"] = 0.4
remapDict["breast_tumor_pathologic_n_stage"]["n0_(mol+)"] = 0.3
remapDict["breast_tumor_pathologic_n_stage"]["n1"] = 1
remapDict["breast_tumor_pathologic_n_stage"]["n1mi"] = 1.1
remapDict["breast_tumor_pathologic_n_stage"]["n1a"] = 1.2
remapDict["breast_tumor_pathologic_n_stage"]["n1b"] = 1.4
remapDict["breast_tumor_pathologic_n_stage"]["n1c"] = 1.6
remapDict["breast_tumor_pathologic_n_stage"]["n2"] = 2
remapDict["breast_tumor_pathologic_n_stage"]["n2a"] = 2.2
remapDict["breast_tumor_pathologic_n_stage"]["n2b"] = 2.4
remapDict["breast_tumor_pathologic_n_stage"]["n3"] = 3
remapDict["breast_tumor_pathologic_n_stage"]["n3a"] = 3.2
remapDict["breast_tumor_pathologic_n_stage"]["n3b"] = 3.4
remapDict["breast_tumor_pathologic_n_stage"]["n3c"] = 3.6
remapDict["distant_metastasis_pathologic_spread"] = {}
remapDict["distant_metastasis_pathologic_spread"]["na"] = "NA"
remapDict["distant_metastasis_pathologic_spread"]["mx"] = "NA"
remapDict["distant_metastasis_pathologic_spread"]["m0"] = 0
remapDict["distant_metastasis_pathologic_spread"]["m1"] = 1
remapDict["distant_metastasis_pathologic_spread"]["m1a"] = 1.2
remapDict["distant_metastasis_pathologic_spread"]["m1b"] = 1.4
remapDict["breast_tumor_clinical_m_stage"] = {}
remapDict["breast_tumor_clinical_m_stage"]["na"] = "NA"
remapDict["breast_tumor_clinical_m_stage"]["mx"] = "NA"
remapDict["breast_tumor_clinical_m_stage"]["cm0_(i+)"] = "NA"
remapDict["breast_tumor_clinical_m_stage"]["m0"] = 0
remapDict["breast_tumor_clinical_m_stage"]["m1"] = 1
remapDict["breast_tumor_clinical_m_stage"]["m1a"] = 1.2
remapDict["breast_tumor_clinical_m_stage"]["m1b"] = 1.4
remapDict["residual_tumor"] = {}
remapDict["residual_tumor"]["na"] = "NA"
remapDict["residual_tumor"]["rx"] = "NA"
remapDict["residual_tumor"]["r0"] = 0
remapDict["residual_tumor"]["r1"] = 1
remapDict["residual_tumor"]["r2"] = 2
remapDict["her2_immunohistochemistry_level_result"] = {}
remapDict["her2_immunohistochemistry_level_result"]["na"] = "NA"
remapDict["her2_immunohistochemistry_level_result"]["0"] = 0
remapDict["her2_immunohistochemistry_level_result"]["1+"] = 1
remapDict["her2_immunohistochemistry_level_result"]["2+"] = 2
remapDict["her2_immunohistochemistry_level_result"]["3+"] = 3
remapDict["breast_carcinoma_immunohistochemistry_pos_cell_score"] = {}
remapDict["breast_carcinoma_immunohistochemistry_pos_cell_score"]["na"] = "NA"
remapDict["breast_carcinoma_immunohistochemistry_pos_cell_score"]["0"] = 0
remapDict["breast_carcinoma_immunohistochemistry_pos_cell_score"]["1+"] = 1
remapDict["breast_carcinoma_immunohistochemistry_pos_cell_score"]["2+"] = 2
remapDict["breast_carcinoma_immunohistochemistry_pos_cell_score"]["3+"] = 3
remapDict["breast_carcinoma_immunohistochemistry_pos_cell_score"]["4+"] = 4
remapDict["immunohistochemistry_positive_cell_score"] = {}
remapDict["immunohistochemistry_positive_cell_score"]["na"] = "NA"
remapDict["immunohistochemistry_positive_cell_score"]["0"] = 0
remapDict["immunohistochemistry_positive_cell_score"]["1+"] = 1
remapDict["immunohistochemistry_positive_cell_score"]["2+"] = 2
remapDict["immunohistochemistry_positive_cell_score"]["3+"] = 3
remapDict["immunohistochemistry_positive_cell_score"]["4+"] = 4
remapDict["progesterone_receptor_level_cell_percent_category"] = {}
remapDict["progesterone_receptor_level_cell_percent_category"]["na"] = "NA"
remapDict["progesterone_receptor_level_cell_percent_category"]["<10%"] = 0
remapDict["progesterone_receptor_level_cell_percent_category"]["10-19%"] = 1
remapDict["progesterone_receptor_level_cell_percent_category"]["20-29%"] = 2
remapDict["progesterone_receptor_level_cell_percent_category"]["30-39%"] = 3
remapDict["progesterone_receptor_level_cell_percent_category"]["40-49%"] = 4
remapDict["progesterone_receptor_level_cell_percent_category"]["50-59%"] = 5
remapDict["progesterone_receptor_level_cell_percent_category"]["60-69%"] = 6
remapDict["progesterone_receptor_level_cell_percent_category"]["70-79%"] = 7
remapDict["progesterone_receptor_level_cell_percent_category"]["80-89%"] = 8
remapDict["progesterone_receptor_level_cell_percent_category"]["90-99%"] = 9
remapDict["er_level_cell_percentage_category"] = {}
remapDict["er_level_cell_percentage_category"]["na"] = "NA"
remapDict["er_level_cell_percentage_category"]["<10%"] = 0
remapDict["er_level_cell_percentage_category"]["10-19%"] = 1
remapDict["er_level_cell_percentage_category"]["20-29%"] = 2
remapDict["er_level_cell_percentage_category"]["30-39%"] = 3
remapDict["er_level_cell_percentage_category"]["40-49%"] = 4
remapDict["er_level_cell_percentage_category"]["50-59%"] = 5
remapDict["er_level_cell_percentage_category"]["60-69%"] = 6
remapDict["er_level_cell_percentage_category"]["70-79%"] = 7
remapDict["er_level_cell_percentage_category"]["80-89%"] = 8
remapDict["er_level_cell_percentage_category"]["90-99%"] = 9
remapDict["her2_erbb_pos_finding_cell_percent_category"] = {}
remapDict["her2_erbb_pos_finding_cell_percent_category"]["na"] = "NA"
remapDict["her2_erbb_pos_finding_cell_percent_category"]["<10%"] = 0
remapDict["her2_erbb_pos_finding_cell_percent_category"]["10-19%"] = 1
remapDict["her2_erbb_pos_finding_cell_percent_category"]["20-29%"] = 2
remapDict["her2_erbb_pos_finding_cell_percent_category"]["30-39%"] = 3
remapDict["her2_erbb_pos_finding_cell_percent_category"]["40-49%"] = 4
remapDict["her2_erbb_pos_finding_cell_percent_category"]["50-59%"] = 5
remapDict["her2_erbb_pos_finding_cell_percent_category"]["60-69%"] = 6
remapDict["her2_erbb_pos_finding_cell_percent_category"]["70-79%"] = 7
remapDict["her2_erbb_pos_finding_cell_percent_category"]["80-89%"] = 8
remapDict["her2_erbb_pos_finding_cell_percent_category"]["90-99%"] = 9
remapDict["axillary_lymph_node_stage_method_type"] = {}
remapDict["axillary_lymph_node_stage_method_type"]["na"] = "NA"
remapDict["axillary_lymph_node_stage_method_type"]["OTHER_(SPECIFY)"] = "NA"
remapDict["axillary_lymph_node_stage_method_type"]["other_(specify)"] = "NA"
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def stringInList_CaseInsens ( aString, aList ):
for s in aList:
u = s.upper()
if ( aString.upper() == u ): return ( 1 )
return ( 0 )
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def remapCategoricalFeatures(allClinDict):
print " "
print " in remapCategoricalFeatures "
keyList = allClinDict.keys()
keyList.sort()
for aKey in keyList:
aKey2 = aKey.lower()
if (aKey2 in remapDict.keys()):
numRemap = 0
print " "
print " looking at <%s> " % aKey2
tmpV = allClinDict[aKey]
# print " part of original vector : ", tmpV[:10]
newV = [0] * len(tmpV)
for kk in range(len(tmpV)):
bKey2 = tmpV[kk].lower()
if (bKey2.startswith("stage_")):
bKey2 = bKey2[6:]
if (bKey2.startswith("stage ")):
bKey2 = bKey2[6:]
                try:
                    newV[kk] = remapDict[aKey2][bKey2]
                    # compare via str(), so that numeric remap values (e.g.
                    # "g2" --> 2) can be checked against the lower-case
                    # source string without raising an AttributeError
                    if (str(newV[kk]).upper() != "NA"):
                        if (str(newV[kk]).lower() != bKey2):
                            numRemap += 1
                except:
                    if (0):
                        print " WARNING in remapCategoricalFeatures ... nothing to remap to ??? "
                        print " <%s> <%s> %d <%s> " % (aKey, aKey2, kk, bKey2)
                        print " <%s> " % remapDict[aKey2]
                    # sys.exit(-1)
                    newV[kk] = bKey2
if (numRemap > 0):
print " --> using remapped values for <%s> " % aKey
print " mapping dictionary : ", remapDict[aKey2]
print " part of original vector : ", tmpV[:10]
print " part of new vector : ", newV[:10]
allClinDict[aKey] = newV
return (allClinDict)
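# Usage sketch (illustrative only; not part of the original script): remap a
# tiny, made-up clinical dictionary in place.
def _demo_remapCategoricalFeatures():
    d = {"tumor_grade": ["G1", "g3", "GX", "NA"]}
    d = remapCategoricalFeatures(d)
    print d["tumor_grade"]    # expected: [1, 3, 'NA', 'NA']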
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def getNumPatients(allClinDict):
aKey = allClinDict.keys()[0]
return ( len(allClinDict[aKey]) )
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def findProperKey(allClinDict, aString):
keyList = allClinDict.keys()
foundList = []
for aKey in keyList:
if ( aKey.lower().find(aString.lower()) >=0 ):
foundList += [ aKey ]
if ( len(foundList) == 0 ):
return ( "NO KEY" )
elif ( len(foundList) == 1 ):
return ( foundList[0] )
else:
## look for a perfect match ...
for mString in foundList:
mTokens = mString.split(':')
if ( len(mTokens) == 1 ):
if ( mTokens[0].lower() == aString.lower() ):
return ( mString )
elif ( len(mTokens) > 2 ):
try:
if ( mTokens[2].lower() == aString.lower() ):
return ( mString )
except:
print " findProperKey: ERROR in try ??? ", mString
print foundList
print " "
print " ERROR in findProperKey ??? multiple matches "
print " but none of them are perfect matches ... "
print aString
print foundList
print " "
sys.exit(-1)
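# Usage sketch (illustrative only; not part of the original script): resolve a
# short feature name to the full TCGA-style feature ID (the key is made up).
def _demo_findProperKey():
    d = {"N:CLIN:days_to_death:::::": ["NA", 200]}
    print findProperKey(d, "days_to_death")    # --> "N:CLIN:days_to_death:::::"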
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def computeLymphnodesFraction(allClinDict):
aKey = findProperKey ( allClinDict, "number_of_lymphnodes_positive_by_he" )
bKey = findProperKey ( allClinDict, "number_of_lymphnodes_examined" )
if (aKey not in allClinDict.keys()):
print " "
print " skipping computeLymphnodesFraction "
return (allClinDict)
if (bKey not in allClinDict.keys()):
print " "
print " skipping computeLymphnodesFraction "
return (allClinDict)
print " "
print " in computeLymphnodesFraction ... "
numClin = getNumPatients(allClinDict)
newV = [0] * numClin
for kk in range(numClin):
if (allClinDict[bKey][kk] == "NA"):
newV[kk] = "NA"
elif (allClinDict[aKey][kk] == "NA"):
newV[kk] = "NA"
elif (int(allClinDict[bKey][kk]) == 0):
newV[kk] = "NA"
else:
newV[kk] = float(allClinDict[aKey][kk]) / \
float(allClinDict[bKey][kk])
allClinDict["N:SAMP:fraction_lymphnodes_positive_by_he:::::"] = newV
return (allClinDict)
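# Usage sketch (illustrative only; not part of the original script): made-up
# lymph-node counts; zero examined nodes and NA counts both yield "NA".
def _demo_computeLymphnodesFraction():
    d = {"N:CLIN:number_of_lymphnodes_positive_by_he:::::": [2, 0, "NA"],
         "N:CLIN:number_of_lymphnodes_examined:::::": [10, 0, 5]}
    d = computeLymphnodesFraction(d)
    print d["N:SAMP:fraction_lymphnodes_positive_by_he:::::"]    # [0.2, 'NA', 'NA']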
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def addTag2Key ( aKey, aTag ):
aTokens = aKey.split(':')
if ( len(aTokens) >= 7 ):
newKey = aTokens[0] + ':' + aTokens[1] + ':' + aTokens[2]
if ( aTag[0] == "_" ):
newKey += aTag
else:
newKey += "_" + aTag
for ii in range(3,len(aTokens)):
newKey += ":" + aTokens[ii]
else:
newKey = aKey
if ( aTag[0] == "_" ):
newKey += aTag
else:
newKey += "_" + aTag
return ( newKey )
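# Worked example (derived from the code above): the tag is appended to the
# feature-name token while the remaining ':'-separated fields are preserved:
#   addTag2Key("N:CLIN:days_to_birth:::::", "relSS")
#     --> "N:CLIN:days_to_birth_relSS:::::"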
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def checkBarcodes(allClinDict):
zKey = findProperKey (allClinDict, "bcr_patient_barcode" )
numClin = getNumPatients(allClinDict)
for ii in range(numClin):
if ( allClinDict[zKey][ii].find("_") >= 0 ):
print " BAD barcode !!! ", ii, allClinDict[zKey][ii]
sys.exit(-1)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
# fields of interest:
# days_to_birth
# days_to_initial_pathologic_diagnosis <-- this is always 0
# days_to_submitted_specimen_dx
# days_to_last_followup
# days_to_last_known_alive
# days_to_death
# also:
# new_tumor_event_after_initial_treatment
# days_to_new_tumor_event_after_initial_treatment
def addFollowupInfo(allClinDict):
print " "
print " in addFollowupInfo ... "
# ------------------------------------------------------------------------
# FIRST: if there is a days_to_last_known_alive, then check that it is
# used consistently, otherwise create it
zKey = findProperKey (allClinDict, "bcr_patient_barcode")
aKey = findProperKey (allClinDict, "days_to_last_known_alive")
bKey = findProperKey (allClinDict, "days_to_last_followup")
cKey = findProperKey (allClinDict, "days_to_death")
haveA = (aKey in allClinDict.keys())
haveB = (bKey in allClinDict.keys())
haveC = (cKey in allClinDict.keys())
print " "
print " STEP #1 "
print " have flags A, B, and C : ", haveA, haveB, haveC
## print " allClinDict.keys() : "
## print allClinDict.keys()
if (haveA):
# if we have the "days_to_last_known_alive" field, check that it
# is consistent with the other two fields ...
numClin = getNumPatients(allClinDict)
numNotNA = 0
for kk in range(numClin):
## if we have a 'days_to_death' field and it is not NA, then set 'days_to_last_known_alive' to that value
if (haveC):
if (str(allClinDict[cKey][kk]).upper() != "NA"):
allClinDict[aKey][kk] = allClinDict[cKey][kk]
## if we have a 'days_to_last_followup' field and it is not NA, then ...
if (haveB):
if (str(allClinDict[bKey][kk]).upper() != "NA"):
if (str(allClinDict[aKey][kk]).upper() == "NA"):
allClinDict[aKey][kk] = allClinDict[bKey][kk]
if (str(allClinDict[aKey][kk]).upper() != "NA"):
numNotNA += 1
print " UPDATED days_to_last_known_alive (%d) : " % numNotNA
print allClinDict[aKey]
else:
# create it ...
if ( aKey == "NO KEY" ): aKey = "N:CLIN:days_to_last_known_alive:::::"
numClin = getNumPatients(allClinDict)
newVec = [0] * numClin
numNotNA = 0
for kk in range(numClin):
newVec[kk] = "NA"
if (haveC):
if (str(allClinDict[cKey][kk]).upper() != "NA"):
newVec[kk] = allClinDict[cKey][kk]
if (haveB):
if (str(allClinDict[bKey][kk]).upper() != "NA"):
if (str(newVec[kk]).upper() == "NA"):
newVec[kk] = allClinDict[bKey][kk]
if (str(newVec[kk]).upper() != "NA"):
numNotNA += 1
print " NEW days_to_last_known_alive (%d) : " % numNotNA
## print newVec
allClinDict[aKey] = newVec
# ------------------------------------------------------------------------
# SECOND: if there is a "days_to_submitted_specimen_dx", then create
# a set of "days_to_" features that instead of being relative
# to "initial_pathologic_diagnosis" are relative to "submitted_specimen"
print " "
print " STEP #2 "
aKey = findProperKey (allClinDict, "days_to_submitted_specimen_dx")
tKey = findProperKey (allClinDict, "days_to_initial_pathologic_diagnosis")
if (aKey in allClinDict.keys()):
haveA = 1
else:
print " do not have [days_to_submitted_specimen_dx] in allClinDict "
haveA = 0
if (tKey in allClinDict.keys()):
haveT = 1
else:
print " do not have [days_to_initial_pathologic_diagnosis] in allClinDict "
haveT = 0
try:
numClin = getNumPatients(allClinDict)
for bKey in allClinDict.keys():
if (haveA == 0): continue
if (bKey == aKey): continue
if (bKey.find("days_to_") >= 0):
newKey = addTag2Key ( bKey, "relSS" )
print " --> making newKey <%s> from bKey <%s> [%d] " % (newKey, bKey, numClin)
newVec = [0] * numClin
numNotNA = 0
for kk in range(numClin):
## initialize to NA
newVec[kk] = "NA"
## skip if an important value is NA
if (str(allClinDict[aKey][kk]).upper() == "NA"): continue
if (str(allClinDict[bKey][kk]).upper() == "NA"): continue
if (haveT):
if (str(allClinDict[tKey][kk]).upper() == "NA"): continue
## deltaDays is either (days_to_submitted_specimen_dx) - (days_to_initial_pathologic_diagnosis)
## or just (days_to_submitted_specimen_dx)
if (haveT):
deltaDays = allClinDict[aKey][kk] - allClinDict[tKey][kk]
else:
deltaDays = allClinDict[aKey][kk]
## and then we subtract 'delta days' from the original key to make the new relative key
newVec[kk] = allClinDict[bKey][kk] - deltaDays
print " STEP2a ", kk, allClinDict[zKey][kk], allClinDict[bKey][kk], allClinDict[aKey][kk], deltaDays, newVec[kk]
numNotNA += 1
if ( numNotNA > 30 ):
print " adding new key (%d) : " % numNotNA, newKey
## print newVec[:5]
## print newVec[-5:]
allClinDict[newKey] = newVec
else:
print " NOT adding new key (%d) : ", numNotNA, newKey
if (bKey.find("age_at_") >= 0):
## make sure that this is not a "stage_at_" feature !!!
if ( bKey.find("stage_at_") >= 0 ): continue
newKey = addTag2Key ( bKey, "relSS" )
print " --> making newKey <%s> from bKey <%s> [%d] " % (newKey, bKey, numClin)
newVec = [0] * numClin
numNotNA = 0
for kk in range(numClin):
## initialize to NA
newVec[kk] = "NA"
## skip if an important value is NA
if (str(allClinDict[aKey][kk]).upper() == "NA"): continue
if (str(allClinDict[bKey][kk]).upper() == "NA"): continue
if (haveT):
if (str(allClinDict[tKey][kk]).upper() == "NA"): continue
## deltaDays is either (days_to_submitted_specimen_dx) - (days_to_initial_pathologic_diagnosis)
## or just (days_to_submitted_specimen_dx)
if (haveT):
deltaDays = allClinDict[aKey][kk] - allClinDict[tKey][kk]
else:
deltaDays = allClinDict[aKey][kk]
## and then we subtract 'delta days' from the original key to make the new relative key
## 04mar14 : actually we need to ADD here because "age" should go UP with deltaDays ...
newVec[kk] = allClinDict[bKey][kk] + ( float(deltaDays) / DAYS_PER_YEAR )
print " STEP2b ", kk, allClinDict[zKey][kk], allClinDict[bKey][kk], allClinDict[aKey][kk], deltaDays, newVec[kk]
numNotNA += 1
if ( numNotNA > 30 ):
print " adding new key (%d) : " % numNotNA, newKey
## print newVec[:5]
## print newVec[-5:]
allClinDict[newKey] = newVec
else:
print " NOT adding new key (%d) : ", numNotNA, newKey
except:
print " --> failed in this try (x) "
doNothing = 1
# ------------------------------------------------------------------------
# THIRD: if there is a "days_to_sample_procurement", then create
# a set of "days_to_" features that instead of being relative
# to "initial_pathologic_diagnosis" are relative to "sample_procurement
print " "
print " STEP #3 "
aKey = findProperKey (allClinDict, "days_to_sample_procurement")
tKey = findProperKey (allClinDict, "days_to_initial_pathologic_diagnosis")
if (aKey in allClinDict.keys()):
haveA = 1
else:
print " do not have [days_to_sample_procurement] in allClinDict "
haveA = 0
if (tKey in allClinDict.keys()):
haveT = 1
else:
haveT = 0
print " do not have a [days_to_initial_pathologic_diagnosis] key "
try:
numClin = getNumPatients(allClinDict)
for bKey in allClinDict.keys():
if (haveA == 0): continue
if (bKey == aKey): continue
if (bKey.find("days_to_") >= 0):
## make sure that this is not one of the relSS features just added !!!
if ( bKey.find("relSS") >= 0 ): continue
newKey = addTag2Key ( bKey, "relSP" )
print " --> making newKey <%s> from bKey <%s> [%d] " % (newKey, bKey, numClin)
newVec = [0] * numClin
numNotNA = 0
for kk in range(numClin):
## initialize to NA
newVec[kk] = "NA"
## skip if an important value is NA
if (str(allClinDict[aKey][kk]).upper() == "NA"): continue
if (str(allClinDict[bKey][kk]).upper() == "NA"): continue
if (haveT):
if (str(allClinDict[tKey][kk]).upper() == "NA"): continue
## deltaDays is either (days_to_sample_procurement) - (days_to_initial_pathologic_diagnosis)
## or just (days_to_sample_procurement)
if (haveT):
deltaDays = allClinDict[aKey][kk] - allClinDict[tKey][kk]
else:
deltaDays = allClinDict[aKey][kk]
## and then we subtract 'delta days' from the original key to make the new relative key
newVec[kk] = allClinDict[bKey][kk] - deltaDays
print " STEP3a ", kk, allClinDict[zKey][kk], allClinDict[bKey][kk], allClinDict[aKey][kk], deltaDays, newVec[kk]
numNotNA += 1
if ( numNotNA > 30 ):
print " adding new key (%d) : " % numNotNA, newKey
## print newVec[:5]
## print newVec[-5:]
allClinDict[newKey] = newVec
else:
print " NOT adding new key (%d) : ", numNotNA, newKey
if (bKey.find("age_at_") >= 0):
## make sure that this is not one of the relSS features just added !!!
if ( bKey.find("relSS") >= 0 ): continue
## make sure that this is not a "stage_at_" feature !!!
if ( bKey.find("stage_at_") >= 0 ): continue
newKey = addTag2Key ( bKey, "relSP" )
print " --> making newKey <%s> from bKey <%s> [%d] " % (newKey, bKey, numClin)
newVec = [0] * numClin
numNotNA = 0
for kk in range(numClin):
## initialize to NA
newVec[kk] = "NA"
## skip if an important value is NA
print " checking for important information ... ", aKey, bKey, tKey
print allClinDict[aKey][kk]
print allClinDict[bKey][kk]
if (str(allClinDict[aKey][kk]).upper() == "NA"): continue
if (str(allClinDict[bKey][kk]).upper() == "NA"): continue
if (haveT):
print allClinDict[tKey][kk]
if (str(allClinDict[tKey][kk]).upper() == "NA"): continue
## deltaDays is either (days_to_sample_procurement) - (days_to_initial_pathologic_diagnosis)
## or just (days_to_sample_procurement)
if (haveT):
deltaDays = allClinDict[aKey][kk] - allClinDict[tKey][kk]
else:
deltaDays = allClinDict[aKey][kk]
print " computed deltaDays : ", deltaDays
## and then we subtract 'delta days', scaled to years ...
## 03mar14 : actually we need to ADD here ...
newVec[kk] = allClinDict[bKey][kk] + ( float(deltaDays) / DAYS_PER_YEAR )
print " STEP3b ", kk, allClinDict[zKey][kk], allClinDict[bKey][kk], allClinDict[aKey][kk], deltaDays, newVec[kk]
numNotNA += 1
if ( numNotNA > 30 ):
print " adding new key (%d) : " % numNotNA, newKey
## print newVec[:5]
## print newVec[-5:]
allClinDict[newKey] = newVec
else:
print " NOT adding new key (%d) : ", numNotNA, newKey
except:
print " --> failed in this try (y) "
doNothing = 1
return (allClinDict)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
# fields of interest:
# days_to_birth
# days_to_initial_pathologic_diagnosis <-- this is always 0
# days_to_submitted_specimen_dx
# days_to_last_followup
# days_to_last_known_alive
# days_to_death
# also:
# new_tumor_event_after_initial_treatment
# days_to_new_tumor_event_after_initial_treatment
def checkFollowupInfo(allClinDict):
print " "
print " in checkFollowupInfo ... "
# FIRST: if there is a days_to_last_known_alive, then check that it is
# used consistently, otherwise create it
zKey = findProperKey (allClinDict, "bcr_patient_barcode")
aKey = findProperKey (allClinDict, "days_to_last_known_alive")
bKey = findProperKey (allClinDict, "days_to_last_followup")
cKey = findProperKey (allClinDict, "days_to_death")
dKey = findProperKey (allClinDict, "vital_status")
haveA = (aKey in allClinDict.keys())
haveB = (bKey in allClinDict.keys())
haveC = (cKey in allClinDict.keys())
haveD = (dKey in allClinDict.keys())
print " have flags A, B, C and D : ", haveA, haveB, haveC, haveD
if ( not haveD ):
print " skipping this function ... requires vital_status "
return (allClinDict)
## print " allClinDict.keys() : "
## print allClinDict.keys()
numClin = getNumPatients(allClinDict)
# range of days_to_last_known_alive is typically something like [0,3196]
for kk in range(numClin):
if (str(allClinDict[dKey][kk]).upper() == "DEAD"):
if (str(allClinDict[cKey][kk]).upper() == "NA"):
print " ERROR !!! need to know when this person died !!! ", allClinDict[zKey][kk]
print kk
print aKey, allClinDict[aKey][kk]
print bKey, allClinDict[bKey][kk]
print cKey, allClinDict[cKey][kk]
print dKey, allClinDict[dKey][kk]
print " UPDATING vital_status to Alive ... "
print " "
## because we do not have a days_to_death value, we are going to call this person "Alive"
allClinDict[dKey][kk] = "Alive"
if (str(allClinDict[dKey][kk]).upper() == "ALIVE"):
if (str(allClinDict[aKey][kk]).upper() == "NA"):
if (str(allClinDict[bKey][kk]).upper() == "NA"):
print " ERROR !!! no information about follow-up ??? ", allClinDict[zKey][kk]
print kk
print aKey, allClinDict[aKey][kk]
print bKey, allClinDict[bKey][kk]
print cKey, allClinDict[cKey][kk]
print dKey, allClinDict[dKey][kk]
print " UPDATING days_to_last_known_alive and days_to_last_followup to 0 "
print " "
allClinDict[aKey][kk] = 0
allClinDict[bKey][kk] = 0
else:
print " ERROR in checkFollowupInfo ... how did we get here ??? "
sys.exit(-1)
return (allClinDict)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
# derive the preferred tumor stage from the comparison of the
# reported one with the derived one
def PreferredStage(reported, computed):
t = testTumorStage(reported, computed)
if (t == "AGREE"):
return(reported)
if (t == "Stage cannot be derived from TNM"):
return(reported)
if (t == "Derived stage is more specific"):
return(repStage(computed)) # For SupTab1 use return(computed)
if (t == "Stage can be derived from TNM"):
return(repStage(computed)) # For SupTab1 use return(computed)
if (t == "Stage more specific than TNM"):
return(reported)
if (t == "DISAGREE"):
return(reported) # assuming the reported one to be valid!
return("Error: Lack a preferred stage")
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
# Return first element of a vector, or if input is string, the string itself
def repStage(substage):
if (type(substage) is str):
return(substage)
if (type(substage) is list):
return(substage[0])
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
# Characterize the difference between reported and inferred tumor stage
def testTumorStage(reported, computed):
# Agreement includes "in" relationship and NA equivalence
if (type(computed) is list):
if (reported in computed):
return("AGREE")
if (type(computed) is str):
if (reported == computed):
return("AGREE")
if (((reported == "STAGE IVA") | (reported == "STAGE IVB")) & (computed == "STAGE IV")):
return("Stage more specific than TNM")
if ((reported == "NA") & (computed != "NA")):
return("Stage can be derived from TNM")
if ((reported != "NA") & (computed == "NA")):
return("Stage cannot be derived from TNM")
if (repStage(computed).startswith(reported)):
return("Derived stage is more specific")
return("DISAGREE")
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
# Derive Tumor Stage from TNM and AJCC table, 7th edition
# sometimes if we get something like "M1" when it should actually be "M1A"
# or "M1B", we will pick the first/lowest thing that it could be ...
def getTumorStage(T, N, M):
print " WARNING ... this function should NOT be called ... "
sys.exit(-1)
T = T.upper()
N = N.upper()
M = M.upper()
if (M == "M1"):
# Seems to be TCGA choice if IVA, IVB not specified
return ("STAGE IV")
if (M == "M1A"):
return ("STAGE IVA")
if (M == "M1B"):
return ("STAGE IVB")
if (T == "TX"):
T = "NA"
if (N == "NX"):
N = "NA"
if (M == "MX"):
M = "NA"
if (T == "NA" or N == "NA" or M == "NA"):
return ("NA")
if (T == "T0" and N == "N0" and M == "M0"):
return ("STAGE 0")
if (T == "Tis" and N == "N0" and M == "M0"):
return ("STAGE 0")
if (T == "T1" and N == "N0" and M == "M0"):
return ("STAGE I")
if (T == "T1A" and N == "N0" and M == "M0"):
return ("STAGE I")
if (T == "T1B" and N == "N0" and M == "M0"):
return ("STAGE I")
if (T == "T2" and N == "N0" and M == "M0"):
return ("STAGE I")
if (T == "T2A" and N == "N0" and M == "M0"):
return ("STAGE I")
if (T == "T2B" and N == "N0" and M == "M0"):
return ("STAGE I")
if (T == "T3" and N == "N0" and M == "M0"):
return ("STAGE IIA")
if (T == "T3A" and N == "N0" and M == "M0"):
return ("STAGE IIA")
if (T == "T3B" and N == "N0" and M == "M0"):
return ("STAGE IIB")
if (T == "T3C" and N == "N0" and M == "M0"):
return ("STAGE IIB")
if (T == "T4A" and N == "N0" and M == "M0"):
return ("STAGE IIB")
if (T == "T4B" and N == "N0" and M == "M0"):
return ("STAGE IIC")
if (T == "T4" and N == "N0" and M == "M0"):
return (["STAGE IIB", "STAGE IIC"])
if (T == "T1" and N == "N1" and M == "M0"):
return ("STAGE IIIA")
if (T == "T1A" and N == "N1" and M == "M0"):
return ("STAGE IIIA")
if (T == "T1B" and N == "N1" and M == "M0"):
return ("STAGE IIIA")
if (T == "T1" and N == "N1A" and M == "M0"):
return ("STAGE IIIA")
if (T == "T1" and N == "N1B" and M == "M0"):
return ("STAGE IIIA")
if (T == "T1" and N == "N1C" and M == "M0"):
return ("STAGE IIIA")
if (T == "T1" and N == "N2" and M == "M0"):
return (["STAGE IIIA", "STAGE IIIB"]) # CHOICE IIIA, IIIB
if (T == "T1B" and N == "N2" and M == "M0"):
return ("STAGE IIIB")
if (T == "T1" and N == "N2A" and M == "M0"):
return ("STAGE IIIA")
if (T == "T1" and N == "N3" and M == "M0"):
return ("STAGE IIIA")
if (T == "T2" and N == "N1" and M == "M0"):
return ("STAGE IIIA")
if (T == "T2A" and N == "N1" and M == "M0"):
return ("STAGE IIIA")
if (T == "T2B" and N == "N1" and M == "M0"):
return ("STAGE IIIA")
if (T == "T2" and N == "N1A" and M == "M0"):
return ("STAGE IIIA")
if (T == "T2" and N == "N1B" and M == "M0"):
return ("STAGE IIIA")
if (T == "T2" and N == "N1C" and M == "M0"):
return ("STAGE IIIA")
if (T == "T3" and N == "N1" and M == "M0"):
return ("STAGE IIIB")
if (T == "T3A" and N == "N1" and M == "M0"):
return ("STAGE IIIB")
if (T == "T3B" and N == "N1" and M == "M0"):
return ("STAGE IIIB")
if (T == "T3" and N == "N1A" and M == "M0"):
return ("STAGE IIIB")
if (T == "T3" and N == "N1B" and M == "M0"):
return ("STAGE IIIB")
if (T == "T3" and N == "N1C" and M == "M0"):
return ("STAGE IIIB")
if (T == "T4A" and N == "N1" and M == "M0"):
return ("STAGE IIIB")
if (T == "T4A" and N == "N1A" and M == "M0"):
return ("STAGE IIIB")
if (T == "T4A" and N == "N1B" and M == "M0"):
return ("STAGE IIIB")
if (T == "T4A" and N == "N1C" and M == "M0"):
return ("STAGE IIIB")
if (T == "T4" and N == "N1" and M == "M0"):
return (["STAGE IIIB", "STAGE IIIC"])
if (T == "T2" and N == "N2" and M == "M0"):
return ("STAGE IIIB")
if (T == "T2A" and N == "N2" and M == "M0"):
return ("STAGE IIIB")
if (T == "T2B" and N == "N2" and M == "M0"):
return ("STAGE IIIB")
if (T == "T2" and N == "N2A" and M == "M0"):
return ("STAGE IIIB")
if (T == "T2" and N == "N2B" and M == "M0"):
return ("STAGE IIIB")
if (T == "T2" and N == "N2C" and M == "M0"):
return ("STAGE IIIC")
if (T == "T2" and N == "N3" and M == "M0"):
return ("STAGE IIIC")
if (T == "T2B" and N == "N3" and M == "M0"):
return ("STAGE IIIC")
if (T == "T3" and N == "N2" and M == "M0"):
return (["STAGE IIIB", "STAGE IIIC"]) # CHOICE IIIB, IIIC
if (T == "T3" and N == "N2A" and M == "M0"):
return ("STAGE IIIB")
if (T == "T3" and N == "N2C" and M == "M0"):
return ("STAGE IIIC")
if (T == "T1" and N == "N2B" and M == "M0"):
return ("STAGE IIIB")
if (T == "T3" and N == "N2B" and M == "M0"):
return ("STAGE IIIC")
if (T == "T3" and N == "N3" and M == "M0"):
return ("STAGE IIIC")
if (T == "T4" and N == "N2" and M == "M0"):
return ("STAGE IIIC")
if (T == "T4A" and N == "N2" and M == "M0"):
return ("STAGE IIIC")
if (T == "T4A" and N == "N2A" and M == "M0"):
return ("STAGE IIIC")
if (T == "T4A" and N == "N2B" and M == "M0"):
return ("STAGE IIIC")
if (T == "T4A" and N == "N2C" and M == "M0"):
return ("STAGE IIIC")
if (T == "T4" and N == "N3" and M == "M0"):
return ("STAGE IIIC")
if (T == "T4" and N == "N3A" and M == "M0"):
return ("STAGE IIIC")
if (T == "T4A" and N == "N3" and M == "M0"):
return ("STAGE IIIC")
if (T == "T4B" and N == "N1" and M == "M0"):
return ("STAGE IIIC")
if (T == "T4B" and N == "N2" and M == "M0"):
return ("STAGE IIIC")
if (T == "T4B" and N == "N1A" and M == "M0"):
return ("STAGE IIIC")
if (T == "T4B" and N == "N1B" and M == "M0"):
return ("STAGE IIIC")
if (T == "T4B" and N == "N1C" and M == "M0"):
return ("STAGE IIIC")
if (T == "T4B" and N == "N2C" and M == "M0"):
return ("STAGE IIIC")
if (T == "T4B" and N == "N2A" and M == "M0"):
return ("STAGE IIIC")
if (T == "T4B" and N == "N2B" and M == "M0"):
return ("STAGE IIIC")
if (T == "T4B" and N == "N2B" and M == "M0"):
return ("STAGE IIIC")
if (T == "T4B" and N == "N3A" and M == "M0"):
return ("STAGE IIIC")
# We reach this point if all values are non-NA, but combination is not in
# AJCC tumor table
print " ERROR in getTumorStage ??? ", T, N, M
return ("Not in AJCC Table?")
sys.exit(-1)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def checkTumorStage(allClinDict):
if ("tumor_stage" not in allClinDict.keys()):
print " skipping checkTumorStage ... "
return (allClinDict)
else:
print " running checkTumorStage ... "
numClin = getNumPatients(allClinDict)
print " total number of patients : ", numClin
if (0):
stringList = ["tumor", "stage", "spread"]
for aKey in allClinDict.keys():
for aString in stringList:
if (aKey.find(aString) >= 0):
print aKey
reqKeyList = [
"bcr_patient_barcode", "tumor_stage", "primary_tumor_pathologic_spread",
"lymphnode_pathologic_spread", "distant_metastasis_pathologic_spread"]
numNotFound = 0
for aKey in reqKeyList:
if (aKey not in allClinDict.keys()):
numNotFound += 1
if (numNotFound > 0):
print " skipping checkTumorStage ... "
return (allClinDict)
pKey = getProperKey ( allClinDict, "bcr_patient_barcode" )
sKey = getProperKey ( allClinDict, "tumor_stage" )
tKey = getProperKey ( allClinDict, "primary_tumor_pathologic_spread" )
nKey = getProperKey ( allClinDict, "lymphnode_pathologic_spread" )
mKey = getProperKey ( allClinDict, "distant_metastasis_pathologic_spread" )
for ii in range(numClin):
aCode = allClinDict[pKey][ii]
curTumorStage = allClinDict[sKey][ii]
curT = allClinDict[tKey][ii]
curN = allClinDict[nKey][ii]
curM = allClinDict[mKey][ii]
# print " checking tumor stage for <%s> <%s> <%s> <%s> <%s> " % (
# aCode, curTumorStage, curN, curM, curT )
## removing this 15aug2014 ...
if ( 0 ):
curTumorStage = curTumorStage.upper()
curTumorStage = curTumorStage.strip()
if (curTumorStage != "NA"):
if (not curTumorStage.startswith("STAGE ")):
curTumorStage = "STAGE " + curTumorStage
# as of 09nov12, NOT attempting to derive tumor stage from T, N, and M
if (0):
# get AJCC-derived tumor stage, compare to DCC value, and decide
# which to use
ajccStage = getTumorStage(curT, curN, curM)
newStage = PreferredStage(curTumorStage, ajccStage)
allClinDict[sKey][ii] = newStage
# report
if (type(ajccStage) is list):
ajccString = ' OR '.join(ajccStage)
else:
ajccString = ajccStage
print aCode.upper() + ', TNM:' + curT.upper() + ' ' + curN.upper() + ' ' + curM.upper() + ', DCC Stage:' \
+ curTumorStage + ', AJCC Stage:' + ajccString + ', Comparison:' \
+ \
testTumorStage(curTumorStage, ajccStage) + \
', Will use: ' + newStage
return (allClinDict)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def checkVitalStatus(allClinDict):
vsKey = findProperKey ( allClinDict, "vital_status" )
bcKey = findProperKey ( allClinDict, "bcr_patient_barcode" )
if ( vsKey == "NO KEY" ):
print " skipping checkVitalStatus ... "
return (allClinDict)
print " running checkVitalStatus ... "
numClin = getNumPatients(allClinDict)
print " total number of patients : ", numClin
numLC = 0
numDC = 0
for ii in range(numClin):
aCode = allClinDict[bcKey][ii]
curStatus = allClinDict[vsKey][ii]
doChange = 1
newStatus = curStatus
## map numeric status codes (0 / 1) directly to the standard strings;
## values that are not numeric codes pass through unchanged
try:
    if (int(curStatus) == 0):
        newStatus = "Alive"
        numLC += 1
    elif (int(curStatus) == 1):
        newStatus = "Dead"
        numDC += 1
except:
    doNothing = 1
if (doChange):
    allClinDict[vsKey][ii] = newStatus
if (numLC + numDC > 0):
print " WARNING: changed some vital status fields ... %d %d " % (numLC, numDC)
return (allClinDict)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def updateAge(allClinDict):
pKey = findProperKey ( allClinDict, "bcr_patient_barcode" )
aKey = findProperKey ( allClinDict, "age_at_initial_pathologic_diagnosis" )
bKey = findProperKey ( allClinDict, "days_to_birth" )
print " running updateAge ... "
numClin = getNumPatients(allClinDict)
print " total number of patients : ", numClin
for ii in range(numClin):
try:
aCode = allClinDict[pKey][ii]
curAge = allClinDict[aKey][ii]
curD2B = allClinDict[bKey][ii]
newAge = float(0 - int(curD2B)) / DAYS_PER_YEAR
# now we want to limit the 'precision' to two decimal places
newAge = float(int((100. * newAge) + 0.49)) / 100.
if (abs(curAge - int(newAge)) > 0):
print " ERROR in updateAge ??? ", curAge, curD2B, newAge, aCode
allClinDict[aKey][ii] = newAge
except:
doNothing = 1
return (allClinDict)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def removeParens(oneKey):
print " in removeParens ... "
origUniqList = []
newUniqList = []
newKey = []
for aLabel in oneKey:
if (aLabel not in origUniqList):
origUniqList += [aLabel]
if (aLabel.find("(") >= 0):
print " --> found open paren ... at %d in <%s> " % (aLabel.find("("), aLabel)
bLabel = ""
copyOn = 1
for ii in range(len(aLabel)):
if (aLabel[ii] == "("):
copyOn = 0
if (copyOn):
bLabel += aLabel[ii]
if (aLabel[ii] == ")"):
copyOn = 1
if (bLabel.startswith("_")):
bLabel = bLabel[1:]
if (bLabel.endswith("_")):
bLabel = bLabel[:-1]
newKey += [bLabel]
if (bLabel not in newUniqList):
newUniqList += [bLabel]
else:
newKey += [aLabel]
if (aLabel not in newUniqList):
newUniqList += [aLabel]
print origUniqList
print newUniqList
if (len(newUniqList) == len(origUniqList)):
print " --> removing parenthetical strings "
print origUniqList
print newUniqList
return (newKey)
else:
print " NOT removing parenthetical strings "
return (oneKey)
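## illustrative example (labels assumed):
## ["adenocarcinoma_(usual_type)", "squamous_(keratinizing)"] becomes
## ["adenocarcinoma", "squamous"] -- but only because the shortened labels
## remain unique; on a collision the original labels are kept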
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def removeSpecialChars(oneKey):
okExceptions = ['4+']
# ran into a strange case here where most of the elements were strings
# like 'grade_1' but some were just integers like 3 :(
# so ... these next few blocks of code are TOTAL HACKS ...
numInt = 0
numNot = 0
aString = ''
aInt = 9999
for ii in range(len(oneKey)):
aLabel = str(oneKey[ii])
if (aLabel.upper() == "NA"):
continue
if (aLabel.upper() == "UNKNOWN"):
oneKey[ii] = "NA"
continue
try:
iVal = int(aLabel)
numInt += 1
aInt = iVal
except:
numNot += 1
aString = aLabel
print " number of integers = %d number NOT = %d " % (numInt, numNot)
if (numInt > 0 and numNot > 0):
# for now, we are just checking for 'grade' strings that are sometimes
# 'grade_3' and sometimes just '3'
if (aString.lower().startswith("grade_")):
for ii in range(len(oneKey)):
aLabel = str(oneKey[ii])
if (aLabel.upper() == "NA"):
continue
if (not aLabel.lower().startswith("grade_")):
try:
iVal = int(aLabel)
aString = "Grade_%d" % iVal
oneKey[ii] = aString
except:
print " FAILED to prepend grade ??? ", aLabel
sys.exit(-1)
# or if there are at least twice as many strings as integers, then we
# will cast the integers to strings ...
elif (numInt < (numNot / 2)):
for ii in range(len(oneKey)):
aLabel = str(oneKey[ii])
if (aLabel.upper() == "NA"):
continue
try:
iVal = int(aLabel)
oneKey[ii] = str(iVal)
except:
doNothing = 1
elif (aString not in okExceptions):
if ( 1 ):
print " WARNING ... something odd about this feature ... ", aInt, aString
print oneKey
## return ([])
## sys.exit(-1)
origUniqList = []
newUniqList = []
newKey = []
for aLabel in oneKey:
if (aLabel not in origUniqList):
origUniqList += [aLabel]
bLabel = ""
try:
for ii in range(len(aLabel)):
if (aLabel[ii] == ' '):
bLabel += "_"
elif (aLabel[ii] == "'"):
bLabel += "_"
elif (aLabel[ii] == '"'):
bLabel += "_"
elif (aLabel[ii] == ':'):
bLabel += "_"
elif (aLabel[ii] == '/'):
bLabel += "_"
# elif ( aLabel[ii] == '-' ):
## bLabel += "_"
elif (aLabel[ii] == '.'):
bLabel += "_"
elif (aLabel[ii] == ','):
bLabel += "_"
else:
bLabel += aLabel[ii]
except:
print " ERROR in removeSpecialChars ??? "
print " oneKey = <%s> " % (oneKey)
print " aLabel = <%s> " % (aLabel)
sys.exit(-1)
ii = bLabel.find("__")
while (ii >= 0):
bLabel = bLabel[:ii] + bLabel[ii + 1:]
ii = bLabel.find("__")
newKey += [bLabel]
if (bLabel not in newUniqList):
newUniqList += [bLabel]
print origUniqList
print newUniqList
if (len(newUniqList) == len(origUniqList)):
return (newKey)
else:
print " NOT removing parenthetical strings "
return (oneKey)
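## illustrative example (label assumed): "poorly differentiated, g3" becomes
## "poorly_differentiated_g3" -- blanks, quotes, colons, slashes, dots and
## commas are mapped to "_" and runs of "__" are squeezed; if two labels
## collapse to the same string the original key is returned untouched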
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def getCommonPrefix(aLabel, bLabel):
    nn = 0
    ## stop at the end of the shorter string so we never index past it
    while (nn < len(aLabel) and nn < len(bLabel)):
        if (aLabel[nn].lower() != bLabel[nn].lower()):
            break
        nn += 1
    return (aLabel[:nn])
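## illustrative: getCommonPrefix("metastatic_site", "metastatic_organ") --> "metastatic_"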
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def getCommonSuffix(aLabel, bLabel):
    nn = -1
    ## walk backwards from the end, staying within the bounds of both strings
    while (-nn <= len(aLabel) and -nn <= len(bLabel)):
        if (aLabel[nn].lower() != bLabel[nn].lower()):
            break
        nn -= 1
    if (-nn > len(aLabel)):
        return (aLabel)
    if (-nn > len(bLabel)):
        return (bLabel)
    if (nn == -1):
        return ("")
    else:
        return (aLabel[nn + 1:])
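## illustrative: getCommonSuffix("x_stage", "y_stage") --> "_stage"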
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def removeCommonPrefix(oneKey, labelList):
print " in removeCommonPrefix : ", labelList
madeChange = 0
nLabel = len(labelList)
for ii in range(nLabel):
for jj in range(ii + 1, nLabel):
commonPrefix = getCommonPrefix(labelList[ii], labelList[jj])
# if the commonPrefix is *ever* the entire string, then
# we cannot really use this ...
if (commonPrefix == labelList[ii]):
continue
if (commonPrefix == labelList[jj]):
continue
if (len(commonPrefix) > 4):
print ii, jj, commonPrefix
newKey = []
for cLabel in oneKey:
if (cLabel.lower().startswith(commonPrefix)):
dLabel = cLabel[len(commonPrefix):]
if (len(dLabel) < 4):
dLabel = cLabel
else:
madeChange += 1
else:
dLabel = cLabel
if (dLabel[0] == '_'):
dLabel = dLabel[1:]
newKey += [dLabel]
newList = []
for cLabel in labelList:
if (cLabel.lower().startswith(commonPrefix)):
dLabel = cLabel[len(commonPrefix):]
if (len(dLabel) < 4):
dLabel = cLabel
else:
madeChange += 1
else:
dLabel = cLabel
if (dLabel[0] == '_'):
dLabel = dLabel[1:]
newList += [dLabel]
if (len(labelList) == len(newList)):
labelList = newList
oneKey = newKey
if (madeChange > 0):
print " after removeCommonPrefix : ", madeChange, labelList
return (oneKey, labelList)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def removeCommonSuffix(oneKey, labelList):
print " in removeCommonSuffix : ", labelList
madeChange = 0
nLabel = len(labelList)
for ii in range(nLabel):
for jj in range(ii + 1, nLabel):
commonSuffix = getCommonSuffix(labelList[ii], labelList[jj])
# if the commonSuffix is *ever* the entire string, then
# we cannot really use this ...
if (commonSuffix == labelList[ii]):
continue
if (commonSuffix == labelList[jj]):
continue
if (len(commonSuffix) > 4):
print ii, jj, commonSuffix
newKey = []
for cLabel in oneKey:
if (cLabel.lower().endswith(commonSuffix)):
dLabel = cLabel[:-len(commonSuffix)]
if (len(dLabel) < 4):
dLabel = cLabel
else:
madeChange += 1
else:
dLabel = cLabel
if (dLabel[-1] == '_'):
dLabel = dLabel[:-1]
newKey += [dLabel]
newList = []
for cLabel in labelList:
if (cLabel.lower().endswith(commonSuffix)):
dLabel = cLabel[:-len(commonSuffix)]
if (len(dLabel) < 4):
dLabel = cLabel
else:
madeChange += 1
else:
dLabel = cLabel
if (dLabel[-1] == '_'):
dLabel = dLabel[:-1]
newList += [dLabel]
if (len(labelList) == len(newList)):
labelList = newList
oneKey = newKey
if (0):
print " removeCommonSuffix has not yet been fully tested ... "
print labelList
print oneKey
sys.exit(-1)
if (madeChange > 0):
print " after removeCommonSuffix : ", madeChange, labelList
return (oneKey, labelList)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def abbrevCategStrings(allClinDict):
print " "
print " in abbrevCategStrings ... "
print " "
keyList = allClinDict.keys()
keyList.sort()
for aKey in keyList:
if (aKey.find("bcr_patient_barcode") >= 0):
print " all barcodes : "
print allClinDict[aKey]
print " done "
continue
(keyType, nCount, nNA, nCard, labelList, labelCount) = miscClin.lookAtKey(allClinDict[aKey])
print aKey, keyType, nCount, nNA
if (keyType == "NOMINAL"):
# remove weird characters from the strings ...
print " calling removeSpecialChars ... <%s> " % (aKey)
allClinDict[aKey] = removeSpecialChars(allClinDict[aKey])
# if we get nothing back, then skip ...
if (allClinDict[aKey] == []):
print " WARNING ... got nothing back ??? ", aKey
continue
# otherwise, look at cardinality, type, etc ...
(keyType, nCount, nNA, nCard, labelList, labelCount) = miscClin.lookAtKey(allClinDict[aKey])
maxLen = 0
skipFlag = 0
for aLabel in labelList:
try:
maxLen = max(maxLen, len(aLabel))
except:
print " what is up with this key ??? ", aKey, labelList
skipFlag = 1
if (skipFlag):
continue
if (maxLen > 10):
## print aKey, labelList, maxLen
# first try at making the labels a bit shorter by removing
# parenthetical elements ...
allClinDict[aKey] = removeParens(allClinDict[aKey])
(keyType, nCount, nNA, nCard, labelList,
labelCount) = miscClin.lookAtKey(allClinDict[aKey])
maxLen = 0
for aLabel in labelList:
maxLen = max(maxLen, len(aLabel))
## print aKey, labelList, maxLen
# removing this step for now (04dec12)
if (0):
# next try to remove common prefixes or suffixes ...
if (maxLen > 10):
(allClinDict[aKey], labelList) = removeCommonPrefix(
allClinDict[aKey], labelList)
(allClinDict[aKey], labelList) = removeCommonSuffix(
allClinDict[aKey], labelList)
maxLen = 0
for aLabel in labelList:
maxLen = max(maxLen, len(aLabel))
## print aKey, labelList, maxLen
if (maxLen > 25):
print " --> strings are still rather long, but not sure what to do about this ... "
print labelList, maxLen
print " "
print " "
return (allClinDict)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def checkPrefix(labelList, aPrefix):
nLabel = len(labelList)
nHas = 0
for aLabel in labelList:
bLabel = aLabel.upper()
if (bLabel == "NA"):
nLabel -= 1
continue
if (bLabel.startswith(aPrefix)):
nHas += 1
if ((nHas + 2) >= nLabel): return (1)
return (0)
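## returns 1 when all but (at most) two of the non-NA labels start with
## aPrefix, i.e. the prefix is allowed to be missing from a couple of labels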
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def translateArabic(aLabel, usePrefix):
# print " in translateArabic ... "
pLen = len(usePrefix)
bLabel = aLabel.upper()
if (bLabel.startswith(usePrefix)):
bLabel = bLabel[pLen:]
if (bLabel[0] == "_"):
bLabel = bLabel[1:]
bLen = len(bLabel)
found = 0
for iLen in range(bLen, 0, -1):
# print found, iLen, bLabel[:iLen]
if (not found):
try:
curN = int(bLabel[:iLen])
found = 1
except:
doNothing = 1
if (not found):
# X means that it could not be assessed, so returning NA
if (bLabel == "X"):
return ("NA")
# B means 'borderline' but returning NA ...
elif (bLabel == "B"):
return ("NA")
else:
print " ERROR ??? <%s> <%s> --> returning NA " % (bLabel, usePrefix)
return ("NA")
rLen = len(str(curN))
if (len(bLabel) > rLen):
bLabel = bLabel[rLen:]
if (bLabel == "A"):
curN += 0.2
elif (bLabel == "B"):
curN += 0.4
elif (bLabel == "C"):
curN += 0.6
else:
print " left over in translateArabic <%s> <%s> <%s> " % (bLabel, aLabel, usePrefix)
return (curN)
else:
return ("NA")
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def translateRoman(aLabel, usePrefix):
romanList = ["VIII", "III", "VII", "IV", "IX", "II", "VI", "I", "V", "X"]
numbrList = [8, 3, 7, 4, 9, 2, 6, 1, 5, 10]
pLen = len(usePrefix)
bLabel = aLabel.upper()
if (bLabel.startswith(usePrefix)):
bLabel = bLabel[pLen:]
if (bLabel[0] == "_"):
bLabel = bLabel[1:]
found = 0
for kk in range(len(romanList)):
if (not found):
if (bLabel.startswith(romanList[kk])):
found = 1
curKK = kk
curN = numbrList[kk]
curR = romanList[kk]
if (not found):
if (bLabel == "X"):
return ("NA")
elif (bLabel == "TIS"):
return ("NA")
else:
print " ERROR ??? ", bLabel, usePrefix
sys.exit(-1)
rLen = len(curR)
if (len(bLabel) > rLen):
bLabel = bLabel[rLen:]
if (bLabel == "A"):
curN += 0.2
elif (bLabel == "B"):
curN += 0.4
elif (bLabel == "C"):
curN += 0.6
else:
print " left over in translateRoman <%s> <%s> <%s> " % (bLabel, aLabel, usePrefix)
return (curN)
else:
return ("NA")
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def checkRomanNumerals(labelList, usePrefix):
skipList = ["_"]
stripList = ["A", "B", "C", "X", "0"]
romanList = ["I", "V", "X"]
pLen = len(usePrefix)
yesR = 0
notR = 0
for aLabel in labelList:
bLabel = aLabel.upper()
if (bLabel.startswith(usePrefix)):
bLabel = bLabel[pLen:]
if (bLabel[-1] in stripList):
bLabel = bLabel[:-1]
for ii in range(len(bLabel)):
if (bLabel[ii] in romanList):
yesR += 1
else:
if (bLabel[ii] not in skipList):
notR += 1
# print " in checkRomanNumerals : ", yesR, notR
if (notR == 0):
return (1)
if (notR > yesR):
return (0)
if (yesR > 0):
print " ??? strange counts in checkRomanNumerals ??? "
return (1)
return (0)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
# this function tries to create a numerical feature for a categorical
# feature ...
def addNumericalFeatures(allClinDict):
print " "
print " "
print " in addNumericalFeatures ... "
keyList = allClinDict.keys()
keyList.sort()
# CAREFUL: add on things only at the end of this list ...
prefixList = ["T", "N", "M", "STAGE", "GRADE", "G", "PT"]
nPrefix = len(prefixList)
prefixBits = [0] * nPrefix
for aKey in keyList:
if (aKey == "bcr_patient_barcode"):
continue
(keyType, nCount, nNA, nCard, labelList,
labelCount) = miscClin.lookAtKey(allClinDict[aKey])
if (keyType == "NOMINAL"):
if (nCard > 2 and nCard < 15):
tmpKey = aKey.lower()
if (tmpKey.find("stage") >= 0 or tmpKey.find("grade") >= 0 or
tmpKey.find("pathologic_spread") >= 0):
print " considering this categorical feature ... ", aKey, keyType, nCard, labelList, labelCount
for iP in range(nPrefix):
aPrefix = prefixList[iP]
prefixBits[iP] = checkPrefix(labelList, aPrefix)
# if the 'GRADE' bit gets set, then unset the 'G' bit
if (prefixBits[4]):
prefixBits[5] = 0
# print prefixBits
usePrefix = ""
if (sum(prefixBits) == 1):
for iP in range(nPrefix):
if (prefixBits[iP]):
usePrefix = prefixList[iP]
elif (sum(prefixBits) > 1):
print " ERROR ??? how can it have multiple prefix bits ON ??? "
sys.exit(-1)
# print " usePrefix <%s> " % usePrefix
isRoman = checkRomanNumerals(labelList, usePrefix)
# print " isRoman = %d " % isRoman
if (aKey[1] == ":"):
tokenList = aKey.split(':')
newLabel = "N:" + \
tokenList[1] + ":" + tokenList[2] + "_derived"
for ii in range(3, len(tokenList)):
newLabel += ":" + tokenList[ii]
else:
newLabel = "N:CLIN:" + aKey + "_derived"
if (newLabel in allClinDict.keys()):
print " this feature label already exists ??? ", newLabel
sys.exit(-1)
curV = allClinDict[aKey]
numClin = len(curV)
tmpV = [0] * numClin
for kk in range(numClin):
if (curV[kk] == "NA"):
tmpV[kk] = "NA"
elif (isRoman):
tmpV[kk] = translateRoman(curV[kk], usePrefix)
else:
tmpV[kk] = translateArabic(curV[kk], usePrefix)
if (0):
if (tmpV[kk] == 0):
print " why is tmpV[kk] still ZERO ??? ", kk, numClin, curV[kk], usePrefix, tmpV[kk]
numNA = 0
notNA = 0
for kk in range(numClin):
if (tmpV[kk] == "NA"):
numNA += 1
else:
notNA += 1
if (numNA > 10 * notNA):
print " --> NOT adding this new feature <%s> " % newLabel, list(set(tmpV)), numNA, notNA, usePrefix, isRoman
else:
print " --> ADDING new feature !!! <%s> " % newLabel, list(set(tmpV)), numNA, notNA, usePrefix, isRoman
allClinDict[newLabel] = tmpV
print " "
print " "
print " "
return (allClinDict)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def getMappingDict(featName, auxName):
mapDict = {}
if (featName[1] == ':'):
if (featName[6] == ':'):
tokenList = featName.split(':')
tmpFeatName = tokenList[2]
else:
tmpFeatName = featName
try:
mapFilename = "../" + auxName + "/" + tmpFeatName + ".map"
fh = open(mapFilename)
firstLine = 1
for aLine in fh:
aLine = aLine.strip()
## aLine = aLine.upper()
tokenList = aLine.split('\t')
if (firstLine):
if (tokenList[0].upper() == tmpFeatName.upper()):
numNew = len(tokenList) - 1
newNames = tokenList[1:]
print newNames
firstLine = 0
else:
print " ERROR ??? invalid mapping file ??? "
print mapFilename
print tokenList
print tmpFeatName
print " FAILING out of TRY "
sys.exit(-1)
else:
mapDict[str(tokenList[0])] = tokenList[1:]
fh.close()
print " mapping dictionary read from <%s> : " % mapFilename
print mapDict
print " "
return (mapDict, newNames)
except:
return (mapDict, [])
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def getMapping ( mapDict, curV, ii ):
for k in mapDict.keys():
if ( k.lower() == curV.lower() ):
return ( mapDict[k][ii] )
print " FAILED TO GET MAPPING ??? ", curV, ii
print mapDict
sys.exit(-1)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def addDerivedFeatures(allClinDict, auxName):
print " "
print " "
print " in addDerivedFeatures ... "
keyList = allClinDict.keys()
keyList.sort()
for aKey in keyList:
if (aKey == "bcr_patient_barcode"):
continue
(keyType, nCount, nNA, nCard, labelList, labelCount) = miscClin.lookAtKey(allClinDict[aKey])
print " considering key <%s> " % (aKey), keyType, nCard
if (keyType == "NOMINAL"):
if (nCard > 2 and nCard < 25):
tmpKey = aKey.lower()
tmpList = []
for aLabel in labelList:
tmpL = str(aLabel)
if (tmpL not in tmpList):
tmpList += [tmpL]
labelList = tmpList
if (1):
print " considering this categorical feature ... ", aKey, keyType, nCard, labelList, labelCount
(mapDict, newNames) = getMappingDict(aKey, auxName)
# if there is no mapping file, then we won't be making any
# new features ...
if (len(newNames) == 0):
continue
# sanity check ...
if (0):
for bKey in mapDict.keys():
if ( stringInList_CaseInsens ( bKey, labelList ) ):
print " ERROR ??? mapping does not match this feature ??? "
print mapDict
print labelList
sys.exit(-1)
if (1):
for bLabel in labelList:
try:
if ( not stringInList_CaseInsens ( bLabel, mapDict.keys() ) ):
print " ************************************************** "
print " ERROR ??? feature value not in mapDict ??? ", bLabel
print " labelList : ", labelList
print " mapDict : ", mapDict
print " --> WILL NOT ADD ANY DERIVED FEATURES AT THIS TIME "
print " ************************************************** "
continue
# sys.exit(-1)
except:
doNothing = 1
# if there is no mapping file, then we won't be making any
# new features ...
if (len(newNames) == 0):
continue
# but if we do have one or more mappings, then we need
# to create those features ...
for ithName in range(len(newNames)):
aName = newNames[ithName]
print " looping over %d mappings ... " % len(newNames), ithName, aName
# the first thing we need to figure out is whether this is another
# categorical feature, or a numerical one ...
isNum = 1
uVec = []
for bKey in mapDict.keys():
curVal = mapDict[bKey][ithName]
if (curVal == "NA"):
continue
if ( not stringInList_CaseInsens ( curVal, uVec ) ):
uVec += [curVal]
try:
fVal = float(curVal)
except:
isNum = 0
print " is numerical ??? ", isNum
if (len(uVec) == 1):
print " mapping to a constant ??? "
sys.exit(-1)
elif (len(uVec) == 2):
print " mapping is binary "
# if the mapping produces a binary feature, then
# over-ride the numerical feature
if (isNum):
print " over-riding the fact that the features LOOKS numerical ... "
isNum = 0
if (aName[1] == ":"):
if (aName[0] == "N"):
if (not isNum):
print " ERROR ??? new feature does not look to be numerical ???? "
print aName, uVec
sys.exit(-1)
# start setting up the new feature ...
newLabel = aName
if (newLabel in allClinDict.keys()):
print " this feature label already exists ??? ", newLabel
sys.exit(-1)
curV = allClinDict[aKey]
numClin = len(curV)
tmpV = [0] * numClin
for kk in range(numClin):
if (curV[kk].upper() == "NA"):
tmpV[kk] = "NA"
else:
try:
tmpV[kk] = getMapping ( mapDict, curV[kk], ithName )
## tmpV[kk] = mapDict[curV[kk]][ithName]
except:
print " ERROR ??? failed to map ??? setting to NA but MUST FIX !!! "
print kk, curV[kk], ithName
print mapDict
if (1):
tmpV[kk] = "NA"
else:
sys.exit(-1)
numNA = 0
notNA = 0
for kk in range(numClin):
if (tmpV[kk] == "NA"):
numNA += 1
else:
notNA += 1
if (numNA > 10 * notNA):
print " --> NOT adding this new feature <%s> " % newLabel, list(set(tmpV)), numNA, notNA
else:
print " --> ADDING new feature !!! <%s> " % newLabel, list(set(tmpV)), numNA, notNA
allClinDict[newLabel] = tmpV
print " "
print " "
print " "
print " RETURNING from addDerivedFeatures ... "
return (allClinDict)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
# this function creates N binary indicator vectors based on a single nominal
# (categorical) variable of cardinality N -- the indicator vectors hold
# 0 or 1 per patient but can still contain the string "NA" for missing
# values, so they are not purely "numeric" vectors ...
def addIndicatorFeatures(allClinDict):
magicStrings = ["patient", "person", "vital", "surviv", "race", "ethnic",
"prior", "gender", "age_at", "ageat", "radiation", "chemo",
"therapy", "treat", "performance", "days_to_", "daysto",
"year_of", "yearof", "surgical", "recurrence", "pregnancies"]
keyList = allClinDict.keys()
keyList.sort()
for aKey in keyList:
if (aKey == "bcr_patient_barcode"):
continue
(keyType, nCount, nNA, nCard, labelList,
labelCount) = miscClin.lookAtKey(allClinDict[aKey])
if (keyType == "NOMINAL"):
if (nCard > 2 and nCard < 27):
print " "
print " "
print " in addIndicatorFeatures ... ", aKey, keyType, nCard, labelList, labelCount
for aLabel in labelList:
# sometimes even though we have a "categorical" feature, some of the
# categories appear to be integers or floating point values
# ...
if (type(aLabel) is float):
print " we seem to have a floating point value ??? ", aLabel
iVal = int(aLabel + 0.001)
xVal = float(aLabel) - iVal
print iVal, xVal
if (abs(xVal) < 0.001):
aLabel = "%d" % iVal
else:
aLabel = str(aLabel)
elif (type(aLabel) is int):
iVal = int(aLabel)
aLabel = "%d" % iVal
print " "
## print aKey, aLabel
try:
# 012345678901234567890123456789...
# C:CLIN:<label>
# C:CLIN:<label>:a:b:c:d:e
if (aKey[1] == ":" and aKey[6] == ":"):
# if this feature name already has a prefix (eg
# "C:CLIN:")
featType = aKey[2:7]
i1 = aKey[7:].find(':')
if (i1 < 0):
# if there are no further ':'
firstName = aKey[7:]
secondName = ":::::"
else:
# if there are ...
firstName = aKey[7:7 + i1]
secondName = aKey[7 + i1 + 1:]
print " (a) got to here ... ", featType, aLabel, firstName, secondName
newLabel = "B:" + featType + \
"I(" + aLabel + "|" + firstName + ")" + \
secondName
print " (b) got to here ... ", newLabel
if (newLabel.find("|):") > 0):
print " (a) BAILING !!!! ", newLabel
sys.exit(-1)
else:
# here we really need to have some way to guess whether this
# should be a CLIN or a SAMP feature ...
typeString = "UNK"
for aString in magicStrings:
if (aKey.find(aString) >= 0):
typeString = "CLIN"
if (typeString == "UNK"):
print " defaulting to type SAMP for this feature : <%s> " % (aKey)
typeString = "SAMP"
print " (c) got to here ... ", typeString, aLabel
newLabel = "B:" + typeString + ":" + \
"I(" + aLabel + "|" + aKey + ")"
except:
print " (b) BAILING !!! "
print " ERROR in addIndicatorFeatures ??? ", aLabel, aKey
sys.exit(-1)
# make sure there are no blanks ...
newLabel = tsvIO.replaceBlanks(newLabel, "_")
if (newLabel in allClinDict.keys()):
print " this indicator variable already exists so I will not make a new one ... ", newLabel
continue
curV = allClinDict[aKey]
numClin = len(curV)
tmpV = [0] * numClin
print " ... looping over %d values ... default new value is zero " % (numClin)
for kk in range(numClin):
print kk, allClinDict[aKey][kk], aLabel, type(allClinDict[aKey][kk]), type(aLabel)
if (allClinDict[aKey][kk] == "NA"):
tmpV[kk] = "NA"
elif (str(allClinDict[aKey][kk]).lower() == str(aLabel).lower()):
tmpV[kk] = 1
print " adding new feature : ", newLabel
allClinDict[newLabel] = tmpV
return (allClinDict)
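## illustrative example (feature name assumed): a categorical feature
## "C:CLIN:histological_type:::::" with the label "mucinous" produces the
## indicator feature "B:CLIN:I(mucinous|histological_type)::::" whose values
## are 1 (matching category), 0 (any other category) or "NA"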
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
def addPairwiseIndicatorFeatures(allClinDict):
magicStrings = ["patient", "person", "vital", "surviv", "race", "ethnic",
"prior", "gender", "age_at", "ageat", "radiation", "chemo",
"therapy", "treat", "performance", "days_to_", "daysto",
"year_of", "yearof", "surgical", "recurrence", "pregnancies"]
print " "
print " "
keyList = allClinDict.keys()
keyList.sort()
for aKey in keyList:
if (aKey == "bcr_patient_barcode"):
continue
(keyType, nCount, nNA, nCard, labelList,
labelCount) = miscClin.lookAtKey(allClinDict[aKey])
if (keyType == "NOMINAL"):
# we do this only for categorical features with 3-9 categories
if (nCard > 2 and nCard < 10):
print " "
print " in addPairwiseIndicatorFeatures ... ", aKey, keyType, nCard, labelList, labelCount
for ak in range(len(labelList)):
aLabel = labelList[ak]
for bk in range(ak + 1, len(labelList)):
bLabel = labelList[bk]
print " aLabel=<%s> bLabel=<%s> " % (aLabel, bLabel)
try:
if (aKey[1] == ":" and aKey[6] == ":"):
i1 = aKey[7:].find(':')
if (i1 < 0):
i1 = len(aKey)
i2 = len(aKey)
else:
i1 = i1 + 7
i2 = aKey[(i1 + 1):].find(':')
if (i2 < 0):
i2 = len(aKey)
if (i2 > 0 and i2 < len(aKey)):
newLabel = "B:" + \
aKey[
2:7] + "I(" + aLabel + "," + bLabel + "|" + aKey[7:i1] + ")" + aKey[i2:]
else:
newLabel = "B:" + \
aKey[
2:7] + "I(" + aLabel + "," + bLabel + "|" + aKey[7:i1] + ")" + "::::"
else:
# here we really need to have some way to guess whether this
# should be a CLIN or a SAMP feature ...
typeString = "UNK"
for aString in magicStrings:
if (aKey.find(aString) >= 0):
typeString = "CLIN"
if (typeString == "UNK"):
print " defaulting to type SAMP for this feature : <%s> " % (aKey)
typeString = "SAMP"
newLabel = "B:" + typeString + ":" + \
"I(" + aLabel + "," + bLabel + "|" + aKey + ")" + \
"::::"
except:
print " NOT continuing in addPairwiseIndicatorFeatures !!! ", aLabel, bLabel, aKey
continue
# make sure there are no blanks ...
newLabel = tsvIO.replaceBlanks(newLabel, "_")
print " --> new label: <%s> " % newLabel
if (newLabel in allClinDict.keys()):
print " this indicator variable already exists so I will not make a new one ... ", newLabel
continue
curV = allClinDict[aKey]
numClin = len(curV)
tmpV = ["NA"] * numClin
for kk in range(numClin):
if (allClinDict[aKey][kk].lower() == aLabel.lower()):
tmpV[kk] = 1
elif (allClinDict[aKey][kk].lower() == bLabel.lower()):
tmpV[kk] = 0
print " adding new feature : ", newLabel
# print tmpV
allClinDict[newLabel] = tmpV
return (allClinDict)
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
if __name__ == "__main__":
if ( (len(sys.argv)!=4) and (len(sys.argv)!=5) ):
print " Usage : %s <input TSV> <output TSV> <public/private> [auxName] " % sys.argv[0]
print " ERROR -- bad command line arguments "
sys.exit(-1)
tsvNameIn = sys.argv[1]
tsvNameOut = sys.argv[2]
ppString = sys.argv[3]
if ( len(sys.argv) == 5 ):
auxName = sys.argv[4]
else:
auxName = "aux"
# test out readTSV ...
## tsvName = "coad_read_clinical.27jan.tsv"
print " "
print " ****************************************************************** "
print " reading input file <%s> " % tsvNameIn
allClinDict = tsvIO.readTSV(tsvNameIn)
print " A "
checkBarcodes(allClinDict)
# take a look ...
(naCounts, otherCounts) = miscClin.lookAtClinDict(allClinDict)
if (1):
# remove constant-value keys ...
allClinDict = miscClin.removeConstantKeys(allClinDict)
print " B "
checkBarcodes(allClinDict)
if (1):
# remove uninformative keys ...
allClinDict = miscClin.removeUninformativeKeys(allClinDict)
print " C "
checkBarcodes(allClinDict)
# check the tumor stage based on the other T/N/M definitions, update if possible
# (and if the original setting was "NA")
if (1):
allClinDict = checkTumorStage(allClinDict)
print " D "
checkBarcodes(allClinDict)
# new as of 16aug13 ... vital_status strings are inconsistent between
# 'living' or 'alive' or 'deceased' or 'dead' ...
# --> standard should be "Alive" or "Dead"
if (1):
allClinDict = checkVitalStatus(allClinDict)
print " E "
checkBarcodes(allClinDict)
# new as of 13sep13 ... making 'age' a continuous feature that
# exactly matches the days_to_birth ...
if (1):
allClinDict = updateAge(allClinDict)
print " F "
checkBarcodes(allClinDict)
# remap some categorical features to numerical features ...
# oh, this shouldn't still be here, should it ??? 15aug2014
if (0):
allClinDict = remapCategoricalFeatures(allClinDict)
print " G "
checkBarcodes(allClinDict)
# add the lymphnodes_positive fraction ...
allClinDict = computeLymphnodesFraction(allClinDict)
print " H "
checkBarcodes(allClinDict)
# fill in some missing information that we have collected from elsewhere
# ...
if (0):
allClinDict = addMissingInfo(allClinDict)
print " I "
checkBarcodes(allClinDict)
# NEW: look at some of the "days_to_" fields and do some fix-ups ...
if (1):
allClinDict = addFollowupInfo(allClinDict)
print " J "
checkBarcodes(allClinDict)
# new as of 04dec13 ... checking that vital_status and various days_to_???
# features are consistent ...
if (1):
allClinDict = checkFollowupInfo(allClinDict)
print " K "
checkBarcodes(allClinDict)
# take a look at the updated dictionary ...
(naCounts, otherCounts) = miscClin.lookAtClinDict(allClinDict)
if (1):
# remove constant-value keys ...
allClinDict = miscClin.removeConstantKeys(allClinDict)
print " L "
checkBarcodes(allClinDict)
if (0):
# removing this ... 02Feb2012 SMR
# filter out keys with too little information ...
# or maybe leave nearly everything in ;-)
categorical_naFracThresh = 0.90
numerical_naFracThresh = 0.90
classSize_minFracThresh = 0.
classSize_maxFracThresh = 0.995
allClinDict = miscClin.filterClinDict(allClinDict,
categorical_naFracThresh,
numerical_naFracThresh,
classSize_minFracThresh,
classSize_maxFracThresh)
print " M "
checkBarcodes(allClinDict)
# try to abbreviate clinical feature strings
allClinDict = abbrevCategStrings(allClinDict)
print " N "
checkBarcodes(allClinDict)
if (0):
# automatically generate indicator features for remaining categorical
# features
allClinDict = addIndicatorFeatures(allClinDict)
print " O "
checkBarcodes(allClinDict)
# new 10Feb2012 : add pairwise indicator features
allClinDict = addPairwiseIndicatorFeatures(allClinDict)
print " P "
checkBarcodes(allClinDict)
# new 09Jan2013 : try to add numeric features that map the non-binary categorical features ...
# as of 06Aug2014, this is only done for "private" runs
if ( ppString == "private" ):
allClinDict = addDerivedFeatures(allClinDict, auxName)
print " Q "
checkBarcodes(allClinDict)
# look at pairwise MI ...
if (0):
print " "
print " ****************************************** "
print " * looking at pairwise Mutual Information * "
print " ****************************************** "
print " "
miscClin.pairwiseMI(allClinDict, "miNetwork.A.13feb12")
# look at the data again and re-pick the 'best' key order ...
(naCounts, otherCounts) = miscClin.lookAtClinDict(allClinDict)
bestKeyOrder = miscClin.getBestKeyOrder(allClinDict, naCounts)
doWriteTSV = 1
if (doWriteTSV):
outName = tsvNameOut
tsvIO.writeTSV_clinical(allClinDict, bestKeyOrder, outName)
if (1):
outName = tsvNameOut[:-4] + ".flipNumeric.tsv"
tsvIO.writeTSV_clinicalFlipNumeric(
allClinDict, bestKeyOrder, outName)
print " "
print " "
print " FINISHED "
print " "
print " "
# -#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
| cancerregulome/gidget | commands/feature_matrix_construction/main/cleanClinTSV.py | Python | mit | 98,495 | 0.004041 |
from ..base import BaseTaskRunnerBackend
class SocketIOBackend(BaseTaskRunnerBackend):
def __init__(self):
from . import sockets
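        # the import above is kept for its side effects only -- importing
        # fabric_bolt.task_runners.socketio.sockets presumably registers the
        # socket.io handlers; the module is not referenced again here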
def get_detail_template(self):
return 'task_runners/deployment_detail_socketio.html'
| npardington/fabric-bolt | fabric_bolt/task_runners/socketio/__init__.py | Python | mit | 241 | 0 |
import abc
import json
import codecs
from rest_framework import serializers
from django.utils.translation import ugettext_lazy as _
from rest_framework.parsers import BaseParser
from rest_framework import status
from rest_framework.exceptions import ParseError, APIException
from common.utils import get_logger
logger = get_logger(__file__)
class FileContentOverflowedError(APIException):
status_code = status.HTTP_400_BAD_REQUEST
default_code = 'file_content_overflowed'
default_detail = _('The file content overflowed (The maximum length `{}` bytes)')
class BaseFileParser(BaseParser):
FILE_CONTENT_MAX_LENGTH = 1024 * 1024 * 10
serializer_cls = None
def check_content_length(self, meta):
content_length = int(meta.get('CONTENT_LENGTH', meta.get('HTTP_CONTENT_LENGTH', 0)))
if content_length > self.FILE_CONTENT_MAX_LENGTH:
msg = FileContentOverflowedError.default_detail.format(self.FILE_CONTENT_MAX_LENGTH)
logger.error(msg)
raise FileContentOverflowedError(msg)
@staticmethod
def get_stream_data(stream):
stream_data = stream.read()
stream_data = stream_data.strip(codecs.BOM_UTF8)
return stream_data
@abc.abstractmethod
def generate_rows(self, stream_data):
raise NotImplemented
def get_column_titles(self, rows):
return next(rows)
def convert_to_field_names(self, column_titles):
fields_map = {}
fields = self.serializer_cls().fields
fields_map.update({v.label: k for k, v in fields.items()})
fields_map.update({k: k for k, _ in fields.items()})
field_names = [
fields_map.get(column_title.strip('*'), '')
for column_title in column_titles
]
return field_names
@staticmethod
def _replace_chinese_quote(s):
trans_table = str.maketrans({
'“': '"',
'”': '"',
'‘': '"',
'’': '"',
'\'': '"'
})
return s.translate(trans_table)
@classmethod
def process_row(cls, row):
"""
Row-level pre-processing before the rows are turned into JSON-style data
"""
new_row = []
for col in row:
# normalize full-width (Chinese) quotation marks to ASCII quotes
col = cls._replace_chinese_quote(col)
# decode JSON-looking strings into lists / dicts
if isinstance(col, str) and (
(col.startswith('[') and col.endswith(']'))
or
(col.startswith("{") and col.endswith("}"))
):
col = json.loads(col)
new_row.append(col)
return new_row
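    # illustrative example (row values assumed):
    #   BaseFileParser.process_row(['a “quoted” cell', '[1, 2]'])
    #   --> ['a "quoted" cell', [1, 2]]
    # full-width quotes are normalized first so that json.loads can parse
    # bracketed cells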
def process_row_data(self, row_data):
"""
Row-data post-processing after the JSON-style dicts are built
"""
new_row_data = {}
serializer_fields = self.serializer_cls().fields
for k, v in row_data.items():
if isinstance(v, (list, dict)) or (isinstance(v, str) and k.strip() and v.strip()):
# handle cases like disk_info: CharField values (e.g. '{}') must be str
if not isinstance(v, str) and isinstance(serializer_fields[k], serializers.CharField):
v = str(v)
new_row_data[k] = v
return new_row_data
def generate_data(self, fields_name, rows):
data = []
for row in rows:
# 空行不处理
if not any(row):
continue
row = self.process_row(row)
row_data = dict(zip(fields_name, row))
row_data = self.process_row_data(row_data)
data.append(row_data)
return data
def parse(self, stream, media_type=None, parser_context=None):
parser_context = parser_context or {}
try:
view = parser_context['view']
meta = view.request.META
self.serializer_cls = view.get_serializer_class()
except Exception as e:
logger.debug(e, exc_info=True)
raise ParseError('The resource does not support imports!')
self.check_content_length(meta)
try:
stream_data = self.get_stream_data(stream)
rows = self.generate_rows(stream_data)
column_titles = self.get_column_titles(rows)
field_names = self.convert_to_field_names(column_titles)
data = self.generate_data(field_names, rows)
return data
except Exception as e:
logger.error(e, exc_info=True)
raise ParseError('Parse error! ({})'.format(self.media_type))
| skyoo/jumpserver | apps/common/drf/parsers/base.py | Python | gpl-2.0 | 4,553 | 0.001349 |
from argparse import ArgumentParser
import socket
import struct
import sys
import threading
import time
from ._fakeds import FakeDS
__all__ = ["Netconsole", "main", "run"]
def _output_fn(s):
sys.stdout.write(
s.encode(sys.stdout.encoding, errors="replace").decode(sys.stdout.encoding)
)
sys.stdout.write("\n")
class StreamEOF(IOError):
pass
class Netconsole:
"""
Implements the 2018+ netconsole protocol
"""
TAG_ERROR = 11
TAG_INFO = 12
def __init__(self, printfn=_output_fn):
self.frames = {self.TAG_ERROR: self._onError, self.TAG_INFO: self._onInfo}
self.cond = threading.Condition()
self.sock = None
self.sockrfp = None
self.sockwfp = None
self.sockaddr = None
self.running = False
self.printfn = printfn
def start(self, address, port=1741, connect_event=None, block=True):
with self.cond:
if self.running:
raise ValueError("Cannot start without stopping first")
self.sockaddr = (address, port)
self.connect_event = connect_event
self.running = True
self._rt = threading.Thread(
target=self._readThread, name="nc-read-thread", daemon=True
)
self._rt.start()
if block:
self._keepAlive()
else:
self._kt = threading.Thread(
target=self._keepAlive, name="nc-keepalive-thread", daemon=True
)
self._kt.start()
@property
def connected(self):
return self.sockrfp is not None
    def stop(self):
        with self.cond:
            self.running = False
            self.cond.notify_all()
            if self.sock is not None:
                self.sock.close()
def _connectionDropped(self):
print(".. connection dropped", file=sys.stderr)
self.sock.close()
with self.cond:
self.sockrfp = None
self.cond.notify_all()
def _keepAliveReady(self):
if not self.running:
return -1
elif not self.connected:
return -2
def _keepAlive(self):
while self.running:
with self.cond:
ret = self.cond.wait_for(self._keepAliveReady, timeout=2.0)
if ret == -1:
return
elif ret == -2:
self._reconnect()
else:
try:
self.sockwfp.write(b"\x00\x00")
self.sockwfp.flush()
except IOError:
self._connectionDropped()
def _readThreadReady(self):
if not self.running:
return -1
return self.sockrfp
def _readThread(self):
while True:
with self.cond:
sockrfp = self.cond.wait_for(self._readThreadReady)
if sockrfp == -1:
return
try:
data = sockrfp.read(self._headerSz)
except IOError:
data = ""
if len(data) != self._headerSz:
self._connectionDropped()
continue
blen, tag = self._header.unpack(data)
blen -= 1
try:
buf = sockrfp.read(blen)
except IOError:
buf = ""
if len(buf) != blen:
self._connectionDropped()
continue
# process the frame
fn = self.frames.get(tag)
if fn:
fn(buf)
else:
print("ERROR: Unknown tag %s; Ignoring..." % tag, file=sys.stderr)
def _reconnect(self):
# returns once the socket is connected or an exit is requested
while self.running:
sys.stderr.write("Connecting to %s:%s..." % self.sockaddr)
try:
sock = socket.create_connection(self.sockaddr, timeout=3.0)
except IOError:
sys.stderr.write(" :(\n")
# don't busywait, just in case
time.sleep(1.0)
continue
else:
sys.stderr.write("OK\n")
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
sock.settimeout(None)
sockrfp = sock.makefile("rb")
sockwfp = sock.makefile("wb")
if self.connect_event:
self.connect_event.set()
with self.cond:
self.sock = sock
self.sockrfp = sockrfp
self.sockwfp = sockwfp
self.cond.notify_all()
break
#
# Message
#
_header = struct.Struct(">Hb")
_headerSz = _header.size
_errorFrame = struct.Struct(">fHHiB")
_errorFrameSz = _errorFrame.size
_infoFrame = struct.Struct(">fH")
_infoFrameSz = _infoFrame.size
_slen = struct.Struct(">H")
_slenSz = _slen.size
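    # wire format, inferred from the structs above: every frame is
    # <H:length><b:tag>, where length counts the tag byte plus the payload;
    # tag 11 (error) payload = <f:ts><H:seq><H:numOcc><i:errorCode><B:flags>
    # followed by three <H:len>-prefixed UTF-8 strings (details, location,
    # callStack); tag 12 (info) payload = <f:ts><H:seq> followed by raw UTF-8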
def _onError(self, b):
ts, _seq, _numOcc, errorCode, flags = self._errorFrame.unpack_from(b, 0)
details, nidx = self._getStr(b, self._errorFrameSz)
location, nidx = self._getStr(b, nidx)
callStack, _ = self._getStr(b, nidx)
self.printfn(
"[%0.2f] %d %s %s %s" % (ts, errorCode, details, location, callStack)
)
def _getStr(self, b, idx):
sidx = idx + self._slenSz
(blen,) = self._slen.unpack_from(b, idx)
nextidx = sidx + blen
return b[sidx:nextidx].decode("utf-8", errors="replace"), nextidx
def _onInfo(self, b):
ts, _seq = self._infoFrame.unpack_from(b, 0)
msg = b[self._infoFrameSz :].decode("utf-8", errors="replace")
self.printfn("[%0.2f] %s" % (ts, msg))
def run(address, connect_event=None, fakeds=False):
"""
Starts the netconsole loop. Note that netconsole will only send output
if the DS is connected. If you don't have a DS available, the 'fakeds'
flag can be specified to fake a DS connection.
:param address: Address of the netconsole server
:param connect_event: a threading.event object, upon which the 'set'
function will be called when the connection has
succeeded.
:param fakeds: Fake a driver station connection
"""
if fakeds:
ds = FakeDS()
ds.start(address)
nc = Netconsole()
nc.start(address, connect_event=connect_event)
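# minimal usage sketch (the address below is an assumption, not from this file):
#
#   import threading
#   ev = threading.Event()
#   run("10.0.0.2", connect_event=ev)   # blocks; ev is set once connected
#
# with fakeds=True a FakeDS connection is spoofed so the robot emits console
# output even without a real driver station attached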
def main():
parser = ArgumentParser()
parser.add_argument("address", help="Address of Robot")
parser.add_argument(
"-f",
"--fakeds",
action="store_true",
default=False,
help="Fake a driver station connection to the robot",
)
args = parser.parse_args()
run(args.address, fakeds=args.fakeds)
| robotpy/pynetconsole | netconsole/netconsole.py | Python | isc | 6,782 | 0.000885 |
# coding=utf-8
__author__ = "zephor"
class TestResultPipeLine(object):
@classmethod
def from_crawler(cls, crawler):
crawler.spider.test_result = []
return cls()
@staticmethod
def process_item(item, spider):
spider.test_result.append(item)
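# illustrative use (settings path assumed): enabling
# "zspider.pipelines.test_result.TestResultPipeLine" in ITEM_PIPELINES makes
# the spider collect every scraped item in spider.test_result for inspection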
| Zephor5/zspider | zspider/pipelines/test_result.py | Python | mit | 283 | 0 |
#!/usr/bin/env python
#-*- coding: utf-8 -*-
import define
import tw8836
import spi
def quad_check():
status = spi.status1_read()
if (status & 0x40):
print 'SPI flash is already in QUAD mode'
return define.TRUE
else:
print 'SPI flash is not in QUAD mode yet'
return define.FALSE
def quad_enable():
status = spi.status1_read()
spi.write_enable()
spi.status1_write(status | 0x40)
spi.write_disable()
def quad_disable():
status = spi.status1_read()
spi.write_enable()
spi.status1_write(status & ~0x40)
spi.write_disable()
def four_byte_check():
status = spi.status2_read()
if (status & 0x20):
if define.DEBUG == define.ON:
print 'SPI flash is in 4 Byte mode'
spi.bank_address_register_write(0x80)
return define.TRUE
else:
if define.DEBUG == define.ON:
print 'SPI flash is not in 4 Byte mode'
spi.bank_address_register_write(0x0)
return define.FALSE
def four_byte_enter():
tw8836.write_page(0x04)
tw8836.write(0xF3, (spi.DMA_DEST_CHIPREG << 6) + spi.DMA_CMD_COUNT_1)
tw8836.write(0xF5, 0) #length high
tw8836.write(0xF8, 0) #length middle
tw8836.write(0xF9, 0) #length low
tw8836.write(0xFA, spi.SPICMD_EN4B)
tw8836.write(0xF4, spi.SPI_CMD_OPT_NONE | spi.DMA_START)
def four_byte_exit():
tw8836.write_page(0x04)
tw8836.write(0xF3, (spi.DMA_DEST_CHIPREG << 6) + spi.DMA_CMD_COUNT_1)
tw8836.write(0xF5, 0) #length high
tw8836.write(0xF8, 0) #length middle
tw8836.write(0xF9, 0) #length low
tw8836.write(0xFA, spi.SPICMD_EX4B)
tw8836.write(0xF4, spi.SPI_CMD_OPT_NONE | spi.DMA_START)
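# the two helpers above push a single one-byte flash command (SPICMD_EN4B /
# SPICMD_EX4B) through the TW8836 SPI-DMA registers with zero payload length
# and then trigger it with DMA_START; a typical (assumed) call sequence:
#
#   if not four_byte_check():
#       four_byte_enter()   # flash parts larger than 16 MB need 4-byte addressing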
def erase_fail_check():
status = spi.security_register_read()
if (status & 0x40):
print 'erase failed'
spi.sr_clear()
return define.TRUE
else:
print 'erase succeed'
return define.FALSE
def dummy_cycles_config(mode, cycles):
print 'dummy_cycles_config in issi.py'
status2_register = spi.status2_read()
print hex(status2_register)
| lgnq/RPI8836 | issi.py | Python | lgpl-3.0 | 2,227 | 0.015267 |
import csv
from datetime import datetime
import re
from StringIO import StringIO
from PIL import Image
import formencode
from pylons import tmpl_context as c
from pylons.i18n import _
from webhelpers.html import literal
from sqlalchemy import func
from adhocracy import config
from adhocracy.lib.auth import can
from adhocracy.lib.unicode import UnicodeDictReader
FORBIDDEN_NAMES = ["www", "static", "mail", "edit", "create", "settings",
"join", "leave", "control", "test", "support", "page",
"proposal", "wiki", "blog", "proposals", "admin", "dl",
"downloads", "stats", "branch", "merge", "pull", "push",
"hg", "git", "adhocracy", "user", "openid", "auth", "watch",
"poll", "delegation", "event", "comment", "root", "search",
"tag", "svn", "trac", "lists", "list", "new", "update",
"variant", "provision", "untag", "code", "sso", "velruse"]
VALIDUSER = re.compile(r"^[a-zA-Z0-9_\-]{3,255}$")
VALIDVARIANT = re.compile(r"^[\w\-_ ]{1,255}$", re.U)
TIME = re.compile(r"\d{1,2}\.\d{1,2}\.\d{4}")
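# Illustrative matches (sketch, values assumed): VALIDUSER accepts
# 'jane_doe-42' and rejects 'ab' (too short); VALIDVARIANT allows spaces,
# e.g. 'draft 2'; TIME expects dotted dates such as '24.12.2014'.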
class UniqueUsername(formencode.FancyValidator):
def _to_python(self, value, state):
from adhocracy.model import meta, User
if not value or not isinstance(value, basestring):
raise formencode.Invalid(
_('No username is given'),
value, state)
if len(value.strip()) < 3:
raise formencode.Invalid(
_('Username is too short'),
value, state)
if not VALIDUSER.match(value) or value in FORBIDDEN_NAMES:
raise formencode.Invalid(
_('The username is invalid'),
value, state)
if meta.Session.query(User.user_name).filter(
func.lower(User.user_name) == value.lower()
).count():
raise formencode.Invalid(
_('That username already exists'),
value, state)
return value
class UniqueEmail(formencode.FancyValidator):
def _to_python(self, value, state):
from adhocracy.model import User
if User.all_q()\
.filter(func.lower(User.email) == value.lower()).count():
raise formencode.Invalid(
_('That email is already registered'),
value, state)
return value
class UniqueOtherEmail(formencode.FancyValidator):
"""
Check if email is unused or belongs to the current user.
"""
def _to_python(self, value, state):
if (c.user is not None and c.user.email is not None
and c.user.email.lower() == value.lower()):
return value
from adhocracy.model import User
if User.all_q()\
.filter(func.lower(User.email) == value.lower()).count():
raise formencode.Invalid(
_('That email is already used by another account'),
value, state)
return value
class ValidLocale(formencode.FancyValidator):
def _to_python(self, value, state):
from adhocracy import i18n
if value in i18n.LOCALE_STRINGS:
return value
else:
raise formencode.Invalid(_('Invalid locale choice'), value, state)
class ValidDate(formencode.FancyValidator):
def _to_python(self, value, state):
if not TIME.match(value):
raise formencode.Invalid(
_('Invalid date, expecting DD.MM.YYYY'),
value, state)
try:
return datetime.strptime(value, "%d.%m.%Y")
except ValueError:
raise formencode.Invalid(
_('Invalid date, expecting DD.MM.YYYY'),
value, state)
class ValidHTMLColor(formencode.validators.Regex):
regex = r'^#[0-9a-fA-F]{1,6}'
def to_python(self, value, state):
try:
super(ValidHTMLColor, self).to_python(value, state)
except formencode.Invalid:
raise formencode.Invalid(
_("Please enter a html color code like '#f0f0f0'. "
"'%(value)' is not a valid color code."), value, state)
return value
class UniqueInstanceKey(formencode.FancyValidator):
def _to_python(self, value, state):
from adhocracy.model import Instance
if not value:
raise formencode.Invalid(
_('No instance key is given'),
value, state)
if not Instance.INSTANCE_KEY.match(value) or value in FORBIDDEN_NAMES:
raise formencode.Invalid(
_('The instance key is invalid'),
value, state)
if Instance.find(value):
raise formencode.Invalid(
_('An instance with that key already exists'),
value, state)
return value
class StaticPageKey(formencode.FancyValidator):
def to_python(self, value, state):
from adhocracy.lib import staticpage
if not value:
raise formencode.Invalid(
_('No static key is given'),
value, state)
if not staticpage.STATICPAGE_KEY.match(value) or value in ['new']:
raise formencode.Invalid(
_('The static key is invalid'),
value, state)
return value
class ValidDelegateable(formencode.FancyValidator):
def _to_python(self, value, state):
from adhocracy.model import Delegateable
delegateable = Delegateable.find(value)
if not delegateable:
raise formencode.Invalid(
_("No entity with ID '%s' exists") % value,
value, state)
return delegateable
class ValidProposal(formencode.FancyValidator):
def _to_python(self, value, state):
from adhocracy.model import Proposal
proposal = Proposal.find(value)
if not proposal:
raise formencode.Invalid(
_("No proposal with ID '%s' exists") % value,
value, state)
return proposal
class ValidInstanceGroup(formencode.FancyValidator):
def _to_python(self, value, state):
from adhocracy.model import Group
group = Group.by_code(value)
if not group:
raise formencode.Invalid(
_("No group with ID '%s' exists") % value,
value, state)
if not group.is_instance_group():
raise formencode.Invalid(
_("Group '%s' is no instance group") % group.code,
value, state)
return group
class ContainsChar(formencode.validators.Regex):
regex = r"[a-zA-Z]"
def to_python(self, value, state):
try:
super(ContainsChar, self).to_python(value, state)
except formencode.Invalid:
raise formencode.Invalid(_("At least one character is required"),
value, state)
return value
class ValidBadgeInstance(formencode.FancyValidator):
def _to_python(self, value, state):
from adhocracy.model import Instance
if can.badge.manage_global() or can.badge.edit_global():
if value:
instance = Instance.find(value)
if instance is None:
raise AssertionError("Could not find instance %s" % value)
return instance
return None
elif can.badge.manage_instance() or can.badge.edit_instance():
instance = Instance.find(value)
if instance is not None and instance == c.instance:
return instance
raise formencode.Invalid(
_("You're not allowed to edit global badges"),
value, state)
class ValidUserBadge(formencode.FancyValidator):
def _to_python(self, value, state):
from adhocracy.model import UserBadge
badge = UserBadge.by_id(value, instance_filter=False)
if badge is None or badge.instance not in [None, c.instance]:
raise formencode.Invalid(
_("No Badge ID '%s' exists") % value,
value, state)
return badge
class ValidUserBadges(formencode.FancyValidator):
""" Check for a set of user badges, inputted by ID """
accept_iterator = True
def __init__(self, not_empty=False):
        super(ValidUserBadges, self).__init__()
self.not_empty = not_empty
if not not_empty:
self.if_missing = []
def _to_python(self, value, state):
from adhocracy.model import UserBadge
if value is None:
if self.not_empty:
raise formencode.Invalid(_('No badges selected'), value, state)
return []
if isinstance(value, (str, unicode)):
value = [value]
if len(value) != len(set(value)):
raise formencode.Invalid(
_("Duplicates in input set of user badge IDs"),
value, state)
if self.not_empty and not value:
raise formencode.Invalid(_('No badges selected'), value, state)
badges = UserBadge.findall_by_ids(value)
if len(badges) != len(value):
missing = set(value).difference(b.id for b in badges)
raise formencode.Invalid(
_("Could not find badges %s") % ','.join(map(str, missing)),
value, state)
return badges
class ValidUserBadgeNames(formencode.FancyValidator):
def __init__(self, instance_filter=True, **kwargs):
        super(ValidUserBadgeNames, self).__init__(**kwargs)
self.instance_filter = instance_filter
def _to_python(self, value, state):
from adhocracy.model import UserBadge
if value is None or value == '':
return []
labels = [l.strip() for l in value.split(',')]
if len(labels) != len(set(labels)):
raise formencode.Invalid(
_("Duplicates in input set of user badge labels"),
value, state)
badges = set()
missing = set()
for label in labels:
badge = UserBadge.find(label, instance_filter=self.instance_filter)
if badge is None:
missing.add(label)
else:
badges.add(badge)
if missing:
raise formencode.Invalid(
_("Could not find badges %s") % ','.join(missing),
value, state)
else:
return badges
class ValidInstanceBadge(formencode.FancyValidator):
def _to_python(self, value, state):
from adhocracy.model import InstanceBadge
try:
value = int(value)
except ValueError:
pass
badge = InstanceBadge.by_id(value, instance_filter=False)
if badge is None or badge.instance not in [None, c.instance]:
raise formencode.Invalid(
_("No Badge ID '%s' exists") % value,
value, state)
return badge
class ValidDelegateableBadge(formencode.FancyValidator):
def _to_python(self, value, state):
from adhocracy.model import DelegateableBadge
try:
value = int(value)
except:
pass
badge = DelegateableBadge.by_id(value, instance_filter=False)
if badge is None or badge.instance not in [None, c.instance]:
raise formencode.Invalid(
_("No Badge ID '%s' exists") % value,
value, state)
return badge
class ValidThumbnailBadge(formencode.FancyValidator):
def _to_python(self, value, state):
from adhocracy.model import ThumbnailBadge
try:
value = int(value)
except:
pass
badge = ThumbnailBadge.by_id(value, instance_filter=False)
if badge is None or badge.instance not in [None, c.instance]:
raise formencode.Invalid(
_("No Badge ID '%s' exists") % value,
value, state)
return badge
class ValidCategoryBadge(formencode.FancyValidator):
def _to_python(self, value, state):
from adhocracy.model import CategoryBadge
try:
value = int(value)
except:
pass
badge = CategoryBadge.by_id(value, instance_filter=False)
if badge is None:
raise formencode.Invalid(
_("No Badge ID '%s' exists") % value,
value, state)
if badge.instance is None and c.instance is not None\
and c.instance.hide_global_categories:
raise formencode.Invalid(
_("Cannot use global category %s in this instance") % value,
value, state)
if badge.instance not in [None, c.instance]:
raise formencode.Invalid(
_("Badge with ID '%s' not valid in this instance") % value,
value, state)
return badge
class ValidParentCategory(formencode.validators.FormValidator):
def validate_python(self, field_dict, state):
if (field_dict['parent'] is not None and
field_dict['parent'].instance is not field_dict['instance']):
msg = _("Parent and child category instance have to match")
raise formencode.Invalid(
msg, field_dict, state,
error_dict={'parent': msg}
)
else:
return field_dict
class ValidateNoCycle(formencode.validators.FormValidator):
def validate_python(self, field_dict, state):
def parent_okay(category):
if category is None:
# no cycle
return True
elif category == field_dict['id']:
# cycle
return False
else:
return parent_okay(category.parent)
if parent_okay(field_dict['parent']):
return field_dict
else:
msg = _('You shall not create cycles!')
raise formencode.Invalid(
msg, field_dict, state,
error_dict={'parent': msg}
)
class MaybeMilestone(formencode.FancyValidator):
def _to_python(self, value, state):
from adhocracy.model import Milestone
try:
return Milestone.find(value)
except Exception:
return None
class ValidRevision(formencode.FancyValidator):
def _to_python(self, value, state):
from adhocracy.model import Revision
revision = Revision.find(value)
if not revision:
raise formencode.Invalid(
_("No revision with ID '%s' exists") % value,
value, state)
return revision
class ValidComment(formencode.FancyValidator):
def _to_python(self, value, state):
from adhocracy.model import Comment
comment = Comment.find(value)
if not comment:
raise formencode.Invalid(
_("No comment with ID '%s' exists") % value,
value, state)
return comment
class ValidWatch(formencode.FancyValidator):
def _to_python(self, value, state):
from adhocracy.model import Watch
watch = Watch.by_id(value)
if not watch:
raise formencode.Invalid(
_("No watchlist entry with ID '%s' exists") % value,
value, state)
return watch
class ValidRef(formencode.FancyValidator):
def _to_python(self, value, state):
from adhocracy.model import refs
try:
entity = refs.from_url(value)
if not entity:
raise TypeError()
return entity
except:
raise formencode.Invalid(_("Invalid reference"), value, state)
class ExistingUserName(formencode.FancyValidator):
def _to_python(self, value, state):
from adhocracy.model import User
user = User.find(value)
if not user:
raise formencode.Invalid(
_("No user with the user name '%s' exists") % value,
value, state)
return user
class ValidTagging(formencode.FancyValidator):
def _to_python(self, value, state):
from adhocracy.model import Tagging
tagging = Tagging.find(value)
if not tagging:
raise formencode.Invalid(
_("No tagging with ID '%s' exists") % value,
value, state)
return tagging
class ValidTag(formencode.FancyValidator):
def _to_python(self, value, state):
from adhocracy.model import Tag
tag = Tag.find(value)
if not tag:
raise formencode.Invalid(
_("No tag with ID '%s' exists") % value,
value, state)
return tag
class ValidText(formencode.FancyValidator):
def _to_python(self, value, state):
from adhocracy.model import Text
text = Text.find(value)
if not text:
raise formencode.Invalid(
_("No text with ID '%s' exists") % value,
value, state)
return text
class ValidPage(formencode.FancyValidator):
def _to_python(self, value, state):
from adhocracy.model import Page
page = Page.find(value)
if not page:
raise formencode.Invalid(_("No page '%s' exists") % value,
value, state)
return page
class ValidPageFunction(formencode.FancyValidator):
def _to_python(self, value, state):
from adhocracy.model import Page
function = value.lower().strip()
if function not in Page.FUNCTIONS:
raise formencode.Invalid(_("Invalid page function: %s") % value,
value, state)
return function
class VariantName(formencode.FancyValidator):
def _to_python(self, value, state):
from adhocracy.lib.text import variant_normalize
var = variant_normalize(value)
if not var or len(var) < 2:
raise formencode.Invalid(_("No name is given."),
value, state)
# if (var.lower() in FORBIDDEN_NAMES or not
# VALIDVARIANT.match(var.lower())):
if var.lower() in FORBIDDEN_NAMES:
raise formencode.Invalid(_("Invalid name: %s") % value,
value, state)
        try:
            int(var)
        except ValueError:
            return var
        raise formencode.Invalid(
            _("Name cannot be purely numeric: %s") % value,
            value, state)
class ValidTitle(formencode.validators.String):
def __init__(self, unused_label=False):
super(ValidTitle, self).__init__(min=3, max=254, not_empty=True)
self.unused_label = unused_label
def _to_python(self, value, state):
from adhocracy.model import Page
from adhocracy.lib.text import title2alias
value = super(ValidTitle, self)._to_python(value, state)
if self.unused_label and not Page.unused_label(title2alias(value)):
raise formencode.Invalid(
_("An entry with this title already exists"), value, state)
if not value or len(value) < 2:
raise formencode.Invalid(_("No page name is given."), value, state)
if value.lower() in FORBIDDEN_NAMES:
raise formencode.Invalid(_("Invalid page name: %s") % value,
value, state)
        try:
            int(value)
        except ValueError:
            return value
        raise formencode.Invalid(
            _("Title cannot be purely numeric: %s") % value,
            value, state)
class ValidProposalTitle(ValidTitle):
def validate_python(self, value, state):
value = super(ValidProposalTitle, self)._to_python(value, state)
# must be a valid variant name
try:
variant_name_validator = VariantName()
variant_name_validator._to_python(value, state)
except formencode.Invalid as e:
raise formencode.Invalid(e.msg, value, state)
return value
class ProposalMessageNoRecipientGroup(formencode.validators.FormValidator):
def validate_python(self, field_dict, state):
if (not field_dict.get('creators', False) and
not field_dict.get('supporters', False) and
not field_dict.get('opponents', False)):
msg = _(u"Please select at least one recipient group")
raise formencode.Invalid(
msg, field_dict, state,
error_dict={'creators': msg}
)
USER_NAME = 'user_name'
DISPLAY_NAME = 'display_name'
EMAIL = 'email'
USER_BADGES = 'user_badges'
USERNAME_VALIDATOR = formencode.All(
formencode.validators.PlainText(not_empty=True),
UniqueUsername(),
ContainsChar())
EMAIL_VALIDATOR = formencode.All(formencode.validators.Email(not_empty=True),
UniqueEmail())
class UsersCSV(formencode.FancyValidator):
def to_python(self, value, state=None):
if state is None:
global_admin = False
else:
global_admin = getattr(state, u'global_admin', False)
fieldnames = [USER_NAME, DISPLAY_NAME, EMAIL, USER_BADGES]
errors = []
items = []
self.usernames = {}
self.emails = {}
self.duplicates = False
value = value.encode('utf-8')
reader = UnicodeDictReader(StringIO(value), fieldnames=fieldnames)
try:
for item in reader:
error_list, cleaned_item = self._check_item(
item, reader.line_num, global_admin=global_admin)
if error_list:
errors.append((reader.line_num, error_list))
if not errors:
items.append(cleaned_item)
except csv.Error, E:
line_content = value.split('\n')[reader.line_num]
msg = _('Error "%(error)s" while reading line '
'<pre><i>%(line_content)s</i></pre>') % dict(
line_content=line_content,
error=str(E))
errors.append((reader.line_num + 1, [msg]))
if errors or self.duplicates:
            error_msg = _('The following errors occurred while reading '
'the list of users: <br />%s')
line_error_messages = []
for (line, messages) in errors:
line_error_messages.append(
_('Line %s: %s') % (line, ', '.join(messages)))
# Insert messages for duplicate emails and usernames
self._insert_duplicate_messages(
line_error_messages,
self.emails,
_('Email %s is used multiple times'))
self._insert_duplicate_messages(
line_error_messages,
self.usernames,
_('Username %s is used multiple times'))
error_msg = error_msg % ('<br />'.join(line_error_messages))
raise formencode.Invalid(literal(error_msg), value, state)
else:
return items
def _insert_duplicate_messages(self, line_error_messages, duplicate_dict,
msg_template):
for (value, lines) in duplicate_dict.items():
if len(lines) > 1:
lines = [str(line) for line in lines]
line_error_messages.append(
_('Lines %s: %s') % (
', '.join(lines),
msg_template % value))
def _check_item(self, item, line, global_admin=False):
error_list = []
user_name = item.get(USER_NAME, '').strip()
email = item.get(EMAIL, '')
badges = item.get(USER_BADGES, '')
if email is not None:
email = email.strip()
validated = {}
USERBADGE_VALIDATOR = ValidUserBadgeNames(
not_empty=False, if_empty=[],
instance_filter=(not global_admin))
for (validator, value) in ((USERNAME_VALIDATOR, user_name),
(EMAIL_VALIDATOR, email),
(USERBADGE_VALIDATOR, badges),
):
try:
validated[validator] = validator.to_python(value, None)
except formencode.Invalid, E:
error_list.append(u'%s (%s)' % (E.msg, value))
emails = self.emails.setdefault(email, [])
emails.append(line)
usernames = self.usernames.setdefault(user_name.strip(), [])
usernames.append(line)
if len(emails) > 1 or len(usernames) > 1:
self.duplicates = True
cleaned_item = item.copy()
cleaned_item.update({USER_NAME: user_name,
EMAIL: email,
                             USER_BADGES: validated.get(USERBADGE_VALIDATOR),
})
return error_list, cleaned_item
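# Hedged input sketch (values assumed): one CSV line in the fieldname order
# used above -- user_name, display_name, email, user_badges:
#   jdoe,John Doe,jdoe@example.com,beta-tester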
class ContainsEMailPlaceholders(formencode.FancyValidator):
def _to_python(self, value, state):
required = ['{url}', '{user_name}', '{password}']
missing = []
for s in required:
if s not in value:
missing.append(s)
if missing != []:
raise formencode.Invalid(
_('You need to insert the following placeholders into '
'the email text so we can insert enough information '
'for the user: %s') % ', '.join(missing),
value, state)
return value
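# Hedged template sketch (wording assumed) that would pass the check above:
#   u"Hi {user_name}, sign in at {url} using the password {password}."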
class ValidImageFileUpload(formencode.FancyValidator):
max_size = 5 * 1024 * 1024
def _to_python(self, value, state):
payload = value.file.read(self.max_size + 1)
if len(payload) > 0:
try:
value.file.seek(0)
im = Image.open(value.file)
value.file.seek(0)
del im
except IOError:
raise formencode.Invalid(_("This is not a valid image file"),
value, state)
return value
class ValidFileUpload(formencode.FancyValidator):
max_size = 1024 * 1024
def _to_python(self, value, state):
payload = value.file.read(self.max_size)
value.file.seek(0)
if len(payload) == self.max_size:
raise formencode.Invalid(_("The file is too big (>1MB)"),
value, state)
return value
class MessageableInstances(formencode.FancyValidator):
"""
Check if the given instance can be mass messaged by the current user.
"""
accept_iterator = True
def _to_python(self, value, state):
if not value:
raise formencode.Invalid(
_('Please select at least one instance'), value, state)
if not isinstance(value, list):
value = [value]
from adhocracy.controllers.massmessage import MassmessageController
        allowed_ids = set(i.id for i in
                          MassmessageController._get_allowed_instances(c.user))
if any(int(i) not in allowed_ids for i in value):
raise formencode.Invalid(
_('Disallowed instance selected'), value, state)
return value
class ProposalSortOrder(formencode.validators.OneOf):
def __init__(self, **kwargs):
from adhocracy.lib.pager import PROPOSAL_SORTS
super(ProposalSortOrder, self).__init__(
[''] +
[
v.value
for g in PROPOSAL_SORTS.by_group.values()
for v in g
])
def _to_python(self, value, state):
return value if value else None
class OptionalAttributes(formencode.validators.FormValidator):
def validate_python(self, field_dict, state):
optional_attributes = config.get_optional_user_attributes()
error_dict = {}
for (key, type_, converter, label, allowed) in optional_attributes:
if key not in field_dict:
continue
value = field_dict[key]
if value and not isinstance(value, type_):
try:
value = converter(value)
except:
error_dict[key] = _(u'Invalid value')
continue
field_dict[key] = value
if allowed is not None:
if value not in [a['value'] for a in allowed]:
error_dict[key] = _(u'Invalid choice')
continue
if error_dict:
raise formencode.Invalid(u'_', field_dict, state,
error_dict=error_dict)
return field_dict
class NotAllFalse(formencode.validators.FormValidator):
def __init__(self, keys, msg, *args, **kwargs):
super(NotAllFalse, self).__init__(*args, **kwargs)
self.keys = keys
self.msg = msg
def validate_python(self, field_dict, state):
if all(not field_dict.get(key, False) for key in self.keys):
raise formencode.Invalid(
self.msg, field_dict, state,
error_dict={self.keys[0]: self.msg}
)
class CaptchasDotNetCaptcha(formencode.FancyValidator):
def __init__(self, session, captchasdotnet):
        super(CaptchasDotNetCaptcha, self).__init__(not_empty=True)
self.session = session
self.captchasdotnet = captchasdotnet
def _to_python(self, value, state):
random = self.session.get('captchasdotnet_random')
cap = self.captchasdotnet.get_captchasdotnet()
        if not cap.verify(value, random):
            raise formencode.Invalid(_(u'Incorrect. Try again.'),
                                     value, state)
        return value
class TermsCheckValidator(formencode.validators.FancyValidator):
field_name = 'accept_terms'
def validate_python(self, field_dict, state):
if self.field_name not in field_dict:
msg = _(u'Please accept the terms of service')
raise formencode.Invalid(
msg, field_dict, state,
error_dict={self.field_name: msg}
)
else:
return field_dict
| DanielNeugebauer/adhocracy | src/adhocracy/forms/common.py | Python | agpl-3.0 | 30,325 | 0.000297 |
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2016 CERN.
#
# Invenio is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Persistent identifier minters."""
from __future__ import absolute_import, print_function
from .providers import CDSRecordIdProvider
def recid_minter(record_uuid, data):
"""Mint record identifiers."""
assert 'recid' not in data
provider = CDSRecordIdProvider.create(
object_type='rec', object_uuid=record_uuid)
data['recid'] = int(provider.pid.pid_value)
return provider.pid
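# Hedged usage sketch (record_uuid assumed to be an existing record's UUID):
#   data = {}
#   pid = recid_minter(record_uuid, data)
#   assert data['recid'] == int(pid.pid_value)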
| drjova/cds-demosite | cds/modules/records/minters.py | Python | gpl-2.0 | 1,381 | 0 |
"""
sentry.rules.actions
~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
from .base import * # NOQA
| jokey2k/sentry | src/sentry/rules/actions/__init__.py | Python | bsd-3-clause | 241 | 0 |
import datetime
from operator import attrgetter
from .models import (
Country, Person, Group, Membership, Friendship, Article,
ArticleTranslation, ArticleTag, ArticleIdea, NewsArticle)
from django.test import TestCase, skipUnlessDBFeature
from django.utils.translation import activate
from django.core.exceptions import FieldError
from django import forms
# Note that these tests are testing internal implementation details.
# ForeignObject is not part of public API.
class MultiColumnFKTests(TestCase):
def setUp(self):
# Creating countries
self.usa = Country.objects.create(name="United States of America")
self.soviet_union = Country.objects.create(name="Soviet Union")
Person()
# Creating People
self.bob = Person()
self.bob.name = 'Bob'
self.bob.person_country = self.usa
self.bob.save()
self.jim = Person.objects.create(name='Jim', person_country=self.usa)
self.george = Person.objects.create(name='George', person_country=self.usa)
self.jane = Person.objects.create(name='Jane', person_country=self.soviet_union)
self.mark = Person.objects.create(name='Mark', person_country=self.soviet_union)
self.sam = Person.objects.create(name='Sam', person_country=self.soviet_union)
# Creating Groups
self.kgb = Group.objects.create(name='KGB', group_country=self.soviet_union)
self.cia = Group.objects.create(name='CIA', group_country=self.usa)
self.republican = Group.objects.create(name='Republican', group_country=self.usa)
self.democrat = Group.objects.create(name='Democrat', group_country=self.usa)
def test_get_succeeds_on_multicolumn_match(self):
# Membership objects have access to their related Person if both
# country_ids match between them
membership = Membership.objects.create(
membership_country_id=self.usa.id, person_id=self.bob.id, group_id=self.cia.id)
person = membership.person
self.assertEqual((person.id, person.name), (self.bob.id, "Bob"))
def test_get_fails_on_multicolumn_mismatch(self):
        # Membership objects raise a DoesNotExist error when there is no
        # Person with the same id and country_id
membership = Membership.objects.create(
membership_country_id=self.usa.id, person_id=self.jane.id, group_id=self.cia.id)
self.assertRaises(Person.DoesNotExist, getattr, membership, 'person')
def test_reverse_query_returns_correct_result(self):
        # Creating a valid membership because it has the same country as the person
Membership.objects.create(
membership_country_id=self.usa.id, person_id=self.bob.id, group_id=self.cia.id)
        # Creating an invalid membership because it has a different country than the person
Membership.objects.create(
membership_country_id=self.soviet_union.id, person_id=self.bob.id,
group_id=self.republican.id)
self.assertQuerysetEqual(
self.bob.membership_set.all(), [
self.cia.id
],
attrgetter("group_id")
)
def test_query_filters_correctly(self):
        # Creating two valid memberships
Membership.objects.create(
membership_country_id=self.usa.id, person_id=self.bob.id, group_id=self.cia.id)
Membership.objects.create(
membership_country_id=self.usa.id, person_id=self.jim.id,
group_id=self.cia.id)
# Creating an invalid membership
Membership.objects.create(membership_country_id=self.soviet_union.id,
person_id=self.george.id, group_id=self.cia.id)
self.assertQuerysetEqual(
Membership.objects.filter(person__name__contains='o'), [
self.bob.id
],
attrgetter("person_id")
)
def test_reverse_query_filters_correctly(self):
timemark = datetime.datetime.utcnow()
timedelta = datetime.timedelta(days=1)
        # Creating two valid memberships
Membership.objects.create(
membership_country_id=self.usa.id, person_id=self.bob.id,
group_id=self.cia.id, date_joined=timemark - timedelta)
Membership.objects.create(
membership_country_id=self.usa.id, person_id=self.jim.id,
group_id=self.cia.id, date_joined=timemark + timedelta)
# Creating an invalid membership
Membership.objects.create(
membership_country_id=self.soviet_union.id, person_id=self.george.id,
group_id=self.cia.id, date_joined=timemark + timedelta)
self.assertQuerysetEqual(
Person.objects.filter(membership__date_joined__gte=timemark), [
'Jim'
],
attrgetter('name')
)
def test_forward_in_lookup_filters_correctly(self):
Membership.objects.create(membership_country_id=self.usa.id, person_id=self.bob.id,
group_id=self.cia.id)
Membership.objects.create(membership_country_id=self.usa.id, person_id=self.jim.id,
group_id=self.cia.id)
# Creating an invalid membership
Membership.objects.create(
membership_country_id=self.soviet_union.id, person_id=self.george.id,
group_id=self.cia.id)
self.assertQuerysetEqual(
Membership.objects.filter(person__in=[self.george, self.jim]), [
self.jim.id,
],
attrgetter('person_id')
)
self.assertQuerysetEqual(
Membership.objects.filter(person__in=Person.objects.filter(name='Jim')), [
self.jim.id,
],
attrgetter('person_id')
)
def test_double_nested_query(self):
m1 = Membership.objects.create(membership_country_id=self.usa.id, person_id=self.bob.id,
group_id=self.cia.id)
m2 = Membership.objects.create(membership_country_id=self.usa.id, person_id=self.jim.id,
group_id=self.cia.id)
Friendship.objects.create(from_friend_country_id=self.usa.id, from_friend_id=self.bob.id,
to_friend_country_id=self.usa.id, to_friend_id=self.jim.id)
self.assertQuerysetEqual(Membership.objects.filter(
person__in=Person.objects.filter(
from_friend__in=Friendship.objects.filter(
to_friend__in=Person.objects.all()))),
[m1], lambda x: x)
self.assertQuerysetEqual(Membership.objects.exclude(
person__in=Person.objects.filter(
from_friend__in=Friendship.objects.filter(
to_friend__in=Person.objects.all()))),
[m2], lambda x: x)
def test_select_related_foreignkey_forward_works(self):
Membership.objects.create(membership_country=self.usa, person=self.bob, group=self.cia)
Membership.objects.create(membership_country=self.usa, person=self.jim, group=self.democrat)
with self.assertNumQueries(1):
people = [m.person for m in Membership.objects.select_related('person').order_by('pk')]
normal_people = [m.person for m in Membership.objects.all().order_by('pk')]
self.assertEqual(people, normal_people)
def test_prefetch_foreignkey_forward_works(self):
Membership.objects.create(membership_country=self.usa, person=self.bob, group=self.cia)
Membership.objects.create(membership_country=self.usa, person=self.jim, group=self.democrat)
with self.assertNumQueries(2):
people = [
m.person for m in Membership.objects.prefetch_related('person').order_by('pk')]
normal_people = [m.person for m in Membership.objects.order_by('pk')]
self.assertEqual(people, normal_people)
def test_prefetch_foreignkey_reverse_works(self):
Membership.objects.create(membership_country=self.usa, person=self.bob, group=self.cia)
Membership.objects.create(membership_country=self.usa, person=self.jim, group=self.democrat)
with self.assertNumQueries(2):
membership_sets = [
list(p.membership_set.all())
for p in Person.objects.prefetch_related('membership_set').order_by('pk')]
normal_membership_sets = [list(p.membership_set.all())
for p in Person.objects.order_by('pk')]
self.assertEqual(membership_sets, normal_membership_sets)
def test_m2m_through_forward_returns_valid_members(self):
# We start out by making sure that the Group 'CIA' has no members.
self.assertQuerysetEqual(
self.cia.members.all(),
[]
)
Membership.objects.create(membership_country=self.usa, person=self.bob, group=self.cia)
Membership.objects.create(membership_country=self.usa, person=self.jim, group=self.cia)
# Let's check to make sure that it worked. Bob and Jim should be members of the CIA.
self.assertQuerysetEqual(
self.cia.members.all(), [
'Bob',
'Jim'
], attrgetter("name")
)
def test_m2m_through_reverse_returns_valid_members(self):
# We start out by making sure that Bob is in no groups.
self.assertQuerysetEqual(
self.bob.groups.all(),
[]
)
Membership.objects.create(membership_country=self.usa, person=self.bob, group=self.cia)
Membership.objects.create(membership_country=self.usa, person=self.bob,
group=self.republican)
# Bob should be in the CIA and a Republican
self.assertQuerysetEqual(
self.bob.groups.all(), [
'CIA',
'Republican'
], attrgetter("name")
)
def test_m2m_through_forward_ignores_invalid_members(self):
# We start out by making sure that the Group 'CIA' has no members.
self.assertQuerysetEqual(
self.cia.members.all(),
[]
)
# Something adds jane to group CIA but Jane is in Soviet Union which isn't CIA's country
Membership.objects.create(membership_country=self.usa, person=self.jane, group=self.cia)
# There should still be no members in CIA
self.assertQuerysetEqual(
self.cia.members.all(),
[]
)
def test_m2m_through_reverse_ignores_invalid_members(self):
# We start out by making sure that Jane has no groups.
self.assertQuerysetEqual(
self.jane.groups.all(),
[]
)
# Something adds jane to group CIA but Jane is in Soviet Union which isn't CIA's country
Membership.objects.create(membership_country=self.usa, person=self.jane, group=self.cia)
# Jane should still not be in any groups
self.assertQuerysetEqual(
self.jane.groups.all(),
[]
)
def test_m2m_through_on_self_works(self):
self.assertQuerysetEqual(
self.jane.friends.all(),
[]
)
Friendship.objects.create(
from_friend_country=self.jane.person_country, from_friend=self.jane,
to_friend_country=self.george.person_country, to_friend=self.george)
self.assertQuerysetEqual(
self.jane.friends.all(),
['George'], attrgetter("name")
)
def test_m2m_through_on_self_ignores_mismatch_columns(self):
self.assertQuerysetEqual(self.jane.friends.all(), [])
        # Note that we use ids instead of instances. This is because assigning an
        # instance to a ForeignObject property sets all of the related fields from
        # that instance
Friendship.objects.create(
from_friend_id=self.jane.id, to_friend_id=self.george.id,
to_friend_country_id=self.jane.person_country_id,
from_friend_country_id=self.george.person_country_id)
self.assertQuerysetEqual(self.jane.friends.all(), [])
    def test_prefetch_related_m2m_forward_works(self):
Membership.objects.create(membership_country=self.usa, person=self.bob, group=self.cia)
Membership.objects.create(membership_country=self.usa, person=self.jim, group=self.democrat)
with self.assertNumQueries(2):
members_lists = [list(g.members.all())
for g in Group.objects.prefetch_related('members')]
normal_members_lists = [list(g.members.all()) for g in Group.objects.all()]
self.assertEqual(members_lists, normal_members_lists)
def test_prefetch_related_m2m_reverse_works(self):
Membership.objects.create(membership_country=self.usa, person=self.bob, group=self.cia)
Membership.objects.create(membership_country=self.usa, person=self.jim, group=self.democrat)
with self.assertNumQueries(2):
groups_lists = [list(p.groups.all()) for p in Person.objects.prefetch_related('groups')]
normal_groups_lists = [list(p.groups.all()) for p in Person.objects.all()]
self.assertEqual(groups_lists, normal_groups_lists)
def test_translations(self):
activate('fi')
a1 = Article.objects.create(pub_date=datetime.date.today())
at1_fi = ArticleTranslation(article=a1, lang='fi', title='Otsikko', body='Diipadaapa')
at1_fi.save()
at2_en = ArticleTranslation(article=a1, lang='en', title='Title', body='Lalalalala')
at2_en.save()
with self.assertNumQueries(1):
fetched = Article.objects.select_related('active_translation').get(
active_translation__title='Otsikko')
self.assertEqual(fetched.active_translation.title, 'Otsikko')
a2 = Article.objects.create(pub_date=datetime.date.today())
at2_fi = ArticleTranslation(article=a2, lang='fi', title='Atsikko', body='Diipadaapa',
abstract='dipad')
at2_fi.save()
a3 = Article.objects.create(pub_date=datetime.date.today())
at3_en = ArticleTranslation(article=a3, lang='en', title='A title', body='lalalalala',
abstract='lala')
at3_en.save()
# Test model initialization with active_translation field.
a3 = Article(id=a3.id, pub_date=a3.pub_date, active_translation=at3_en)
a3.save()
self.assertEqual(
list(Article.objects.filter(active_translation__abstract=None)),
[a1, a3])
self.assertEqual(
list(Article.objects.filter(active_translation__abstract=None,
active_translation__pk__isnull=False)),
[a1])
activate('en')
self.assertEqual(
list(Article.objects.filter(active_translation__abstract=None)),
[a1, a2])
def test_foreign_key_raises_informative_does_not_exist(self):
referrer = ArticleTranslation()
with self.assertRaisesMessage(Article.DoesNotExist, 'ArticleTranslation has no article'):
referrer.article
def test_foreign_key_related_query_name(self):
a1 = Article.objects.create(pub_date=datetime.date.today())
ArticleTag.objects.create(article=a1, name="foo")
self.assertEqual(Article.objects.filter(tag__name="foo").count(), 1)
self.assertEqual(Article.objects.filter(tag__name="bar").count(), 0)
with self.assertRaises(FieldError):
Article.objects.filter(tags__name="foo")
def test_many_to_many_related_query_name(self):
a1 = Article.objects.create(pub_date=datetime.date.today())
i1 = ArticleIdea.objects.create(name="idea1")
a1.ideas.add(i1)
self.assertEqual(Article.objects.filter(idea_things__name="idea1").count(), 1)
self.assertEqual(Article.objects.filter(idea_things__name="idea2").count(), 0)
with self.assertRaises(FieldError):
Article.objects.filter(ideas__name="idea1")
def test_inheritance(self):
activate("fi")
na = NewsArticle.objects.create(pub_date=datetime.date.today())
ArticleTranslation.objects.create(
article=na, lang="fi", title="foo", body="bar")
self.assertQuerysetEqual(
NewsArticle.objects.select_related('active_translation'),
[na], lambda x: x
)
with self.assertNumQueries(1):
self.assertEqual(
NewsArticle.objects.select_related(
'active_translation')[0].active_translation.title,
"foo")
@skipUnlessDBFeature('has_bulk_insert')
def test_batch_create_foreign_object(self):
""" See: https://code.djangoproject.com/ticket/21566 """
objs = [Person(name="abcd_%s" % i, person_country=self.usa) for i in range(0, 5)]
Person.objects.bulk_create(objs, 10)
class FormsTests(TestCase):
# ForeignObjects should not have any form fields, currently the user needs
# to manually deal with the foreignobject relation.
class ArticleForm(forms.ModelForm):
class Meta:
model = Article
fields = '__all__'
def test_foreign_object_form(self):
# A very crude test checking that the non-concrete fields do not get form fields.
form = FormsTests.ArticleForm()
self.assertIn('id_pub_date', form.as_table())
self.assertNotIn('active_translation', form.as_table())
form = FormsTests.ArticleForm(data={'pub_date': str(datetime.date.today())})
self.assertTrue(form.is_valid())
a = form.save()
self.assertEqual(a.pub_date, datetime.date.today())
form = FormsTests.ArticleForm(instance=a, data={'pub_date': '2013-01-01'})
a2 = form.save()
self.assertEqual(a.pk, a2.pk)
self.assertEqual(a2.pub_date, datetime.date(2013, 1, 1))
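# Hedged debugging sketch (not part of the suite): the composite join these
# tests exercise is visible in the generated SQL, e.g.
#   str(Membership.objects.filter(person__name='Bob').query)
# which joins on both person_id and membership_country_id (exact SQL varies
# by backend).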
| helenst/django | tests/foreign_object/tests.py | Python | bsd-3-clause | 18,027 | 0.003495 |
"""Copyright 2008 Orbitz WorldWide
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License."""
# Django settings for graphite project.
# DO NOT MODIFY THIS FILE DIRECTLY - use local_settings.py instead
import sys, os
from os.path import abspath, dirname, join
from warnings import warn
GRAPHITE_WEB_APP_SETTINGS_LOADED = False
WEBAPP_VERSION = '0.10.0-alpha'
DEBUG = False
JAVASCRIPT_DEBUG = False
# Filesystem layout
WEB_DIR = dirname( abspath(__file__) )
WEBAPP_DIR = dirname(WEB_DIR)
GRAPHITE_ROOT = dirname(WEBAPP_DIR)
# Initialize additional path variables
# Defaults for these are set after local_settings is imported
CONTENT_DIR = ''
CSS_DIR = ''
CONF_DIR = ''
DASHBOARD_CONF = ''
GRAPHTEMPLATES_CONF = ''
STORAGE_DIR = ''
WHITELIST_FILE = ''
INDEX_FILE = ''
LOG_DIR = ''
CERES_DIR = ''
WHISPER_DIR = ''
RRD_DIR = ''
STANDARD_DIRS = []
# Cluster settings
CLUSTER_SERVERS = []
REMOTE_FIND_TIMEOUT = 3.0
REMOTE_FETCH_TIMEOUT = 6.0
REMOTE_RETRY_DELAY = 60.0
REMOTE_READER_CACHE_SIZE_LIMIT = 1000
CARBON_METRIC_PREFIX = 'carbon'
CARBONLINK_HOSTS = ["127.0.0.1:7002"]
CARBONLINK_TIMEOUT = 1.0
CARBONLINK_HASHING_KEYFUNC = None
CARBONLINK_RETRY_DELAY = 15
REPLICATION_FACTOR = 1
MEMCACHE_HOSTS = []
MEMCACHE_KEY_PREFIX = ''
FIND_CACHE_DURATION = 300
FIND_TOLERANCE = 2 * FIND_CACHE_DURATION
DEFAULT_CACHE_DURATION = 60 #metric data and graphs are cached for one minute by default
LOG_CACHE_PERFORMANCE = False
LOG_ROTATE = True
MAX_FETCH_RETRIES = 2
#Remote rendering settings
REMOTE_RENDERING = False #if True, rendering is delegated to RENDERING_HOSTS
RENDERING_HOSTS = []
REMOTE_RENDER_CONNECT_TIMEOUT = 1.0
LOG_RENDERING_PERFORMANCE = False
#Miscellaneous settings
SMTP_SERVER = "localhost"
DOCUMENTATION_URL = "http://graphite.readthedocs.org/"
ALLOW_ANONYMOUS_CLI = True
LOG_METRIC_ACCESS = False
LEGEND_MAX_ITEMS = 10
RRD_CF = 'AVERAGE'
STORAGE_FINDERS = (
'graphite.finders.standard.StandardFinder',
)
#Authentication settings
USE_LDAP_AUTH = False
LDAP_SERVER = "" # "ldapserver.mydomain.com"
LDAP_PORT = 389
LDAP_USE_TLS = False
LDAP_SEARCH_BASE = "" # "OU=users,DC=mydomain,DC=com"
LDAP_BASE_USER = "" # "CN=some_readonly_account,DC=mydomain,DC=com"
LDAP_BASE_PASS = "" # "my_password"
LDAP_USER_QUERY = "" # "(username=%s)" For Active Directory use "(sAMAccountName=%s)"
LDAP_URI = None
#Set this to True to delegate authentication to the web server
USE_REMOTE_USER_AUTHENTICATION = False
# Django 1.5 requires this so we set a default but warn the user
SECRET_KEY = 'UNSAFE_DEFAULT'
# Django 1.5 requires this to be set. Here we default to prior behavior and allow all
ALLOWED_HOSTS = [ '*' ]
# Override to link a different URL for login (e.g. for django_openid_auth)
LOGIN_URL = '/account/login'
# Set to True to require authentication to save or delete dashboards
DASHBOARD_REQUIRE_AUTHENTICATION = False
# Require Django change/delete permissions to save or delete dashboards.
# NOTE: Requires DASHBOARD_REQUIRE_AUTHENTICATION to be set
DASHBOARD_REQUIRE_PERMISSIONS = False
# Name of a group to which the user must belong to save or delete dashboards. Alternative to
# DASHBOARD_REQUIRE_PERMISSIONS, particularly useful when using only LDAP (without Admin app)
# NOTE: Requires DASHBOARD_REQUIRE_AUTHENTICATION to be set
DASHBOARD_REQUIRE_EDIT_GROUP = None
DATABASES = {
'default': {
'NAME': '/opt/graphite/storage/graphite.db',
'ENGINE': 'django.db.backends.sqlite3',
'USER': '',
'PASSWORD': '',
'HOST': '',
'PORT': '',
},
}
# If using rrdcached, set to the address or socket of the daemon
FLUSHRRDCACHED = ''
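# Hedged example of a minimal graphite.local_settings override (values
# assumed):
#   SECRET_KEY = 'replace-with-a-long-random-string'
#   MEMCACHE_HOSTS = ['127.0.0.1:11211']
#   LOG_DIR = '/var/log/graphite'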
## Load our local_settings
try:
from graphite.local_settings import *
except ImportError:
print >> sys.stderr, "Could not import graphite.local_settings, using defaults!"
## Load Django settings if they weren't picked up in local_settings
if not GRAPHITE_WEB_APP_SETTINGS_LOADED:
from graphite.app_settings import *
## Set config dependent on flags set in local_settings
# Path configuration
if not CONTENT_DIR:
CONTENT_DIR = join(WEBAPP_DIR, 'content')
if not CSS_DIR:
CSS_DIR = join(CONTENT_DIR, 'css')
if not CONF_DIR:
CONF_DIR = os.environ.get('GRAPHITE_CONF_DIR', join(GRAPHITE_ROOT, 'conf'))
if not DASHBOARD_CONF:
DASHBOARD_CONF = join(CONF_DIR, 'dashboard.conf')
if not GRAPHTEMPLATES_CONF:
GRAPHTEMPLATES_CONF = join(CONF_DIR, 'graphTemplates.conf')
if not STORAGE_DIR:
STORAGE_DIR = os.environ.get('GRAPHITE_STORAGE_DIR', join(GRAPHITE_ROOT, 'storage'))
if not WHITELIST_FILE:
WHITELIST_FILE = join(STORAGE_DIR, 'lists', 'whitelist')
if not INDEX_FILE:
INDEX_FILE = join(STORAGE_DIR, 'index')
if not LOG_DIR:
LOG_DIR = join(STORAGE_DIR, 'log', 'webapp')
if not WHISPER_DIR:
WHISPER_DIR = join(STORAGE_DIR, 'whisper/')
if not CERES_DIR:
CERES_DIR = join(STORAGE_DIR, 'ceres/')
if not RRD_DIR:
RRD_DIR = join(STORAGE_DIR, 'rrd/')
if not STANDARD_DIRS:
try:
import whisper
if os.path.exists(WHISPER_DIR):
STANDARD_DIRS.append(WHISPER_DIR)
except ImportError:
print >> sys.stderr, "WARNING: whisper module could not be loaded, whisper support disabled"
try:
import rrdtool
if os.path.exists(RRD_DIR):
STANDARD_DIRS.append(RRD_DIR)
except ImportError:
pass
# Default sqlite db file
# This is set here so that a user-set STORAGE_DIR is available
if 'sqlite3' in DATABASES.get('default',{}).get('ENGINE','') \
and not DATABASES.get('default',{}).get('NAME'):
DATABASES['default']['NAME'] = join(STORAGE_DIR, 'graphite.db')
# Caching shortcuts
if MEMCACHE_HOSTS:
CACHES['default'] = {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
'LOCATION': MEMCACHE_HOSTS,
'TIMEOUT': DEFAULT_CACHE_DURATION,
'KEY_PREFIX': MEMCACHE_KEY_PREFIX,
}
# Authentication shortcuts
if USE_LDAP_AUTH and LDAP_URI is None:
LDAP_URI = "ldap://%s:%d/" % (LDAP_SERVER, LDAP_PORT)
if USE_REMOTE_USER_AUTHENTICATION:
MIDDLEWARE_CLASSES += ('django.contrib.auth.middleware.RemoteUserMiddleware',)
AUTHENTICATION_BACKENDS.insert(0,'django.contrib.auth.backends.RemoteUserBackend')
if USE_LDAP_AUTH:
AUTHENTICATION_BACKENDS.insert(0,'graphite.account.ldapBackend.LDAPBackend')
if SECRET_KEY == 'UNSAFE_DEFAULT':
warn('SECRET_KEY is set to an unsafe default. This should be set in local_settings.py for better security')
| g76r/graphite-web | webapp/graphite/settings.py | Python | apache-2.0 | 6,812 | 0.009689 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# cRedditscore documentation build configuration file, created by
# sphinx-quickstart on Tue Jul 9 22:26:36 2013.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# To get scipy to load for readthedocs:
class Mock(object):
def __init__(self, *args, **kwargs):
pass
def __call__(self, *args, **kwargs):
return Mock()
@classmethod
def __getattr__(cls, name):
if name in ('__file__', '__path__'):
return '/dev/null'
elif name[0] == name[0].upper():
return type(name, (), {})
else:
return Mock()
MOCK_MODULES = [
'scipy',
'sklearn',
'sklearn.naive_bayes',
'sklearn.feature_extraction',
]
for mod_name in MOCK_MODULES:
sys.modules[mod_name] = Mock()
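# With the stubs installed, a docs-time import such as
# "from sklearn.naive_bayes import MultinomialNB" resolves against Mock
# instead of pulling in scipy (illustrative note; behaviour follows
# Mock.__getattr__ above).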
# If extensions (or modules to document with autodoc) are in another
# directory, add these directories to sys.path here. If the directory is
# relative to the documentation root, use os.path.abspath to make it
# absolute, like shown here.
# sys.path.insert(0, os.path.abspath('.'))
# Get the project root dir, which is the parent dir of this
cwd = os.getcwd()
project_root = os.path.dirname(cwd)
# Insert the project root dir as the first element in the PYTHONPATH.
# This lets us ensure that the source package is imported, and that its
# version is used.
sys.path.insert(0, project_root)
import cRedditscore
# -- General configuration ---------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'cReddit score'
copyright = u'2015, Gautam Sisodia'
# The version info for the project you're documenting, acts as replacement
# for |version| and |release|, also used in various other places throughout
# the built documents.
#
# The short X.Y version.
version = cRedditscore.__version__
# The full version, including alpha/beta/rc tags.
release = cRedditscore.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# There are two options for replacing |today|: either, you set today to
# some non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built
# documents.
# keep_warnings = False
# -- Options for HTML output -------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a
# theme further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as
# html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the
# top of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon
# of the docs. This file should be a Windows icon file (.ico) being
# 16x16 or 32x32 pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets)
# here, relative to this directory. They are copied after the builtin
# static files, so a file named "default.css" will overwrite the builtin
# "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names
# to template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer.
# Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer.
# Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages
# will contain a <link> tag referring to it. The value of this option
# must be the base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'cRedditscoredoc'
# -- Options for LaTeX output ------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
('index', 'cRedditscore.tex',
u'cReddit score Documentation',
u'Gautam Sisodia', 'manual'),
]
# The name of an image file (relative to this directory) to place at
# the top of the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings
# are parts, not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'cRedditscore',
u'cReddit score Documentation',
[u'Gautam Sisodia'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ----------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'cRedditscore',
u'cReddit score Documentation',
u'Gautam Sisodia',
'cRedditscore',
'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| gautsi/cRedditscore | docs/conf.py | Python | bsd-3-clause | 9,078 | 0.003855 |
# coding: utf-8
"""
DocuSign REST API
The DocuSign REST API provides you with a powerful, convenient, and simple Web services API for interacting with DocuSign. # noqa: E501
OpenAPI spec version: v2.1
Contact: devcenter@docusign.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class AppStoreProduct(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'market_place': 'str',
'product_id': 'str'
}
attribute_map = {
'market_place': 'marketPlace',
'product_id': 'productId'
}
def __init__(self, market_place=None, product_id=None): # noqa: E501
"""AppStoreProduct - a model defined in Swagger""" # noqa: E501
self._market_place = None
self._product_id = None
self.discriminator = None
if market_place is not None:
self.market_place = market_place
if product_id is not None:
self.product_id = product_id
@property
def market_place(self):
"""Gets the market_place of this AppStoreProduct. # noqa: E501
# noqa: E501
:return: The market_place of this AppStoreProduct. # noqa: E501
:rtype: str
"""
return self._market_place
@market_place.setter
def market_place(self, market_place):
"""Sets the market_place of this AppStoreProduct.
# noqa: E501
:param market_place: The market_place of this AppStoreProduct. # noqa: E501
:type: str
"""
self._market_place = market_place
@property
def product_id(self):
"""Gets the product_id of this AppStoreProduct. # noqa: E501
The Product ID from the AppStore. # noqa: E501
:return: The product_id of this AppStoreProduct. # noqa: E501
:rtype: str
"""
return self._product_id
@product_id.setter
def product_id(self, product_id):
"""Sets the product_id of this AppStoreProduct.
The Product ID from the AppStore. # noqa: E501
:param product_id: The product_id of this AppStoreProduct. # noqa: E501
:type: str
"""
self._product_id = product_id
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(AppStoreProduct, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, AppStoreProduct):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
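# --- Illustrative usage (not part of the generated model; the sample values
# are invented, everything else uses the class exactly as defined above) ---
if __name__ == '__main__':
    sample = AppStoreProduct(market_place='GooglePlay', product_id='com.example.app')
    # to_dict() walks swagger_types, so both attributes land in the output dict
    print(sample.to_dict())  # {'market_place': 'GooglePlay', 'product_id': 'com.example.app'}
    # __eq__ compares __dict__, so instances with equal field values compare equal
    print(sample == AppStoreProduct(market_place='GooglePlay',
                                    product_id='com.example.app'))  # True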
| docusign/docusign-python-client | docusign_esign/models/app_store_product.py | Python | mit | 4,127 | 0.000242 |
from django import forms
from django.db.models import Avg, Count, Max, Min, StdDev
from django.db.models import Q
from django.http import Http404
from django.shortcuts import get_object_or_404
from django.views.decorators.csrf import csrf_exempt
from django.views.generic.simple import direct_to_template
from main.utils import make_choice
from .models import Mirror, MirrorUrl, MirrorProtocol
from .utils import get_mirror_statuses, get_mirror_errors
import datetime
from operator import attrgetter
class MirrorlistForm(forms.Form):
country = forms.MultipleChoiceField(required=False)
protocol = forms.MultipleChoiceField(required=False)
ip_version = forms.MultipleChoiceField(required=False,
label="IP version", choices=(('4','IPv4'), ('6','IPv6')))
use_mirror_status = forms.BooleanField(required=False)
def __init__(self, *args, **kwargs):
super(MirrorlistForm, self).__init__(*args, **kwargs)
mirrors = Mirror.objects.filter(active=True).values_list(
'country', flat=True).distinct().order_by('country')
self.fields['country'].choices = [('all','All')] + make_choice(
mirrors)
self.fields['country'].initial = ['all']
protos = make_choice(
MirrorProtocol.objects.filter(is_download=True))
self.fields['protocol'].choices = protos
self.fields['protocol'].initial = [t[0] for t in protos]
self.fields['ip_version'].initial = ['4']
@csrf_exempt
def generate_mirrorlist(request):
if request.REQUEST.get('country', ''):
form = MirrorlistForm(data=request.REQUEST)
if form.is_valid():
countries = form.cleaned_data['country']
protocols = form.cleaned_data['protocol']
use_status = form.cleaned_data['use_mirror_status']
ipv4 = '4' in form.cleaned_data['ip_version']
ipv6 = '6' in form.cleaned_data['ip_version']
return find_mirrors(request, countries, protocols,
use_status, ipv4, ipv6)
else:
form = MirrorlistForm()
return direct_to_template(request, 'mirrors/index.html', {'mirrorlist_form': form})
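# Example of a request the view above handles (hypothetical querystring; the
# URL prefix itself is defined in the project's urls.py):
#   /mirrorlist/?country=all&protocol=http&ip_version=4&use_mirror_status=on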
def find_mirrors(request, countries=None, protocols=None, use_status=False,
ipv4_supported=True, ipv6_supported=True):
if not protocols:
protocols = MirrorProtocol.objects.filter(
is_download=True).values_list('protocol', flat=True)
qset = MirrorUrl.objects.select_related().filter(
protocol__protocol__in=protocols,
mirror__public=True, mirror__active=True, mirror__isos=True
)
if countries and 'all' not in countries:
qset = qset.filter(mirror__country__in=countries)
ip_version = Q()
if ipv4_supported:
ip_version |= Q(has_ipv4=True)
if ipv6_supported:
ip_version |= Q(has_ipv6=True)
qset = qset.filter(ip_version)
if not use_status:
urls = qset.order_by('mirror__country', 'mirror__name', 'url')
template = 'mirrors/mirrorlist.txt'
else:
status_info = get_mirror_statuses()
scores = dict([(u.id, u.score) for u in status_info['urls']])
urls = []
for u in qset:
u.score = scores[u.id]
if u.score and u.score < 100.0:
urls.append(u)
urls = sorted(urls, key=attrgetter('score'))
template = 'mirrors/mirrorlist_status.txt'
return direct_to_template(request, template, {
'mirror_urls': urls,
},
mimetype='text/plain')
def mirrors(request):
mirrors = Mirror.objects.select_related().order_by('tier', 'country')
if not request.user.is_authenticated():
mirrors = mirrors.filter(public=True, active=True)
return direct_to_template(request, 'mirrors/mirrors.html',
{'mirror_list': mirrors})
def mirror_details(request, name):
mirror = get_object_or_404(Mirror, name=name)
if not request.user.is_authenticated() and \
(not mirror.public or not mirror.active):
# TODO: maybe this should be 403? but that would leak existence
raise Http404
return direct_to_template(request, 'mirrors/mirror_details.html',
{'mirror': mirror})
def status(request):
bad_timedelta = datetime.timedelta(days=3)
status_info = get_mirror_statuses()
urls = status_info['urls']
good_urls = []
bad_urls = []
for url in urls:
# split them into good and bad lists based on delay
if not url.delay or url.delay > bad_timedelta:
bad_urls.append(url)
else:
good_urls.append(url)
context = status_info.copy()
context.update({
'good_urls': good_urls,
'bad_urls': bad_urls,
'error_logs': get_mirror_errors(),
})
return direct_to_template(request, 'mirrors/status.html', context)
# vim: set ts=4 sw=4 et:
| pyropeter/archweb | mirrors/views.py | Python | gpl-2.0 | 4,912 | 0.003054 |
# GUI frame for dftModel_function.py
from Tkinter import *
import tkFileDialog, tkMessageBox
import sys, os
import pygame
from scipy.io.wavfile import read
import dftModel_function
class DftModel_frame:
def __init__(self, parent):
self.parent = parent
self.initUI()
pygame.init()
def initUI(self):
choose_label = "Input file (.wav, mono and 44100 sampling rate):"
Label(self.parent, text=choose_label).grid(row=0, column=0, sticky=W, padx=5, pady=(10,2))
#TEXTBOX TO PRINT PATH OF THE SOUND FILE
self.filelocation = Entry(self.parent)
self.filelocation.focus_set()
self.filelocation["width"] = 25
self.filelocation.grid(row=1,column=0, sticky=W, padx=10)
self.filelocation.delete(0, END)
self.filelocation.insert(0, '../../sounds/piano.wav')
#BUTTON TO BROWSE SOUND FILE
self.open_file = Button(self.parent, text="Browse...", command=self.browse_file) #see: def browse_file(self)
self.open_file.grid(row=1, column=0, sticky=W, padx=(220, 6)) #put it beside the filelocation textbox
#BUTTON TO PREVIEW SOUND FILE
self.preview = Button(self.parent, text=">", command=self.preview_sound, bg="gray30", fg="white")
self.preview.grid(row=1, column=0, sticky=W, padx=(306,6))
## DFT MODEL
#ANALYSIS WINDOW TYPE
wtype_label = "Window type:"
Label(self.parent, text=wtype_label).grid(row=2, column=0, sticky=W, padx=5, pady=(10,2))
self.w_type = StringVar()
self.w_type.set("blackman") # initial value
window_option = OptionMenu(self.parent, self.w_type, "rectangular", "hanning", "hamming", "blackman", "blackmanharris")
window_option.grid(row=2, column=0, sticky=W, padx=(95,5), pady=(10,2))
#WINDOW SIZE
M_label = "Window size (M):"
Label(self.parent, text=M_label).grid(row=3, column=0, sticky=W, padx=5, pady=(10,2))
self.M = Entry(self.parent, justify=CENTER)
self.M["width"] = 5
self.M.grid(row=3,column=0, sticky=W, padx=(115,5), pady=(10,2))
self.M.delete(0, END)
self.M.insert(0, "511")
#FFT SIZE
N_label = "FFT size (N) (power of two bigger than M):"
Label(self.parent, text=N_label).grid(row=4, column=0, sticky=W, padx=5, pady=(10,2))
self.N = Entry(self.parent, justify=CENTER)
self.N["width"] = 5
self.N.grid(row=4,column=0, sticky=W, padx=(270,5), pady=(10,2))
self.N.delete(0, END)
self.N.insert(0, "1024")
#TIME TO START ANALYSIS
time_label = "Time in sound (in seconds):"
Label(self.parent, text=time_label).grid(row=5, column=0, sticky=W, padx=5, pady=(10,2))
self.time = Entry(self.parent, justify=CENTER)
self.time["width"] = 5
self.time.grid(row=5, column=0, sticky=W, padx=(180,5), pady=(10,2))
self.time.delete(0, END)
self.time.insert(0, ".2")
#BUTTON TO COMPUTE EVERYTHING
self.compute = Button(self.parent, text="Compute", command=self.compute_model, bg="dark red", fg="white")
self.compute.grid(row=6, column=0, padx=5, pady=(10,15), sticky=W)
# define options for opening file
self.file_opt = options = {}
options['defaultextension'] = '.wav'
options['filetypes'] = [('All files', '.*'), ('Wav files', '.wav')]
options['initialdir'] = '../../sounds/'
options['title'] = 'Open a mono audio file .wav with sample frequency 44100 Hz'
def preview_sound(self):
filename = self.filelocation.get()
if filename[-4:] == '.wav':
fs, x = read(filename)
else:
tkMessageBox.showerror("Wav file", "The audio file must be a .wav")
return
		if len(x.shape) > 1:
tkMessageBox.showerror("Stereo file", "Audio file must be Mono not Stereo")
elif fs != 44100:
tkMessageBox.showerror("Sample Frequency", "Sample frequency must be 44100 Hz")
else:
sound = pygame.mixer.Sound(filename)
sound.play()
def browse_file(self):
self.filename = tkFileDialog.askopenfilename(**self.file_opt)
#set the text of the self.filelocation
self.filelocation.delete(0, END)
self.filelocation.insert(0,self.filename)
def compute_model(self):
try:
inputFile = self.filelocation.get()
window = self.w_type.get()
M = int(self.M.get())
N = int(self.N.get())
time = float(self.time.get())
dftModel_function.extractHarmSpec(inputFile, window, M, N, time)
except ValueError as errorMessage:
tkMessageBox.showerror("Input values error",errorMessage)
| georgid/sms-tools | software/models_interface/dftModel_GUI_frame.py | Python | agpl-3.0 | 4,273 | 0.040019 |
# -*- coding: utf-8 -*-
# (c) 2018 Matt Martz <matt@sivel.net>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import datetime
import os
from ansible.module_utils.urls import (Request, open_url, urllib_request, HAS_SSLCONTEXT, cookiejar, RequestWithMethod,
UnixHTTPHandler, UnixHTTPSConnection, httplib)
from ansible.module_utils.urls import SSLValidationHandler, HTTPSClientAuthHandler, RedirectHandlerFactory
import pytest
from mock import call
if HAS_SSLCONTEXT:
import ssl
@pytest.fixture
def urlopen_mock(mocker):
return mocker.patch('ansible.module_utils.urls.urllib_request.urlopen')
@pytest.fixture
def install_opener_mock(mocker):
return mocker.patch('ansible.module_utils.urls.urllib_request.install_opener')
def test_Request_fallback(urlopen_mock, install_opener_mock, mocker):
cookies = cookiejar.CookieJar()
request = Request(
headers={'foo': 'bar'},
use_proxy=False,
force=True,
timeout=100,
validate_certs=False,
url_username='user',
url_password='passwd',
http_agent='ansible-tests',
force_basic_auth=True,
follow_redirects='all',
client_cert='/tmp/client.pem',
client_key='/tmp/client.key',
cookies=cookies,
unix_socket='/foo/bar/baz.sock',
ca_path='/foo/bar/baz.pem',
)
fallback_mock = mocker.spy(request, '_fallback')
r = request.open('GET', 'https://ansible.com')
calls = [
call(None, False), # use_proxy
call(None, True), # force
call(None, 100), # timeout
call(None, False), # validate_certs
call(None, 'user'), # url_username
call(None, 'passwd'), # url_password
call(None, 'ansible-tests'), # http_agent
call(None, True), # force_basic_auth
call(None, 'all'), # follow_redirects
call(None, '/tmp/client.pem'), # client_cert
call(None, '/tmp/client.key'), # client_key
call(None, cookies), # cookies
call(None, '/foo/bar/baz.sock'), # unix_socket
call(None, '/foo/bar/baz.pem'), # ca_path
]
fallback_mock.assert_has_calls(calls)
assert fallback_mock.call_count == 14 # All but headers use fallback
args = urlopen_mock.call_args[0]
assert args[1] is None # data, this is handled in the Request not urlopen
assert args[2] == 100 # timeout
req = args[0]
assert req.headers == {
'Authorization': b'Basic dXNlcjpwYXNzd2Q=',
'Cache-control': 'no-cache',
'Foo': 'bar',
'User-agent': 'ansible-tests'
}
assert req.data is None
assert req.get_method() == 'GET'
def test_Request_open(urlopen_mock, install_opener_mock):
r = Request().open('GET', 'https://ansible.com/')
args = urlopen_mock.call_args[0]
assert args[1] is None # data, this is handled in the Request not urlopen
assert args[2] == 10 # timeout
req = args[0]
assert req.headers == {}
assert req.data is None
assert req.get_method() == 'GET'
opener = install_opener_mock.call_args[0][0]
handlers = opener.handlers
if not HAS_SSLCONTEXT:
expected_handlers = (
SSLValidationHandler,
RedirectHandlerFactory(), # factory, get handler
)
else:
expected_handlers = (
RedirectHandlerFactory(), # factory, get handler
)
found_handlers = []
for handler in handlers:
if isinstance(handler, SSLValidationHandler) or handler.__class__.__name__ == 'RedirectHandler':
found_handlers.append(handler)
assert len(found_handlers) == len(expected_handlers)
def test_Request_open_http(urlopen_mock, install_opener_mock):
r = Request().open('GET', 'http://ansible.com/')
args = urlopen_mock.call_args[0]
opener = install_opener_mock.call_args[0][0]
handlers = opener.handlers
found_handlers = []
for handler in handlers:
if isinstance(handler, SSLValidationHandler):
found_handlers.append(handler)
assert len(found_handlers) == 0
def test_Request_open_unix_socket(urlopen_mock, install_opener_mock):
r = Request().open('GET', 'http://ansible.com/', unix_socket='/foo/bar/baz.sock')
args = urlopen_mock.call_args[0]
opener = install_opener_mock.call_args[0][0]
handlers = opener.handlers
found_handlers = []
for handler in handlers:
if isinstance(handler, UnixHTTPHandler):
found_handlers.append(handler)
assert len(found_handlers) == 1
def test_Request_open_https_unix_socket(urlopen_mock, install_opener_mock):
r = Request().open('GET', 'https://ansible.com/', unix_socket='/foo/bar/baz.sock')
args = urlopen_mock.call_args[0]
opener = install_opener_mock.call_args[0][0]
handlers = opener.handlers
found_handlers = []
for handler in handlers:
if isinstance(handler, HTTPSClientAuthHandler):
found_handlers.append(handler)
assert len(found_handlers) == 1
inst = found_handlers[0]._build_https_connection('foo')
assert isinstance(inst, UnixHTTPSConnection)
def test_Request_open_ftp(urlopen_mock, install_opener_mock, mocker):
mocker.patch('ansible.module_utils.urls.ParseResultDottedDict.as_list', side_effect=AssertionError)
    # Using the ftp scheme should prevent the AssertionError side effect from firing
r = Request().open('GET', 'ftp://foo@ansible.com/')
def test_Request_open_headers(urlopen_mock, install_opener_mock):
r = Request().open('GET', 'http://ansible.com/', headers={'Foo': 'bar'})
args = urlopen_mock.call_args[0]
req = args[0]
assert req.headers == {'Foo': 'bar'}
def test_Request_open_username(urlopen_mock, install_opener_mock):
r = Request().open('GET', 'http://ansible.com/', url_username='user')
opener = install_opener_mock.call_args[0][0]
handlers = opener.handlers
expected_handlers = (
urllib_request.HTTPBasicAuthHandler,
urllib_request.HTTPDigestAuthHandler,
)
found_handlers = []
for handler in handlers:
if isinstance(handler, expected_handlers):
found_handlers.append(handler)
assert len(found_handlers) == 2
assert found_handlers[0].passwd.passwd[None] == {(('ansible.com', '/'),): ('user', None)}
def test_Request_open_username_in_url(urlopen_mock, install_opener_mock):
r = Request().open('GET', 'http://user2@ansible.com/')
opener = install_opener_mock.call_args[0][0]
handlers = opener.handlers
expected_handlers = (
urllib_request.HTTPBasicAuthHandler,
urllib_request.HTTPDigestAuthHandler,
)
found_handlers = []
for handler in handlers:
if isinstance(handler, expected_handlers):
found_handlers.append(handler)
assert found_handlers[0].passwd.passwd[None] == {(('ansible.com', '/'),): ('user2', '')}
def test_Request_open_username_force_basic(urlopen_mock, install_opener_mock):
r = Request().open('GET', 'http://ansible.com/', url_username='user', url_password='passwd', force_basic_auth=True)
opener = install_opener_mock.call_args[0][0]
handlers = opener.handlers
expected_handlers = (
urllib_request.HTTPBasicAuthHandler,
urllib_request.HTTPDigestAuthHandler,
)
found_handlers = []
for handler in handlers:
if isinstance(handler, expected_handlers):
found_handlers.append(handler)
assert len(found_handlers) == 0
args = urlopen_mock.call_args[0]
req = args[0]
assert req.headers.get('Authorization') == b'Basic dXNlcjpwYXNzd2Q='
def test_Request_open_auth_in_netloc(urlopen_mock, install_opener_mock):
r = Request().open('GET', 'http://user:passwd@ansible.com/')
args = urlopen_mock.call_args[0]
req = args[0]
assert req.get_full_url() == 'http://ansible.com/'
opener = install_opener_mock.call_args[0][0]
handlers = opener.handlers
expected_handlers = (
urllib_request.HTTPBasicAuthHandler,
urllib_request.HTTPDigestAuthHandler,
)
found_handlers = []
for handler in handlers:
if isinstance(handler, expected_handlers):
found_handlers.append(handler)
assert len(found_handlers) == 2
def test_Request_open_netrc(urlopen_mock, install_opener_mock, monkeypatch):
here = os.path.dirname(__file__)
monkeypatch.setenv('NETRC', os.path.join(here, 'fixtures/netrc'))
r = Request().open('GET', 'http://ansible.com/')
args = urlopen_mock.call_args[0]
req = args[0]
assert req.headers.get('Authorization') == b'Basic dXNlcjpwYXNzd2Q='
r = Request().open('GET', 'http://foo.ansible.com/')
args = urlopen_mock.call_args[0]
req = args[0]
assert 'Authorization' not in req.headers
monkeypatch.setenv('NETRC', os.path.join(here, 'fixtures/netrc.nonexistant'))
r = Request().open('GET', 'http://ansible.com/')
args = urlopen_mock.call_args[0]
req = args[0]
assert 'Authorization' not in req.headers
def test_Request_open_no_proxy(urlopen_mock, install_opener_mock, mocker):
build_opener_mock = mocker.patch('ansible.module_utils.urls.urllib_request.build_opener')
r = Request().open('GET', 'http://ansible.com/', use_proxy=False)
handlers = build_opener_mock.call_args[0]
found_handlers = []
for handler in handlers:
if isinstance(handler, urllib_request.ProxyHandler):
found_handlers.append(handler)
assert len(found_handlers) == 1
@pytest.mark.skipif(not HAS_SSLCONTEXT, reason="requires SSLContext")
def test_Request_open_no_validate_certs(urlopen_mock, install_opener_mock):
r = Request().open('GET', 'https://ansible.com/', validate_certs=False)
opener = install_opener_mock.call_args[0][0]
handlers = opener.handlers
ssl_handler = None
for handler in handlers:
if isinstance(handler, HTTPSClientAuthHandler):
ssl_handler = handler
break
assert ssl_handler is not None
inst = ssl_handler._build_https_connection('foo')
assert isinstance(inst, httplib.HTTPSConnection)
context = ssl_handler._context
assert context.protocol == ssl.PROTOCOL_SSLv23
if ssl.OP_NO_SSLv2:
assert context.options & ssl.OP_NO_SSLv2
assert context.options & ssl.OP_NO_SSLv3
assert context.verify_mode == ssl.CERT_NONE
assert context.check_hostname is False
def test_Request_open_client_cert(urlopen_mock, install_opener_mock):
here = os.path.dirname(__file__)
client_cert = os.path.join(here, 'fixtures/client.pem')
client_key = os.path.join(here, 'fixtures/client.key')
r = Request().open('GET', 'https://ansible.com/', client_cert=client_cert, client_key=client_key)
opener = install_opener_mock.call_args[0][0]
handlers = opener.handlers
ssl_handler = None
for handler in handlers:
if isinstance(handler, HTTPSClientAuthHandler):
ssl_handler = handler
break
assert ssl_handler is not None
assert ssl_handler.client_cert == client_cert
assert ssl_handler.client_key == client_key
https_connection = ssl_handler._build_https_connection('ansible.com')
assert https_connection.key_file == client_key
assert https_connection.cert_file == client_cert
def test_Request_open_cookies(urlopen_mock, install_opener_mock):
r = Request().open('GET', 'https://ansible.com/', cookies=cookiejar.CookieJar())
opener = install_opener_mock.call_args[0][0]
handlers = opener.handlers
cookies_handler = None
for handler in handlers:
if isinstance(handler, urllib_request.HTTPCookieProcessor):
cookies_handler = handler
break
assert cookies_handler is not None
def test_Request_open_invalid_method(urlopen_mock, install_opener_mock):
r = Request().open('UNKNOWN', 'https://ansible.com/')
args = urlopen_mock.call_args[0]
req = args[0]
assert req.data is None
assert req.get_method() == 'UNKNOWN'
# assert r.status == 504
def test_Request_open_custom_method(urlopen_mock, install_opener_mock):
r = Request().open('DELETE', 'https://ansible.com/')
args = urlopen_mock.call_args[0]
req = args[0]
assert isinstance(req, RequestWithMethod)
def test_Request_open_user_agent(urlopen_mock, install_opener_mock):
r = Request().open('GET', 'https://ansible.com/', http_agent='ansible-tests')
args = urlopen_mock.call_args[0]
req = args[0]
assert req.headers.get('User-agent') == 'ansible-tests'
def test_Request_open_force(urlopen_mock, install_opener_mock):
r = Request().open('GET', 'https://ansible.com/', force=True, last_mod_time=datetime.datetime.now())
args = urlopen_mock.call_args[0]
req = args[0]
assert req.headers.get('Cache-control') == 'no-cache'
assert 'If-modified-since' not in req.headers
def test_Request_open_last_mod(urlopen_mock, install_opener_mock):
now = datetime.datetime.now()
r = Request().open('GET', 'https://ansible.com/', last_mod_time=now)
args = urlopen_mock.call_args[0]
req = args[0]
assert req.headers.get('If-modified-since') == now.strftime('%a, %d %b %Y %H:%M:%S -0000')
def test_Request_open_headers_not_dict(urlopen_mock, install_opener_mock):
with pytest.raises(ValueError):
Request().open('GET', 'https://ansible.com/', headers=['bob'])
def test_Request_init_headers_not_dict(urlopen_mock, install_opener_mock):
with pytest.raises(ValueError):
Request(headers=['bob'])
@pytest.mark.parametrize('method,kwargs', [
('get', {}),
('options', {}),
('head', {}),
('post', {'data': None}),
('put', {'data': None}),
('patch', {'data': None}),
('delete', {}),
])
def test_methods(method, kwargs, mocker):
expected = method.upper()
open_mock = mocker.patch('ansible.module_utils.urls.Request.open')
request = Request()
getattr(request, method)('https://ansible.com')
open_mock.assert_called_once_with(expected, 'https://ansible.com', **kwargs)
def test_open_url(urlopen_mock, install_opener_mock, mocker):
req_mock = mocker.patch('ansible.module_utils.urls.Request.open')
open_url('https://ansible.com/')
req_mock.assert_called_once_with('GET', 'https://ansible.com/', data=None, headers=None, use_proxy=True,
force=False, last_mod_time=None, timeout=10, validate_certs=True,
url_username=None, url_password=None, http_agent=None,
force_basic_auth=False, follow_redirects='urllib2',
client_cert=None, client_key=None, cookies=None, use_gssapi=False,
unix_socket=None, ca_path=None)
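# To run only this module (assumes a source checkout with the fixtures/ files
# present, plus pytest and the `mock` package installed):
#   pytest -q test/units/module_utils/urls/test_Request.py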
| cchurch/ansible | test/units/module_utils/urls/test_Request.py | Python | gpl-3.0 | 14,937 | 0.001674 |
class InvalidValueState(ValueError):
pass
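# Illustrative call site (hypothetical -- the datadiffs code that raises this
# lives elsewhere in the package):
#     if not isinstance(value, expected_type):
#         raise InvalidValueState('cannot apply diff to %r' % (value,))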
| szopu/datadiffs | datadiffs/exceptions.py | Python | mit | 46 | 0 |
import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 128 , FREQ = 'D', seed = 0, trendtype = "ConstantTrend", cycle_length = 12, transform = "Integration", sigma = 0.0, exog_count = 0, ar_order = 12); | antoinecarme/pyaf | tests/artificial/transf_Integration/trend_ConstantTrend/cycle_12/ar_12/test_artificial_128_Integration_ConstantTrend_12_12_0.py | Python | bsd-3-clause | 271 | 0.084871 |
import bpy
op = bpy.context.active_operator
op.radius = 0.5
op.arc_div = 8
op.lin_div = 0
op.size = (0.0, 0.0, 3.0)
op.div_type = 'CORNERS'
| Microvellum/Fluid-Designer | win64-vc/2.78/Python/bin/2.78/scripts/addons/presets/operator/mesh.primitive_round_cube_add/Capsule.py | Python | gpl-3.0 | 141 | 0 |
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Deleting field 'SourceRegion.is_high_risk'
db.execute('''
ALTER TABLE source_region
DROP COLUMN is_high_risk;
''')
    def backwards(self, orm):
        # Adding field 'SourceRegion.is_high_risk'
        # Re-create the column dropped in forwards() so the migration is
        # reversible. BooleanField with "NOT NULL DEFAULT false" is an
        # assumption; the original default is not recorded in this migration.
        db.execute('''
            ALTER TABLE source_region
            ADD COLUMN is_high_risk boolean NOT NULL DEFAULT false;
        ''')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'datapoints.campaign': {
'Meta': {'ordering': "('-start_date',)", 'unique_together': "(('office', 'start_date'),)", 'object_name': 'Campaign', 'db_table': "'campaign'"},
'campaign_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['datapoints.CampaignType']"}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'end_date': ('django.db.models.fields.DateField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'office': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['datapoints.Office']"}),
'slug': ('autoslug.fields.AutoSlugField', [], {'unique': 'True', 'max_length': '50', 'populate_from': "'get_full_name'", 'unique_with': '()'}),
'start_date': ('django.db.models.fields.DateField', [], {})
},
u'datapoints.campaigntype': {
'Meta': {'object_name': 'CampaignType', 'db_table': "'campaign_type'"},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '55'})
},
u'datapoints.indicator': {
'Meta': {'ordering': "('name',)", 'object_name': 'Indicator', 'db_table': "'indicator'"},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_reported': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'short_name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'slug': ('autoslug.fields.AutoSlugField', [], {'unique': 'True', 'max_length': '255', 'populate_from': "'name'", 'unique_with': '()'}),
'source': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['datapoints.Source']"})
},
u'datapoints.office': {
'Meta': {'object_name': 'Office', 'db_table': "'office'"},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '55'})
},
u'datapoints.region': {
'Meta': {'unique_together': "(('name', 'region_type', 'office'),)", 'object_name': 'Region', 'db_table': "'region'"},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'latitude': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'longitude': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'office': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['datapoints.Office']"}),
'parent_region': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['datapoints.Region']", 'null': 'True'}),
'region_code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'region_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['datapoints.RegionType']"}),
'slug': ('autoslug.fields.AutoSlugField', [], {'unique': 'True', 'max_length': '255', 'populate_from': "'name'", 'unique_with': '()'}),
'source': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['datapoints.Source']"})
},
u'datapoints.regiontype': {
'Meta': {'object_name': 'RegionType', 'db_table': "'region_type'"},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '55'})
},
u'datapoints.source': {
'Meta': {'object_name': 'Source', 'db_table': "'source'"},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'source_description': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'source_name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '55'})
},
u'source_data.campaignmap': {
'Meta': {'object_name': 'CampaignMap', 'db_table': "'campaign_map'"},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'mapped_by': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
'master_object': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['datapoints.Campaign']"}),
'source_object': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['source_data.SourceCampaign']", 'unique': 'True'})
},
u'source_data.document': {
'Meta': {'ordering': "('-id',)", 'unique_together': "(('docfile', 'doc_text'),)", 'object_name': 'Document'},
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
'doc_text': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'docfile': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True'}),
'guid': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_processed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'master_datapoint_count': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'source': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['datapoints.Source']"}),
'source_datapoint_count': ('django.db.models.fields.IntegerField', [], {'null': 'True'})
},
u'source_data.documentdetail': {
'Meta': {'object_name': 'DocumentDetail', 'db_table': "'document_detail'"},
'db_model': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'document': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['source_data.Document']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'map_id': ('django.db.models.fields.IntegerField', [], {}),
'master_dp_count': ('django.db.models.fields.IntegerField', [], {}),
'master_object_id': ('django.db.models.fields.IntegerField', [], {}),
'source_dp_count': ('django.db.models.fields.IntegerField', [], {}),
'source_object_id': ('django.db.models.fields.IntegerField', [], {}),
'source_string': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'source_data.etljob': {
'Meta': {'ordering': "('-date_attempted',)", 'object_name': 'EtlJob'},
'cron_guid': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'date_attempted': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2015, 6, 5, 0, 0)'}),
'date_completed': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'error_msg': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'guid': ('django.db.models.fields.CharField', [], {'max_length': '40', 'primary_key': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'success_msg': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'task_name': ('django.db.models.fields.CharField', [], {'max_length': '55'})
},
u'source_data.indicatormap': {
'Meta': {'object_name': 'IndicatorMap', 'db_table': "'indicator_map'"},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'mapped_by': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
'master_object': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['datapoints.Indicator']"}),
'source_object': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['source_data.SourceIndicator']", 'unique': 'True'})
},
u'source_data.odkform': {
'Meta': {'object_name': 'ODKForm', 'db_table': "'odk_form'"},
'document': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['source_data.Document']", 'null': 'True'}),
'form_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_processed': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'master_datapoint_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'response_msg': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'source_datapoint_count': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'source_data.processstatus': {
'Meta': {'object_name': 'ProcessStatus'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'status_description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'status_text': ('django.db.models.fields.CharField', [], {'max_length': '25'})
},
u'source_data.regionmap': {
'Meta': {'object_name': 'RegionMap', 'db_table': "'region_map'"},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'mapped_by': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
'master_object': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['datapoints.Region']"}),
'source_object': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['source_data.SourceRegion']", 'unique': 'True'})
},
u'source_data.sourcecampaign': {
'Meta': {'object_name': 'SourceCampaign', 'db_table': "'source_campaign'"},
'campaign_string': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'document': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['source_data.Document']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'source_guid': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'source_data.sourcedatapoint': {
'Meta': {'unique_together': "(('source', 'source_guid', 'indicator_string'),)", 'object_name': 'SourceDataPoint', 'db_table': "'source_datapoint'"},
'campaign_string': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'cell_value': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2015, 6, 5, 0, 0)'}),
'document': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['source_data.Document']"}),
'guid': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'indicator_string': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'region_code': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'row_number': ('django.db.models.fields.IntegerField', [], {}),
'source': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['datapoints.Source']"}),
'source_guid': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'status': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['source_data.ProcessStatus']"})
},
u'source_data.sourceindicator': {
'Meta': {'object_name': 'SourceIndicator', 'db_table': "'source_indicator'"},
'document': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['source_data.Document']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'indicator_string': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'source_guid': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'source_data.sourceregion': {
'Meta': {'object_name': 'SourceRegion', 'db_table': "'source_region'"},
'country': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'document': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['source_data.Document']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lat': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'lon': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'parent_code': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'parent_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'region_code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'region_type': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'source_guid': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'source_data.sourceregionpolygon': {
'Meta': {'object_name': 'SourceRegionPolygon', 'db_table': "'source_region_polygon'"},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'polygon': ('django.db.models.fields.TextField', [], {}),
'shape_area': ('django.db.models.fields.FloatField', [], {}),
'shape_len': ('django.db.models.fields.FloatField', [], {}),
'source_region': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['source_data.SourceRegion']", 'unique': 'True'})
},
'source_data.vcmsettlement': {
'Meta': {'object_name': 'VCMSettlement', 'db_table': "'odk_vcm_settlement'"},
'created_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2015, 6, 5, 0, 0)'}),
'daterecorded': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'deviceid': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'meta_instanceid': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'phonenumber': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'process_status': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['source_data.ProcessStatus']"}),
'request_guid': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'settlementcode': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'settlementgps_accuracy': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'settlementgps_altitude': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'settlementgps_latitude': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'settlementgps_longitude': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'settlementname': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'simserial': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'submissiondate': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'vcmname': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'vcmphone': ('django.db.models.fields.CharField', [], {'max_length': '255'})
}
}
complete_apps = ['source_data']
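# Typical South invocation for this step (hypothetical; 0112 is assumed to be
# the preceding migration in this app):
#   python manage.py migrate source_data 0113   # apply (forwards)
#   python manage.py migrate source_data 0112   # revert (backwards)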
| SeedScientific/polio | source_data/migrations/0113_auto__del_field_sourceregion_is_high_risk.py | Python | agpl-3.0 | 20,716 | 0.008351 |