repo_name
stringlengths
5
100
path
stringlengths
4
231
language
stringclasses
1 value
license
stringclasses
15 values
size
int64
6
947k
score
float64
0
0.34
prefix
stringlengths
0
8.16k
middle
stringlengths
3
512
suffix
stringlengths
0
8.17k
mph-/lcapy
doc/examples/networks/ladderRC3.py
Python
lgpl-2.1
163
0.01227
from lcapy import R, C n = C('C1') | (R('R1') + (C('C2') | (R('R2') + (C('C3') | (R('R3') + C('C4')))))) n.d
raw(__file__.replace('.py', '.png'), lay
out='ladder')
lmazuel/azure-sdk-for-python
azure-mgmt-network/azure/mgmt/network/v2018_01_01/models/network_interface_association_py3.py
Python
mit
1,345
0
# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. # -------------------------------------------------------------------------- from msrest.serialization import Model class NetworkInterfaceAssociation(Model): """Network interface and its custo
m security rules. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Network interface ID. :vartype id: str :param security_rules: Collection of custom security rules. :type security_rules: list[~azure.mgmt.network.v2018_01_01.models.SecurityRule] """ _validation = { 'id': {'readonly': True}, } _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'security_rules': {'key': 'securit
yRules', 'type': '[SecurityRule]'}, } def __init__(self, *, security_rules=None, **kwargs) -> None: super(NetworkInterfaceAssociation, self).__init__(**kwargs) self.id = None self.security_rules = security_rules
ayys/collegesearch
main/models.py
Python
gpl-3.0
507
0
from django.db import models # Create your models here. class Collage(models.Model): name = models.CharField(max_length=50) slug = models.SlugField(unique=True) no_of_students = models.IntegerField() y
ear_estd = models.IntegerField() def __str
__(self): return self.name class Faculty(models.Model): name = models.CharField(max_length=100) price = models.IntegerField() year_started = models.IntegerField() collage = models.ForeignKey(Collage, default=None)
BYVoid/OpenCC
data/scripts/find_target.py
Python
apache-2.0
282
0
#!/usr/bin/env python # -
*- coding: utf-8 -*- import sys from common import find_target_items if len(sys.argv) != 3: print("Find the value keyword in all pairs") print(("Usage: ", sys.argv[0], "[input] [keyword]")) exit(1) find_target_it
ems(sys.argv[1], sys.argv[2])
unnikrishnankgs/va
venv/lib/python3.5/site-packages/jupyter_core/migrate.py
Python
bsd-2-clause
8,222
0.004014
from __future__ import unicode_literals """Migrating IPython < 4.0 to Jupyter This *copies* configuration and resources to their new locations in Jupyter Migrations: - .ipython/ - nbextensions -> JUPYTER_DATA_DIR/nbextensions - kernels -> JUPYTER_DATA_DIR/kernels - .ipython/profile_default/ - static/custom -> .jupyter/custom - nbconfig -> .jupyter/nbconfig - security/ - notebook_secret, notebook_cookie_secret, nbsignatures.db -> JUPYTER_DATA_DIR - ipython_{notebook,nbconvert,qtconsole}_config.py -> .jupyter/jupyter_{name}_config.py """ # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. import os import re import shutil from datetime import datetime from traitlets.config import PyFileConfigLoader, JSONFileConfigLoader from traitlets.log import get_logger from ipython_genutils.path import ensure_dir_exists try: from IPython.paths import get_ipython_dir except ImportError: # IPython < 4 try: from IPython.utils.path import get_ipython_dir except ImportError: def get_ipython_dir(): return os.environ.get('IPYTHONDIR', os.path.expanduser('~/.ipython')) from .paths import jupyter_config_dir, jupyter_data_dir from .application import JupyterApp pjoin = os.path.join migrations = { pjoin('{ipython_dir}', 'nbextensions'): pjoin('{jupyter_data}', 'nbextensions'), pjoin('{ipython_dir}', 'kernels'): pjoin('{jupyter_data}', 'kernels'), pjoin('{profile}', 'nbconfig'): pjoin('{jupyter_config}', 'nbconfig'), } custom_src_t = pjoin('{profile}', 'static', 'custom') custom_dst_t = pjoin('{jupyter_config}', 'custom') for security_file in ('notebook_secret', 'notebook_cookie_secret', 'nbsignatures.db'): src = pjoin('{profile}', 'security', security_file) dst = pjoin('{jupyter_data}', security_file) migrations[src] = dst config_migrations = ['notebook', 'nbconvert', 'qtconsole'] regex = re.compile config_substitutions = { regex(r'\bIPythonQtConsoleApp\b'): 'JupyterQtConsoleApp', regex(r'\bIPythonWidget\b'): 'JupyterWidget', 
regex(r'\bRichIPythonWidget\b'): 'RichJupyterWidget', regex(r'\bIPython\.html\b'): 'notebook', regex(r'\bIPython\.nbconvert\b'): 'nbconvert', } def migrate_dir(src, dst): """Migrate a directory from src to dst""" log = get_logger() if not os.listdir(src): log.debug("No files in %s" % src) return False if os.path.exists(dst): if os.listdir(dst): # already exists, non-empty log.debug("%s already exists" % dst) return False else: os.rmdir(dst) log.info("Copying %s -> %s" % (src, dst)) ensure_dir_exists(os.path.dirname(dst)) shutil.copytree(src, dst, symlinks=True) return True def migrate_file(src, dst, substitutions=None): """Migrate a single file from src to dst substitutions is an optional dict of {regex: replacement} for performing replacements on the file. """ log = get_logger() if os.path.exists(dst): # already exists log.debug("%s already exists" % dst) return False log.info("Copying %s -> %s" % (src, dst)) ensure_dir_exists(os.path.dirname(dst)) shutil.copy(src, dst) if substitutions: with open(dst) as f: text = f.read() for pat, replacement in substitutions.items(): text = pat.sub(replacement, text) with open(dst, 'w') as f: f.write(text) return True def migrate_one(src, dst): """Migrate one item dispatches to migrate_dir/_file """ log = get_logger() if os.path.isfile(src): return migrate_file(src, dst) elif os.path.isdir(src): return migrate_dir(src, dst) else: log.debug("Nothing to migrate for %s" % src) return False def migrate_static_custom(src, dst): """Migrate non-empty custom.js,css from src to dst src, dst are 'custom' directories containing custom.{js,css} """ log = get_logger() migrated = False custom_js = pjoin(src, 'custom.js') custom_css = pjoin(src, 'custom.css') # check if custom_js is empty: custom_js_empty = True if os.path.isfile(custom_js): with open(custom_js) as f: js = f.read().strip() for line in js.splitlines(): if not ( line.isspace() or line.strip().startswith(('/*', '*', '//')) ): custom_js_empty = False break # check if custom_css 
is empty: custom_css_empty = True if os.path.isfile(custom_css): with open(custom_css) as f: css = f.read().strip() custom_css_empty = css.startswith('/*') and css.endswith('*/') if custom_js_empty: log.debug("Ignoring empty %s" % custom_js) if custom_css_empty: log.debug("Ignoring empty %s" % custom_css) if custom_js_empty and custom_css_empty: # nothing to migrate return False ensure_dir_exists(dst) if not custom_js_empty or not custom_css_empty: ensure_dir_exists(dst) if not custom_js_empty: if migrate_file(custom_js, pjoin(dst, 'custom.js')): migrated = True if not custom_css_empty: if migrate_file(custom_css, pjoin(dst, 'custom.css')): migrated = True return migrated def migrate_config(name, env): """Migrate a config file Includes substitutions for updated configurable names. """ log = get_logger() src_base = pjoin('{profile}', 'ipython_{name}_config').format(name=name, **env) dst_base = pjoin('{jupyter_config}', 'jupyter_{name}_config').format(name=name, **env) loaders = { '.py': PyFileConfigLoader, '.json': JSONFileConfigLoader, } migrated = [] for ext in ('.py', '.json'): src = src_base + ext dst = dst_base + ext if os.path.exists(src): cfg = loaders[ext](src).load_config() if cfg: if migrate_file(src, dst, substitutions=config_substitutions): migrated.append(src) else: # don't migrate empty config files
log.debug("Not migrating empty config file: %s" % src) return migrated def migrate(): """Migrate IPython configuration to Jupyter""" env = { 'jupyter_data': jupyter_data_dir(), 'jupyter_config': jupyter_config_dir(), 'ipython_dir': get_ipython_dir(), 'profile': os.path.join(get
_ipython_dir(), 'profile_default'), } migrated = False for src_t, dst_t in migrations.items(): src = src_t.format(**env) dst = dst_t.format(**env) if os.path.exists(src): if migrate_one(src, dst): migrated = True for name in config_migrations: if migrate_config(name, env): migrated = True custom_src = custom_src_t.format(**env) custom_dst = custom_dst_t.format(**env) if os.path.exists(custom_src): if migrate_static_custom(custom_src, custom_dst): migrated = True # write a marker to avoid re-running migration checks ensure_dir_exists(env['jupyter_config']) with open(os.path.join(env['jupyter_config'], 'migrated'), 'w') as f: f.write(datetime.utcnow().isoformat()) return migrated class JupyterMigrate(JupyterApp): name = 'jupyter-migrate' description = """ Migrate configuration and data from .ipython prior to 4.0 to Jupyter locations. This migrates: - config files in the default profile - kernels in ~/.ipython/kernels - notebook javascript extensions in ~/.ipython/extensions - custom.js/css to .jupyter/custom to their new Jupyter locations. All files are copied, not moved. If the destinations already exist, nothing will be done. """ def start(self): if not migrate(): self.log.info("Found nothing to migrate.") main = JupyterMigrate.launch_instance
AMairesse/hc-client
src/hc_component.py
Python
gpl-2.0
1,780
0.002809
import pytz import datetime # statics UNKNOWN_TYPE = 'Unknown' DEFAULT_FREQ = 0 # No automatic update by default DEFAULT_TIMEZONE = 'UTC' DEFAULT_STATUS = False class Component(dict): # Private attributes client_hostname = None server_host = None server_user = None server_password = None # Public attributes component_type = UNKNOWN_TYPE type = UNKNOWN_TYPE url = None hostname = None name = None freq = DEFAULT_FREQ status = DEFAULT_STATUS last_value = None last_value_dt = None timezone = None # Init method uses dict so we can pass any field for creation def __init__(self, **kwargs): super(Component, self).__init__(**kwargs) self.__dict__ = self self.timezone = pytz.timezone(DEFAULT_TIMEZONE) # Read a component next refresh date def refresh_dt(self): if self.last_value_dt is None: return datetime.datetime.now(self.timezone) else:
new_value_dt = self.last_value_dt + da
tetime.timedelta(seconds=self.freq) return max(new_value_dt, datetime.datetime.now(self.timezone)) # Add a server hosting config def add_config_server(self, client_hostname, server_host, server_user, server_password): self.client_hostname = client_hostname self.server_host = server_host self.server_user = server_user self.server_password = server_password # Configure the component @staticmethod def initialize(): return True # Register a new component on the server def register(self): return True # Update the component def update(self): self.last_value_dt = datetime.datetime.now(self.timezone) return True
SunDwarf/curious
curious/exc.py
Python
mit
4,130
0.000484
# This file is part of curious. # # curious is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # curious is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public License # along with curious. If not, see <http://www.gnu.org/licenses/>. """ Exceptions raised from within the library. .. currentmodule:: curious.exc """ import enum import warnings from asks.response_objects import Response class CuriousError(Exception): """ The base class for all curious exceptions. """ # HTTP based exceptions. class ErrorCode(enum.IntEnum): UNKNOWN_ACCOUNT = 10001 UNKNOWN_APPLICATION = 10002 UNKNOWN_CHANNEL = 10003 UNKNOWN_GUILD = 10004 UNKNOWN_INTEGRATION = 10005 UNKNOWN_INVITE = 10006 UNKNOWN_MEMBER = 10007 UNKNOWN_MESSAGE = 10008 UNKNOWN_OVERWRITE = 1009 UNKNOWN_PROVIDER = 10010 UNKNOWN_ROLE = 10011 UNKNOWN_TOKEN = 10012 UNKNOWN_USER = 10013 UNKNOWN_EMOJI = 10014 NO_BOTS = 20001 ONLY_BOTS = 20002 MAX_GUILDS = 30001 # technically user only MAX_FRIENDS = 30002 MAX_PINS = 30003 MAX_ROLES = 30005 MAX_REACTIONS = 30010 MAX_GUILD_CHANNELS = 30013 UNAUTHORIZED = 40001 MISSING_ACCESS = 50001 INVALID_ACCOUNT = 50002 NO_DMS = 50003 EMBED_DISABLED = 50004 CANNOT_EDIT = 50005 CANNOT_SEND_EMPTY_MESSAGE = 50006 CANNOT_SEND_TO_USER = 50007 CANNOT_SEND_TO_VC = 50008 VERIFICATION_TOO_HIGH = 50009 OAUTH2_NO_BOT = 50010 OAUTH2_LIMIT = 50011 INVALID_OAUTH_STATE = 50012 MISSING_PERMISSIONS = 50013 INVALID_AUTH_TOKEN = 50014 NOTE_TOO_LONG = 50015 INVALID_MESSAGE_COUNT = 50016 CANNOT_PIN = 50019 INVALID_VANITY_URL = 50020 TOO_OLD_TO_BULK_DELETE = 50034 INVALID_FORM_BODY = 
50035 INVALID_INVITE_GUILD = 50036 REACTION_BLOCKED = 90001 UNKNOWN = 0 class HTTPException(CuriousError, ConnectionError): """ Raised when a HTTP request fails with a 400 <= e < 600 error code. """ def __init__(self, response: Response, error: dict): self.response = response error_code = error.get("code", 0) try: #: The error code for this response. self.error_code = ErrorCode(error_code) except ValueError: warnings.warn(f"Received unknown error code {error_code}") #: The error code for this response. self.error_code = ErrorCode.UNKNOWN self.error_message = error.get("message") self.error = error def __str__(
self) -> str: if self.error_code == ErrorCode.UNKNOWN: return repr(self.error) return "{} ({}): {}".format(self.error_code, self.error_code.name, self.error_message) __repr__ = __str__ class Unauthorized(HTTPException): """ Raised when your bot token is invalid. """ class Forbidden(HTTPException): """ Raised when you don't have permission for something. """ class NotFound(HTTPException): """ Raised when someth
ing could not be found. """ class PermissionsError(CuriousError, PermissionError): """ Raised when you do not have sufficient permission to perform an action. :ivar permission_required: The string of the permission required to perform this action. """ def __init__(self, permission_required: str): self.permission_required = permission_required def __str__(self) -> str: return "Bot requires the permission {} to perform this action"\ .format(self.permission_required) __repr__ = __str__ class HierarchyError(CuriousError, PermissionError): """ Raised when you can't do something due to the hierarchy. """
jonnyniv/boost_converter
host/gui/GTK+/share/glib-2.0/gdb/glib.py
Python
apache-2.0
7,426
0.010908
import gdb # This is not quite right, as local vars may override symname def read_global_var (symname): return gdb.selected_frame().read_var(symname) def g_quark_to_string (quark): if quark == None: return None quark = long(quark) if quark == 0: return None try: val = read_global_var ("quarks") max_q = long(read_global_var ("quark_seq_id")) except: try: val = read_global_var ("g_quarks") max_q = long(read_global_var ("g_quark_seq_id")) except: return None; if quark < max_q: return val[quark].string() return None # We override the node printers too, so that node->next is not expanded class GListNodePrinter: "Prints a GList node" def __init__ (self, val): self.val = val def to_string (self): return "{data=%s, next=0x%x, prev=0x%x}" % (str(self.val["data"]), long(self.val["next"]), long(self.val["prev"])) class GSListNodePrinter: "Prints a GSList node" def __init__ (self, val): self.val = val def to_string (self): return "{data=%s, next=0x%x}" % (str(self.val["data"]), long(self.val["next"])) class GListPrinter: "Prints a GList" class _iterator: def __init__(self, head, listtype): self.link = head self.listtype = listtype self.count = 0 def __iter__(self): return self def next(self): if self.link == 0: raise StopIteration data = self.link['data'] self.link = self.link['next'] count = self.count self.count = self.count + 1 return ('[%d]' % count, data) def __init__ (self, val, listtype): self.val = val self.listtype = listtype def children(self): return self._iterator(self.val, self.listtype) def to_string (self): return "0x%x" % (long(self.val)) def display_hint (self): return "array" class GHashPrinter: "Prints a GHashTable" class _iterator: def __init__(self, ht, keys_are_strings): self.ht = ht if ht != 0: self.keys = ht["keys"] self.values = ht["values"] self.hashes = ht["hashes"] self.size = ht["size"] self.pos = 0 self.keys_are_strings = keys_are_strings self.value = None def __iter__(self): return self def next(self): if self.ht == 0: raise StopIteration if 
self.value != None: v = self.value self.value = None return v while long(self.pos) < long(self.size): self.pos = self.pos + 1 if long (self.hashes[self.pos]) >= 2: key = self.keys[self.pos] val = self.values[self.pos] if self.keys_are_strings: key = key.cast (gdb.lookup_type("char").pointer()
) # Queue value for next result self.value = ('[%dv]'% (self.pos), val) # Return key return ('[%dk]'% (self.pos), key) raise StopIteration def __init__ (self, val): self.val = val self.keys_are_strings = False try: string_hash = r
ead_global_var ("g_str_hash") except: string_hash = None if self.val != 0 and string_hash != None and self.val["hash_func"] == string_hash: self.keys_are_strings = True def children(self): return self._iterator(self.val, self.keys_are_strings) def to_string (self): return "0x%x" % (long(self.val)) def display_hint (self): return "map" def pretty_printer_lookup (val): if is_g_type_instance (val): return GTypePrettyPrinter (val) def pretty_printer_lookup (val): # None yet, want things like hash table and list type = val.type.unqualified() # If it points to a reference, get the reference. if type.code == gdb.TYPE_CODE_REF: type = type.target () if type.code == gdb.TYPE_CODE_PTR: type = type.target().unqualified() t = str(type) if t == "GList": return GListPrinter(val, "GList") if t == "GSList": return GListPrinter(val, "GSList") if t == "GHashTable": return GHashPrinter(val) else: t = str(type) if t == "GList": return GListNodePrinter(val) if t == "GSList *": return GListPrinter(val, "GSList") return None def register (obj): if obj == None: obj = gdb obj.pretty_printers.append(pretty_printer_lookup) class ForeachCommand (gdb.Command): """Foreach on list""" def __init__ (self): super (ForeachCommand, self).__init__ ("gforeach", gdb.COMMAND_DATA, gdb.COMPLETE_SYMBOL) def valid_name (self, name): if not name[0].isalpha(): return False return True def parse_args (self, arg): i = arg.find(" ") if i <= 0: raise Exception ("No var specified") var = arg[:i] if not self.valid_name(var): raise Exception ("Invalid variable name") while i < len (arg) and arg[i].isspace(): i = i + 1 if arg[i:i+2] != "in": raise Exception ("Invalid syntax, missing in") i = i + 2 while i < len (arg) and arg[i].isspace(): i = i + 1 colon = arg.find (":", i) if colon == -1: raise Exception ("Invalid syntax, missing colon") val = arg[i:colon] colon = colon + 1 while colon < len (arg) and arg[colon].isspace(): colon = colon + 1 command = arg[colon:] return (var, val, command) def do_iter(self, arg, 
item, command): item = item.cast (gdb.lookup_type("void").pointer()) item = long(item) to_eval = "set $%s = (void *)0x%x\n"%(arg, item) gdb.execute(to_eval) gdb.execute(command) def slist_iterator (self, arg, container, command): l = container.cast (gdb.lookup_type("GSList").pointer()) while long(l) != 0: self.do_iter (arg, l["data"], command) l = l["next"] def list_iterator (self, arg, container, command): l = container.cast (gdb.lookup_type("GList").pointer()) while long(l) != 0: self.do_iter (arg, l["data"], command) l = l["next"] def pick_iterator (self, container): t = container.type.unqualified() if t.code == gdb.TYPE_CODE_PTR: t = t.target().unqualified() t = str(t) if t == "GSList": return self.slist_iterator if t == "GList": return self.list_iterator raise Exception("Invalid container type %s"%(str(container.type))) def invoke (self, arg, from_tty): (var, container, command) = self.parse_args(arg) container = gdb.parse_and_eval (container) func = self.pick_iterator(container) func(var, container, command) ForeachCommand ()
cdemers/networktools
UDPNetTests/test_server.py
Python
mit
3,592
0.003898
import socket UDP_IP = "0.0.0.0" UDP_PORT = 5005 sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) sock.bind((UDP_IP, UDP_PORT)) packet_stack = {} sequence_pointer = 1 peak_stack_size = 0 print "Listening on IP %s, Port: %s" % (UDP_IP, UDP_PORT) def decode_packet(packet): seq = int(packet[0:16]) size = int(packet[16:32]) pl = packet[32:] return (seq, size, pl) def dump_state(peak_stack_size, packet_stack): print "------ Report ------" print "Peak Stack Size: %i" % peak_stack_size stack_size = len(packet_stack) print "Curent Stack Size: %i" % stack_size if stack_size == 0: print "Stack is clean." return highest_packet_seq = 0 lowest_packet_seq = 999999 for packet in packet_stack: if packet > highest_packet_seq: highest_packet_seq = packet if packet < lowest_packet_seq: lowest_packet_seq = packet print "Lowest: %i Highest: %i" % (lowest_packet_seq, highest_packet_seq) missing_packets = 0 for i in range(lowest_packet_seq, highest_packet_seq): if i not in packet_stack: missing_packets += 1 print "Missing packet between %i and %i is %i" % (lowest_packet_seq, highest_packet_seq, missing_packets) try: while True: data, addr = sock.recvfrom(64536) # 64K Buffer Size (seq, size, pl) = decode_packet(data) print "Sequence Number: %i Size: %i" % ( seq, size) print "Src IP: %s Src Port: %s" % addr # print "Payload: '%s'" % pl print "Data: '%s'" % data # Payload starting with C is for Control commands print "L2 Preamble: '%s'" % pl[0:1] if pl[0:1] == "C": command = int(pl[1:5]) print "Command: '%s'" % command if command == 1: print "Command 1: Display the debug trace." dump_state(peak_stack_size, packet_stack) elif command == 2: print "Command 2: Clear the stack and reset the sequence_pointer." sequence_pointer = 1 peak_stack_size = 0 packet_stack.clear() print "\n" continue elif command == 3: print "Command 3: Exit." exit(0) if len(data) == size: print "Packet size validation confirmed." else: print "Packet size error! 
%i != %i" % (len(data), size) raise Exception("Packet Size Error.") if(seq == sequence_pointer): print "Received packet (%i) in sequence, passing over." % sequence_pointer sequence_pointer += 1 while sequence_pointer in packet_stack: print "Next packet (%i) found in stack, poping out of stack." % sequence_pointer packet_stack.pop(sequence_pointer, None) sequence_pointer += 1 else: print "Received packet seq %i out of order, pushing onto stack." % seq packet_stack[seq] = data stack_size = len(packet_stack) print "Current Stack Siz
e: %i" % stack_size if stack_size > peak_stack_size: peak_stack_size = s
tack_size print "\n" except KeyboardInterrupt: dump_state(peak_stack_size, packet_stack) except: print "ERROR!" print "Data: '%s'" % data print addr print "Sequence Index: %i" % sequence_pointer print "Peak Stack Size: %i" % peak_stack_size stack_size = len(packet_stack) print "Curent Stack Size: %i" % stack_size raise
ericholscher/pinax
pinax/apps/groups/templatetags/group_tags.py
Python
mit
3,311
0.003926
from django import template from django.utils.encoding import smart_str from django.core.urlresolvers import reverse, NoReverseMatch from django.db.models import get_model from django.db.models.query import QuerySet register = template.Library() class GroupURLNode(template.Node): def __init__(self, view_name, group, kwargs, asvar): self.view_name = view_name self.group = group self.kwargs = kwargs self.asvar = asvar def render(self, context): url = "" group = self.group.resolve(context) kwargs = {} for k, v in self.kwargs.items(): kwargs[smart_str(k, "ascii")] = v.resolve(context) if group: bridge = group.content_bridge try: url = bridge.reverse(self.view_name, group, kwargs=kwargs) except NoReverseMatch: if self.asvar is None: raise else: tr
y: url = reverse(self.view_name, kwargs=kwargs) except NoReverseMatch: if self.asvar is None: raise if self.asvar:
context[self.asvar] = url return "" else: return url class ContentObjectsNode(template.Node): def __init__(self, group_var, model_name_var, context_var): self.group_var = template.Variable(group_var) self.model_name_var = template.Variable(model_name_var) self.context_var = context_var def render(self, context): group = self.group_var.resolve(context) model_name = self.model_name_var.resolve(context) if isinstance(model_name, QuerySet): model = model_name else: app_name, model_name = model_name.split(".") model = get_model(app_name, model_name) context[self.context_var] = group.content_objects(model) return "" @register.tag def groupurl(parser, token): bits = token.contents.split() tag_name = bits[0] if len(bits) < 3: raise template.TemplateSyntaxError("'%s' takes at least two arguments" " (path to a view and a group)" % tag_name) view_name = bits[1] group = parser.compile_filter(bits[2]) args = [] kwargs = {} asvar = None if len(bits) > 3: bits = iter(bits[3:]) for bit in bits: if bit == "as": asvar = bits.next() break else: for arg in bit.split(","): if "=" in arg: k, v = arg.split("=", 1) k = k.strip() kwargs[k] = parser.compile_filter(v) elif arg: raise template.TemplateSyntaxError("'%s' does not support non-kwargs arguments." % tag_name) return GroupURLNode(view_name, group, kwargs, asvar) @register.tag def content_objects(parser, token): """ {% content_objects group "tasks.Task" as tasks %} """ bits = token.split_contents() if len(bits) != 5: raise template.TemplateSyntaxError("'%s' requires five arguments." % bits[0]) return ContentObjectsNode(bits[1], bits[2], bits[4])
tri2sing/PyOO
patterns/composite/entities.py
Python
gpl-2.0
1,989
0.012569
''' Created on Dec 2, 2015 @author: Sameer Adhikari ''' # Class that represents the common operations between # composite and leaf/primitive nod
es in the hierarchy # This is a simulation which lacks a lot of operations class Component(object): def __init__(self, name): self.name = name def move(self, destination_path): destination_folder = get_folder(destination_path) del self.parent.children[self.name] # Remove folder from current location
destination_folder.children[self.name] = self # Move to new folder location self.parent = destination_folder # Set up traversal path to root def delete(self): del self.parent.children[self.name] # Remove folder from current location def add_child(self, child): child.parent = self self.children[child.name] = child # Class that represent the composite node in the hierarchy class Folder(Component): def __init__(self, name): super().__init__(name) self.children = {} # A folder can have folders or files # Class the represents the leaf/primitve node, which does not have children class File(Component): def __init__(self, name, contents): super().__init__(name) self.contents = contents # Module level variable to represent the root of a filesystem root = Folder('') # This function causes a cyclic dependency. # It operates on component but requires a root folder. # But, folder is a subclass of component. # Python's dynamic typing and handling of module variables helps out. def get_folder(path): ''' Returns the folder node to which the string path refers ''' folders_along_path = path.split('/')[1:] # Ignore the initial empty string from split node = root # Start at the top for folder_name in folders_along_path: # Traverse down the tree node = node.children[folder_name] # Get pointer to the node at the current tree level return node
veselosky/webquills
webquills/sites/middleware.py
Python
apache-2.0
275
0
from .models import Site class Si
tesMiddleware(object): def __init__(self, get_re
sponse): self.get_response = get_response def __call__(self, request): request.site = Site.objects._get_for_request(request) return self.get_response(request)
hoangt/gem5v
tests/configs/o3-timing-mp.py
Python
bsd-3-clause
2,805
0.010339
# Copyright (c) 2006-2007 The Regents of The University of Michigan # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer; # redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution; # neither the name of the copyright holders nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # # Authors: Ron Dreslinski import m5
from m5.objects import * m5.util.addToPath('../configs/common') from Caches import * nb_cores = 4 cpus = [ DerivO3CPU(cpu_id=i) for i in xrange(nb_cores) ] # system simulated system = System(cpu = cpus, physmem = SimpleMemor
y(), membus = CoherentBus()) # l2cache & bus system.toL2Bus = CoherentBus(clock = '2GHz') system.l2c = L2(clock = '2GHz', size='4MB', assoc=8) system.l2c.cpu_side = system.toL2Bus.master # connect l2c to membus system.l2c.mem_side = system.membus.slave # add L1 caches for cpu in cpus: cpu.addPrivateSplitL1Caches(L1(size = '32kB', assoc = 1), L1(size = '32kB', assoc = 4)) # create the interrupt controller cpu.createInterruptController() # connect cpu level-1 caches to shared level-2 cache cpu.connectAllPorts(system.toL2Bus, system.membus) cpu.clock = '2GHz' # connect memory to membus system.physmem.port = system.membus.master # connect system port to membus system.system_port = system.membus.slave # ----------------------- # run simulation # ----------------------- root = Root( full_system = False, system = system ) root.system.mem_mode = 'timing' #root.trace.flags="Bus Cache" #root.trace.flags = "BusAddrRanges"
gen2brain/comic-utils
setup.py
Python
gpl-3.0
576
0.0625
#!/usr/bin/env python from distutils.core import setup setup(name = "comic-utils",
version = "0.4", description = "Comic Utils", author = "Milan Nikolic", author_email = "gen2brain@gmail.com", license = "GNU GPLv3", url = "https://github.com/gen2brain/comic-utils", packages = ["comicutils", "comicutils.ui"], package_dir = {"comicutils": "comicutils"}
, scripts = ["comic-convert", "comic-thumbnails"], requires = ["Image", "PngImagePlugin"], platforms = ["Linux", "Windows"] )
noironetworks/neutron
neutron/db/migration/alembic_migrations/versions/queens/expand/594422d373ee_fip_qos.py
Python
apache-2.0
1,485
0
# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # from alembic import op import sqlalchemy as sa from neutron_lib.db import constants as db_const from neutron.db import migration """fip qos Revision ID: 594422d373ee Revises: 7d32f979895f Create Date: 2016-04-26 17:
16:10.323756 """ # revision identifiers, used by Alembic. revision = '594422d373ee' down_revision = '7d32f979895f' # milestone identifier, used by neutron-db-manage neutron_milestone = [migration.QUEENS] def upgrade(): op.create_table( 'qos_fip_policy_bindings', sa.Column('policy_id', sa.String(length=db_
const.UUID_FIELD_SIZE), sa.ForeignKey('qos_policies.id', ondelete='CASCADE'), nullable=False), sa.Column('fip_id', sa.String(length=db_const.UUID_FIELD_SIZE), sa.ForeignKey('floatingips.id', ondelete='CASCADE'), nullable=False, unique=True))
browning/shows
shows/shows/settings/local.py
Python
mit
1,629
0.00798
"""Development settings and globals.""" from os.path import join, normpath from base import * ########## DEBUG CONFIGURATION # See: https://docs.djangoproject.com/en/dev/ref/settings/#debug DEBUG = True # See: https://docs.djangoproject.com/en/dev/ref/settings/#template-debug TEMPLATE_DEBUG = DEBUG ########## END DEBUG CONFIGURATION ########## EMAIL CONFIGURATION # See: https://docs.djangoproject.com/en/dev/ref/settings/#email-backend EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend' ########## END EMAIL CONFIGURATION ########## DATABASE CONFIGURATION # See: https://docs.djangoproject.com/en/dev/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': normpath(join(DJANGO_ROOT, 'default.db')), 'USER': '', 'PASSWORD': '', 'HOST': '', 'PORT': '', } } ########## END DATABASE CONFIGURATION ########## CACHE CONFIGURATION # See: https://docs.djangoproject.com/en/dev/ref/settings/#caches CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', } } ########## END CACHE CONFIGURATION ########## TOOLBAR CONFIGURATION # See: https://github.com/django-debug-t
oolbar/django-debug-toolbar#installation INSTALLED_APPS += ( 'debug_toolbar', ) # See: https://github.com/django-debug-toolbar/django-debug-toolbar#installation INTERNAL_IPS = ('127.0.0.1',) # See: https://github.com/django-debug-toolbar/django-debug-toolbar#installation MIDDLEWARE_CLASSES += ( 'debug_toolbar.middleware.DebugToolbarMiddlew
are', ) ########## END TOOLBAR CONFIGURATION
eightHundreds/irides
tests/mocks/pictures.py
Python
gpl-3.0
743
0.002692
import pytest

from app import models
from app.extensions import db


@pytest.fixture(scope='function')
def mock_picture():
    """Return a factory that creates and persists a Picture for tests.

    The factory commits the row (plus one associated Tag) so the returned
    object carries a real database id.  The ``despriction``/``userId``
    spellings come from the application's model schema and must be kept.
    """
    def make_mock_picture(user=None, tags=None, despriction=None, address=None):
        # NOTE(review): `user` is required in practice despite the None
        # default — `user.id` is always read below.  Confirm all callers
        # pass a user, or add an explicit error.
        _picture = models.Picture(
            userId=str(user.id),
            # Fixed fallback values keep the fixture deterministic.
            despriction=despriction or 'testdes',
            address=address or 'testaddress'
        )
        _picture.tags = [
            models.Tag(tag=tags or 'testtags')
        ]
        db.session.add(_picture)
        db.session.commit()
        return _picture
    return make_mock_picture
eysho/BestKnownGame-Coins---Source
share/qt/clean_mac_info_plist.py
Python
mit
922
0.016269
#!/usr/bin/env python
# Jonas Schnelli, 2013
# make sure the Nichts-Qt.app contains the right plist (including the right version)
# fix made because of several bugs in Qt mac deployment (https://bugreports.qt-project.org/browse/QTBUG-21267)
from string import Template
from datetime import date

bitcoinDir = "./"

inFile = bitcoinDir + "/share/qt/Info.plist"
outFile = "Nichts-Qt.app/Contents/Info.plist"
version = "unknown"

# Extract the VERSION = x.y.z line from the qmake project file.
fileForGrabbingVersion = bitcoinDir + "bitcoin-qt.pro"
with open(fileForGrabbingVersion) as versionFile:
    for line in versionFile:
        lineArr = line.replace(" ", "").split("=")
        if lineArr[0].startswith("VERSION"):
            version = lineArr[1].replace("\n", "")

# Substitute the $VERSION / $YEAR placeholders in the plist template.
with open(inFile, "r") as fIn:
    fileContent = fIn.read()
newFileContent = Template(fileContent).substitute(VERSION=version,
                                                  YEAR=date.today().year)

# `with` guarantees the output file is flushed and closed; the original
# script relied on interpreter shutdown to close its handles.
with open(outFile, "w") as fOut:
    fOut.write(newFileContent)

# Parenthesized single-argument print works on both Python 2 and 3;
# the bare print statement was Python-2 only.
print("Info.plist fresh created")
KevinGrahamFoster/django-cities-light
test_project/tests.py
Python
mit
1,189
0.004205
# -*- encoding: utf-8 -*-
from __future__ import unicode_literals

# NOTE(review): `django.utils.unittest` and `ChangeList.get_query_set`
# are pre-Django-1.6 APIs — this module presumably targets an old,
# pinned Django; confirm before modernising.
from django.utils import unittest
from django.test.client import RequestFactory
from django.db.models import query
from django.contrib.admin.sites import AdminSite

from cities_light import admin as cl_admin
from cities_light import models as cl_models


class AdminTestCase(unittest.TestCase):
    """Exercises the cities_light admin ChangeList with a search query."""

    def setUp(self):
        # Fresh request factory and an isolated AdminSite per test.
        self.factory = RequestFactory()
        self.admin_site = AdminSite()

    def testCityChangeList(self):
        """A search request must still yield a QuerySet from the changelist."""
        request = self.factory.get('/some/path/', data={'q': 'some query'})
        city_admin = cl_admin.CityAdmin(cl_models.City, self.admin_site)
        # CityChangeList takes the full positional signature of Django's
        # ChangeList, sourced from the CityAdmin class attributes.
        changelist = cl_admin.CityChangeList(request, cl_models.City,
            cl_admin.CityAdmin.list_display,
            cl_admin.CityAdmin.list_display_links,
            cl_admin.CityAdmin.list_filter,
            cl_admin.CityAdmin.date_hierarchy,
            cl_admin.CityAdmin.search_fields,
            cl_admin.CityAdmin.list_select_related,
            cl_admin.CityAdmin.list_per_page,
            cl_admin.CityAdmin.list_max_show_all,
            cl_admin.CityAdmin.list_editable, city_admin)
        self.assertIsInstance(changelist.get_query_set(request),
                              query.QuerySet)
GoogleChromeLabs/chromium-bidi
tests/_helpers.py
Python
apache-2.0
4,890
0.000613
# Copyright 2021 Google LLC.
# Copyright (c) Microsoft Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import asyncio
import json
import os

import pytest
import websockets

# Monotonically increasing id shared by every command sent in a session.
_command_counter = 1


def get_next_command_id():
    """Return a fresh, unique id for the next BiDi command."""
    global _command_counter
    _command_counter += 1
    return _command_counter


@pytest.fixture
async def websocket():
    """Yield a websocket connection to the BiDi server under test.

    The port defaults to 8080 and can be overridden via the PORT env var.
    """
    port = os.getenv('PORT', 8080)
    url = f'ws://localhost:{port}'
    async with websockets.connect(url) as connection:
        yield connection


# noinspection PyUnusedFunction
@pytest.fixture
async def context_id(websocket):
    """Return the id of an open browsing context, navigated to about:blank."""
    # Note: there can be a race condition between initially created context's
    # events and following subscription commands. Sometimes subscribe is called
    # before the initial context emitted `browsingContext.contextCreated`,
    # `browsingContext.domContentLoaded`, or `browsingContext.load` events,
    # which makes events verification way harder. Navigation command guarantees
    # there will be no follow-up events, as it uses `interactive` flag.
    # TODO: find a way to avoid mentioned race condition properly.
    open_context_id = await get_open_context_id(websocket)
    await goto_url(websocket, open_context_id, "about:blank")
    return open_context_id


@pytest.fixture(autouse=True)
async def before_each_test(websocket):
    # This method can be used for browser state preparation.
    assert True


async def subscribe(websocket, event_names, context_ids=None):
    """Subscribe to one or more events, optionally scoped to contexts."""
    if isinstance(event_names, str):
        event_names = [event_names]
    command = {
        "method": "session.subscribe",
        "params": {
            "events": event_names}}

    if context_ids is not None:
        command["params"]["contexts"] = context_ids

    await execute_command(websocket, command)


# Compares 2 objects recursively ignoring values of specific attributes.
def recursiveCompare(expected, actual, ignore_attributes=None):
    """Assert that `actual` deep-equals `expected`.

    Dict entries whose key appears in `ignore_attributes` are checked for
    presence only (via the key-set assertion); their values are skipped.
    Types must match exactly — a list is never equal to a tuple.
    """
    # None sentinel instead of a mutable `[]` default argument.
    if ignore_attributes is None:
        ignore_attributes = []
    assert type(expected) == type(actual)
    if type(expected) is list:
        assert len(expected) == len(actual)
        for index in range(len(expected)):
            recursiveCompare(expected[index], actual[index], ignore_attributes)
        return

    if type(expected) is dict:
        assert expected.keys() == actual.keys(), \
            f"Key sets should be the same: " \
            f"\nNot present: {set(expected.keys()) - set(actual.keys())}" \
            f"\nUnexpected: {set(actual.keys()) - set(expected.keys())}"
        # Iterate keys directly; the original enumerate() produced an unused
        # index and named the key `val`, which obscured the logic.
        for key in expected:
            if key not in ignore_attributes:
                recursiveCompare(expected[key], actual[key], ignore_attributes)
        return

    assert expected == actual


# Returns an id of an open context.
async def get_open_context_id(websocket):
    """Return the id of the first context in the browsing-context tree."""
    result = await execute_command(websocket, {
        "method": "browsingContext.getTree",
        "params": {}})
    return result['contexts'][0]['context']


async def send_JSON_command(websocket, command):
    """Serialize and send `command`, assigning a fresh id if it has none."""
    if 'id' not in command:
        command['id'] = get_next_command_id()
    await websocket.send(json.dumps(command))


async def read_JSON_message(websocket):
    """Receive a single message and parse it as JSON."""
    return json.loads(await websocket.recv())


# Open given URL in the given context.
async def goto_url(websocket, context_id, url):
    """Navigate `context_id` to `url`, waiting until it is interactive."""
    await execute_command(websocket, {
        "method": "browsingContext.navigate",
        "params": {
            "url": url,
            "context": context_id,
            "wait": "interactive"}})


# noinspection PySameParameterValue
async def execute_command(websocket, command, result_field='result'):
    """Send `command`, wait for its response and return `result_field`.

    Messages that do not carry the command's id (e.g. events) are read
    and discarded while waiting.
    """
    command_id = get_next_command_id()
    command['id'] = command_id
    await send_JSON_command(websocket, command)

    while True:
        # Wait for the command to be finished.
        resp = await read_JSON_message(websocket)
        if 'id' in resp and resp['id'] == command_id:
            assert result_field in resp, \
                f"Field `{result_field}` should be in the result object:" \
                f"\n {resp}"
            return resp[result_field]


# Wait and return a specific event from Bidi server
async def wait_for_event(websocket, event_method):
    """Read messages until an event with method `event_method` arrives."""
    while True:
        event_response = await read_JSON_message(websocket)
        if 'method' in event_response and event_response['method'] == event_method:
            return event_response
RecursiveGreen/spradio-django
savepointradio/radio/managers.py
Python
mit
278
0
from django.db import models

from .querysets import SongQuerySet


class SongManager(models.Manager):
    """Manager backed by SongQuerySet so its chainable filters are usable."""

    def get_queryset(self):
        """Build the base queryset from the custom SongQuerySet class."""
        return SongQuerySet(self.model, using=self._db)

    def available(self):
        """Return songs that are enabled and published (i.e. playable)."""
        songs = self.get_queryset().songs()
        return songs.enabled().published()
hstau/manifold-cryo
setup.py
Python
gpl-2.0
792
0.002525
from setuptools import setup


def readme():
    """Return the package long description from the example README file."""
    with open('README.rst.example') as f:
        return f.read()


setup(name='manifold_gui',
      version='0.1',
      description='GUI for a manifold technique',
      long_description=readme(),
      classifiers=[
          # '1 - Alpha' is not a valid trove classifier; the official
          # spelling for an alpha-stage project is '3 - Alpha'.
          'Development Status :: 3 - Alpha',
          'Environment :: Console',
          'Environment :: X11 Applications',
          'License :: OSI Approved :: GNU General Public License (GPL)',
          # The ':: chimera' suffix made this classifier invalid (PyPI
          # rejects unknown classifiers); 'chimera' stays in `keywords`.
          'Programming Language :: Python :: 2.7',
          'Intended Audience :: End Users/Desktop',
      ],
      keywords='manifold chimera',
      author='Hstau Y Liao',
      # `platform` is not a setup() keyword and was silently ignored;
      # the supported argument is `platforms` (a list of strings).
      platforms=['linux chimera'],
      author_email='hstau.y.liao@gmail.com',
      packages=['gui'],
      include_package_data=True,
      zip_safe=False)
atimothee/django-playground
django_playground/movie_library/migrations/0006_auto__chg_field_movie_studio.py
Python
bsd-3-clause
4,380
0.007763
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models


class Migration(SchemaMigration):
    """South schema migration: make Movie.studio nullable (optional)."""

    def forwards(self, orm):
        """Allow NULL in movie.studio_id so a movie may have no studio."""
        # Changing field 'Movie.studio'
        db.alter_column(u'movie_library_movie', 'studio_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['movie_library.Studio'], null=True))

    def backwards(self, orm):
        """Revert movie.studio_id to NOT NULL, defaulting existing NULLs to ''."""
        # Changing field 'Movie.studio'
        db.alter_column(u'movie_library_movie', 'studio_id', self.gf('django.db.models.fields.related.ForeignKey')(default='', to=orm['movie_library.Studio']))

    # Frozen ORM snapshot auto-generated by South — do not edit by hand.
    models = {
        u'movie_library.actor': {
            'Meta': {'object_name': 'Actor'},
            'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
        },
        u'movie_library.director': {
            'Meta': {'object_name': 'Director'},
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
            'nick_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'})
        },
        u'movie_library.genre': {
            'Meta': {'object_name': 'Genre'},
            'explanation': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'movie_library.movie': {
            'Meta': {'object_name': 'Movie'},
            'actor': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['movie_library.Actor']", 'symmetrical': 'False'}),
            'cover_art': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
            'director': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['movie_library.Director']", 'symmetrical': 'False'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_featured': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'release_year': ('django.db.models.fields.IntegerField', [], {'blank': 'True'}),
            'studio': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['movie_library.Studio']", 'null': 'True', 'blank': 'True'}),
            'synopsis': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'writer': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['movie_library.Writer']", 'symmetrical': 'False'})
        },
        u'movie_library.studio': {
            'Meta': {'object_name': 'Studio'},
            'address': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'city': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
            'country': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
            'state_province': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
            'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
        },
        u'movie_library.writer': {
            'Meta': {'object_name': 'Writer'},
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        }
    }

    complete_apps = ['movie_library']
moreati/numpy
runtests.py
Python
bsd-3-clause
15,724
0.001717
#!/usr/bin/env python """ runtests.py [OPTIONS] [-- ARGS] Run tests, building the project first. Examples:: $ python runtests.py $ python runtests.py -s {SAMPLE_SUBMODULE} $ python runtests.py -t {SAMPLE_TEST} $ python runtests.py --ipython $ python runtests.py --python somescript.py $ python runtests.py --bench Run a debugger: $ gdb --args python runtests.py [...other args...] Generate C code coverage listing under build/lcov/: (requires http://ltp.sourceforge.net/coverage/lcov.php) $ python runtests.py --gcov [...other args...] $ python runtests.py --lcov-html """ # # This is a generic test runner script for projects using Numpy's test # framework. Change the following values to adapt to your project: # PROJECT_MODULE = "numpy" PROJECT_ROOT_FILES = ['numpy', 'LICENSE.txt', 'setup.py'] SAMPLE_TEST = "numpy/linalg/tests/test_linalg.py:test_byteorder_check" SAMPLE_SUBMODULE = "linalg" EXTRA_PATH = ['/usr/lib/ccache', '/usr/lib/f90cache', '/usr/local/lib/ccache', '/usr/local/lib/f90cache'] # --------------------------------------------------------------------- if __doc__ is None: __doc__ = "Run without -OO if you want usage info" else: __doc__ = __doc__.format(**globals()) import sys import os # In case we are run from the source directory, we don't want to import the # project from there: sys.path.pop(0) import shutil import subprocess import time import imp from argparse import ArgumentParser, REMAINDER ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__))) def main(argv): parser = ArgumentParser(usage=__doc__.lstrip()) parser.add_argument("--verbose", "-v", action="count", default=1, help="more verbosity") parser.add_argument("--no-build", "-n", action="store_true", default=False, help="do not build the project (use system installed version)") parser.add_argument("--build-only", "-b", action="store_true", default=False, help="just build, do not run any tests") parser.add_argument("--doctests", action="store_true", default=False, help="Run doctests in 
module") parser.add_argument("--coverage", action="store_true", default=False, help=("report coverage of project code. HTML output goes " "under build/coverage")) parser.add_argument("--gcov", action="store_true", default=False, help=("enable C code coverage via gcov (requires GCC). " "gcov output goes to build/**/*.gc*")) parser.add_argument("--lcov-html", action="store_true", default=False, help=("produce HTML for C code coverage information " "from a previous run with --gcov. " "HTML output goes to build/lcov/")) parser.add_argument("--mode", "-m", default="fast", help="'fast', 'full', or something that could be " "passed to nosetests -A [default: fast]") parser.add_argument("--submodule", "-s", default=None, help="Submodule whose tests to run (cluster, constants, ...)") parser.add_argument("--pythonpath", "-p", default=None, help="Paths to prepend to PYTHONPATH") parser.add_argument("--tests", "-t", action='append', help="Specify tests to run") parser.add_argument("--python", action="store_true", help="Start a Python shell with PYTHONPATH set") parser.add_argument("--ipython", "-i", action="store_true", help="Start IPython shell with PYTHONPATH set") parser.add_argument("--shell", action="store_true", help="Start Unix shell with PYTHONPATH set") parser.add_argument("--debug", "-g", action="store_true", help="Debug build") parser.add_argument("--parallel", "-j", type=int, default=0, help="Number of parallel jobs during build") parser.add_argument("--show-build-log", action="store_true", help="Show build output rather than using a log file") parser.add_argument("--bench", action="store_true", help="Run benchmark suite instead of test suite") parser.add_argument("--bench-compare", action="store", metavar="COMMIT", help=("Compare benchmark results to COMMIT. 
" "Note that you need to commit your changes first!")) parser.add_argument("args", metavar="ARGS", default=[], nargs=REMAINDER, help="Arguments to pass to Nose, Python or shell") args = parser.parse_args(argv) if args.bench_compare: args.bench = True args.no_build = True # ASV does the building if args.lcov_html: # generate C code coverage output lcov_generate() sys.exit(0) if args.pythonpath: for p in reversed(args.pythonpath.split(os.pathsep)): sys.path.insert(0, p) if args.gcov: gcov_reset_counters() if args.debug and args.bench: print("*** Benchmarks should not be run against debug " "version; remove -g flag ***") if not args.no_build: site_dir = build_project(args) sys.path.insert(0, site_dir) os.environ['PYTHONPATH'] = site_dir extra_argv = args.args[:] if extra_argv and extra_argv[0] == '--': extra_argv = extra_argv[1:] if args.python: # Debugging issues with warnings is much easier if you can see them print("Enabling display of all warnings") import warnings; warnings.filterwarnings("always") if extra_argv: # Don't use subprocess, since we don't want to include the # current path in PYTHONPATH. 
sys.argv = extra_argv with open(extra_argv[0], 'r') as f: script = f.read() sys.modules['__main__'] = imp.new_module('__main__') ns = dict(__name__='__main__', __file__=extra_argv[0]) exec_(script, ns) sys.exit(0) else: import code code.interact() sys.exit(0) if args.ipython: # Debugging issues with warnings is much easier if you can see them print("Enabling display of all warnings and pre-importing numpy as np") import warnings; warnings.filterwarnings("always") import IPython import numpy as np IPython.embed(user_ns={"np": np}) sys.exit(0) if args.shell: shell = os.environ.get('SHELL', 'sh') print("Spawning a Unix shell...") os.execv(shell, [shell] + extra_argv) sys.exit(1) if args.coverage: dst_dir = os.path.join(ROOT_DIR, 'build', 'coverage') fn = os.path.join(dst_dir, 'coverage_html.js') if os.path.isdir(dst_dir) and os.path.isfile(fn): shutil.rmtree(dst_dir) extra_argv += ['--cover-html', '--cover-html-dir='+dst_dir] if args.bench: # Run ASV items = extra_argv if args.tests: items += args.tests if args.submodule: items += [args.submodule] bench_args = [] for a in items: bench_args.extend(['--bench', a]) if not args.bench_compare: cmd = ['asv', 'run', '-n', '-e', '--python=same'] + bench_args os.chdir(os.path.join(ROOT_DIR, 'benchmarks')) os.execvp(cmd[0],
cmd) sys.exit(1) else:
commits = [x.strip() for x in args.bench_compare.split(',')] if len(commits) == 1: commit_a = commits[0] commit_b = 'HEAD' elif len(commits) == 2: commit_a, commit_b = commits else: p.error("Too many commits to compare benchmarks for") # Check for uncommitted files if commit_b == 'HEAD':
fredex42/gnmvidispine
gnmvidispine/vs_metadata.py
Python
gpl-2.0
5,748
0.008525
__author__ = 'Andy Gallagher <andy.gallagher@theguardian.com>'

import xml.etree.ElementTree as ET
import dateutil.parser

from .vidispine_api import always_string


class VSMetadata:
    """In-memory key -> value-list metadata map, serializable to a
    Vidispine MetadataDocument."""

    def __init__(self, initial_data=None):
        # None sentinel instead of a shared `{}` default: with a mutable
        # default every instance created without arguments aliased the
        # same dict, so addValue() on one leaked into all of them.
        self.contentDict = initial_data if initial_data is not None else {}
        self.primaryGroup = None

    def addValue(self, key, value):
        """Append `value` to the list of values stored under `key`."""
        self.contentDict.setdefault(key, []).append(value)

    def setPrimaryGroup(self, g):
        """Record the metadata group used by toXML() when none is given."""
        self.primaryGroup = g

    def toXML(self, mdGroup=None):
        """Serialize to a MetadataDocument XML string (single -INF/+INF
        timespan), using `mdGroup` or the primary group if set."""
        from datetime import datetime
        xmldoc = ET.ElementTree()

        ns = "{http://xml.vidispine.com/schema/vidispine}"
        rootEl = ET.Element('{0}MetadataDocument'.format(ns))
        xmldoc._setroot(rootEl)

        timespanEl = ET.Element('{0}timespan'.format(ns),
                                attrib={'start': '-INF',
                                        'end': '+INF'})
        rootEl.append(timespanEl)

        if mdGroup is None and self.primaryGroup is not None:
            mdGroup = self.primaryGroup

        if(mdGroup):
            groupEl = ET.Element('{0}group'.format(ns))
            groupEl.text = mdGroup
            rootEl.append(groupEl)

        for key, value in list(self.contentDict.items()):
            fieldEl = ET.Element('{0}field'.format(ns))

            nameEl = ET.Element('{0}name'.format(ns))
            nameEl.text = key
            fieldEl.append(nameEl)

            # Single values are wrapped so every field is a value list.
            if not isinstance(value, list):
                value = [value]
            for line in value:
                valueEl = ET.Element('{0}value'.format(ns))
                if isinstance(line, datetime):
                    line = line.strftime("%Y-%m-%dT%H:%M:%S%Z")
                valueEl.text = always_string(line)
                fieldEl.append(valueEl)

            timespanEl.append(fieldEl)

        return ET.tostring(rootEl, encoding="utf8").decode("utf8")


class VSMetadataMixin(object):
    """Shared XML-extraction helpers for the metadata value classes."""
    _xmlns = "{http://xml.vidispine.com/schema/vidispine}"

    @staticmethod
    def _safe_get_attrib(xmlnode, attribute, default):
        """Return `xmlnode.attrib[attribute]`, or `default` when absent.

        Element.attrib is a plain dict, so a missing attribute raises
        KeyError — the original only caught AttributeError (xmlnode is
        None), meaning any element lacking the attribute crashed instead
        of yielding the default.
        """
        try:
            return xmlnode.attrib[attribute]
        except (AttributeError, KeyError):
            return default

    @staticmethod
    def _safe_get_subvalue(xmlnode, subnode_name, default):
        """Return the text of the first `subnode_name` child, or `default`."""
        try:
            node = xmlnode.find(subnode_name)
            if node is not None:
                return node.text
            else:
                return default
        except AttributeError:
            return default


class VSMetadataValue(VSMetadataMixin):
    """One <value> entry: text plus uuid/user/timestamp/change audit data."""

    def __init__(self, valuenode=None, uuid=None):
        self.user = None
        self.uuid = None
        self.timestamp = None
        self.change = None
        self.value = None

        if valuenode is not None:
            self.uuid = self._safe_get_attrib(valuenode, "uuid", None)
            self.user = self._safe_get_attrib(valuenode, "user", None)
            try:
                self.timestamp = dateutil.parser.parse(
                    self._safe_get_attrib(valuenode, "timestamp", None))
            except TypeError:   #dateutil.parser got nothing
                self.timestamp = None
            self.change = self._safe_get_attrib(valuenode, "change", None)
            self.value = valuenode.text
        elif uuid is not None:
            self.uuid = uuid

    def __repr__(self):
        return "VSMetadataValue(\"{0}\")".format(self.value)

    def __eq__(self, other):
        # Equality is by uuid only, matching Vidispine's identity model.
        return other.uuid == self.uuid


class VSMetadataReference(VSMetadataMixin):
    """A <referenced> entry pointing at another Vidispine object."""

    def __init__(self, refnode=None, uuid=None):
        """
        Initialises, either to an empty reference, to an existing uuid or to an xml fragment
        :param uuid: string representing the uuid of something to reference
        :param refnode: pointer to an elementtree node of <referenced> in a MetadataDocument
        """
        if refnode is not None:
            self.uuid = self._safe_get_attrib(refnode, "uuid", None)
            self.id = self._safe_get_attrib(refnode, "id", None)
            self.type = self._safe_get_attrib(refnode, "type", None)
        if refnode is None and uuid is not None:
            self.uuid = uuid
            self.id = None
            self.type = None

    def __repr__(self):
        return "VSMetadataReference {0} to {1} {2}".format(self.uuid, self.type, self.id)

    def __eq__(self, other):
        return other.uuid == self.uuid


class VSMetadataAttribute(VSMetadataMixin):
    """
    this class represents the full metadata present in an xml <field> entry
    """
    def __init__(self, fieldnode=None):
        if fieldnode is not None:
            self.uuid = self._safe_get_attrib(fieldnode, "uuid", None)
            self.user = self._safe_get_attrib(fieldnode, "user", None)
            try:
                self.timestamp = dateutil.parser.parse(
                    self._safe_get_attrib(fieldnode, "timestamp", None))
            except TypeError:   #dateutil.parser got nothing
                self.timestamp = None
            self.change = self._safe_get_attrib(fieldnode, "change", None)
            self.name = self._safe_get_subvalue(fieldnode, "{0}name".format(self._xmlns), None)

            self.values = [VSMetadataValue(value_node) for value_node in
                           fieldnode.findall('{0}value'.format(self._xmlns))]
            self.references = [VSMetadataReference(ref_node) for ref_node in
                               fieldnode.findall('{0}referenced'.format(self._xmlns))]
        else:
            self.uuid = None
            self.user = None
            self.timestamp = None
            self.change = None
            self.name = None
            self.values = []
            self.references = []

    def __eq__(self, other):
        return other.uuid == self.uuid
gelbander/blues
blues/python.py
Python
mit
997
0.002006
""" Python Blueprint ================ Does not install python itself, only develop and setup tools. Contains pip helper for other blueprints to use. **Fabric environment:** .. code-block:: yaml blueprints: - blues.python """ from fabric.decorators import task from refabric.api import run, info from refabric.context_managers import
sudo from . import debian __
all__ = ['setup'] pip_log_file = '/tmp/pip.log' @task def setup(): """ Install python develop tools """ install() def install(): with sudo(): info('Install python dependencies') debian.apt_get('install', 'python-dev', 'python-setuptools') run('easy_install pip') run('touch {}'.format(pip_log_file)) debian.chmod(pip_log_file, mode=777) pip('install', 'setuptools', '--upgrade') def pip(command, *options): info('Running pip {}', command) run('pip {0} {1} -v --log={2} --log-file={2}'.format(command, ' '.join(options), pip_log_file))
mezz64/home-assistant
homeassistant/helpers/reload.py
Python
apache-2.0
5,950
0.00084
"""Class to reload platforms.""" from __future__ import annotations import asyncio from collections.abc import Iterable import logging from typing import Any from homeassistant import config as conf_util from homeassistant.const import SERVICE_RELOAD from homeassistant.core import Event, HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError from homeassistant.loader import async_get_integration from homeassistant.setup import async_setup_component from . import config_per_platform from .entity_platform import EntityPlatform, async_get_platforms from .typing import ConfigType # mypy: disallow-any-generics _LOGGER = logging.getLogger(__name__) async def async_reload_integration_platforms( hass: HomeAssistant, integration_name: str, integration_platforms: Iterable[str] ) -> None: """Reload an integration's platforms. The platform must support being re-setup. This functionality is only intended to be used for integrations that process Home Assistant data and make this available to other integrations. Examples are template, stats, derivative, utility meter. """ try: unprocessed_conf = await conf_util.async_hass_config_yaml(hass) except HomeAssistantError as err: _LOGGER.error(err) return tasks = [ _resetup_platform( hass, integration_name, integration_platform, unprocessed_conf ) for integration_platform in integration_platforms ] await asyncio.gather(*tasks) async def _resetup_platform( hass: HomeAssistant, integration_name: str, integration_platform: str, unprocessed_conf: ConfigType, ) -> None: """Resetup a platform.""" integration = await async_get_integration(hass, integration_platform) conf = await conf_util.async_process_component_config( hass, unprocessed_conf, integration ) if not conf: return root_config: dict[str, Any] = {integration_platform: []} # Extract only the config for template, ignore the rest. 
for p_type, p_config in config_per_platform(conf, integration_platform): if p_type != integration_name: continue root_config[integration_platform].append(p_config) component = integration.get_component() if hasattr(component, "async_reset_platform"): # If the integration has its own way to reset # use this method. await component.async_reset_platform(hass, integration_name) await component.async_setup(hass, root_config) return # If it's an entity platform, we use the entity_platform # async_reset method platform = async_get_platform_without_config_entry( hass, integration_name, integration_platform ) if platform: await _async_reconfig_platform(platform, root_config[integration_platform]) return if not root_config[integration_platform]: # No config for this platform # and it's not loaded. Nothing to do. return await _async_setup_platform( hass, integration_name, integration_platform, root_config[integration_platform] ) async def _async_setup_platform( hass: HomeAssistant, integration_name: str, integration_platform: str, platform_configs: list[dict[str, Any]], ) -> None: """Platform for the first time when new configuration is added.""" if integration_platform not in hass.data: await async_setup_component( hass, integration_platform, {integration_platform: platform_configs} ) return entity_component = hass.data[integration_platform] tasks = [ entity_component.async_setup_platform(integration_name, p_config) for p_config in platform_configs ] await asyncio.gather(*tasks) async def _async_reconfig_platform( platform: EntityP
latform, platform_configs: list[dict[str, Any]] ) -> None: """Reconfigure an already loaded platform.""" await platform.async_reset() tasks = [platform.async_setup(p_config) for p_config in platform_configs] await asyncio.gather(*tasks) asyn
c def async_integration_yaml_config( hass: HomeAssistant, integration_name: str ) -> ConfigType | None: """Fetch the latest yaml configuration for an integration.""" integration = await async_get_integration(hass, integration_name) return await conf_util.async_process_component_config( hass, await conf_util.async_hass_config_yaml(hass), integration ) @callback def async_get_platform_without_config_entry( hass: HomeAssistant, integration_name: str, integration_platform_name: str ) -> EntityPlatform | None: """Find an existing platform that is not a config entry.""" for integration_platform in async_get_platforms(hass, integration_name): if integration_platform.config_entry is not None: continue if integration_platform.domain == integration_platform_name: platform: EntityPlatform = integration_platform return platform return None async def async_setup_reload_service( hass: HomeAssistant, domain: str, platforms: Iterable[str] ) -> None: """Create the reload service for the domain.""" if hass.services.has_service(domain, SERVICE_RELOAD): return async def _reload_config(call: Event) -> None: """Reload the platforms.""" await async_reload_integration_platforms(hass, domain, platforms) hass.bus.async_fire(f"event_{domain}_reloaded", context=call.context) hass.helpers.service.async_register_admin_service( domain, SERVICE_RELOAD, _reload_config ) def setup_reload_service( hass: HomeAssistant, domain: str, platforms: Iterable[str] ) -> None: """Sync version of async_setup_reload_service.""" asyncio.run_coroutine_threadsafe( async_setup_reload_service(hass, domain, platforms), hass.loop, ).result()
jsoref/django
django/contrib/gis/utils/layermapping.py
Python
bsd-3-clause
27,310
0.00216
# LayerMapping -- A Django Model/OGR Layer Mapping Utility """ The LayerMapping class provides a way to map the contents of OGR vector files (e.g. SHP files) to Geographic-enabled Django models. For more information, please consult the GeoDjango documentation: https://docs.djangoproject.com/en/dev/ref/contrib/gis/layermapping/ """ import sys from decimal import Decimal, InvalidOperation as DecimalInvalidOperation from django.contrib.gis.db.models import GeometryField from django.contrib.gis.gdal import ( CoordTransform, DataSource, GDALException, OGRGeometry, OGRGeomType, SpatialReference, ) from django.contrib.gis.gdal.field import ( OFTDate, OFTDateTime, OFTInteger, OFTReal, OFTString, OFTTime, ) from django.core.exceptions import FieldDoesNotExist, ObjectDoesNotExist from django.db import connections, models, router, transaction from django.utils import six from django.utils.encoding import force_text # LayerMapping exceptions. class LayerMapError(Exception): pass class InvalidString(LayerMapError): pass class InvalidDecimal(LayerMapError): pass class InvalidInteger(LayerMapError): pass class MissingForeignKey(LayerMapError): pass class LayerMapping(object): "A class that maps OGR Layers to GeoDjango Models." # Acceptable 'base' types for a multi-geometry type. MULTI_TYPES = {1: OGRGeomType('MultiPoint'), 2: OGRGeomType('MultiLineString'), 3: OGRGeomType('MultiPolygon'), OGRGeomType('Point25D').num: OGRGeomType('MultiPoint25D'), OGRGeomType('LineString25D').num: OGRGeomType('MultiLineString25D'), OGRGeomType('Polygon25D').num: OGRGeomType('MultiPolygon25D'), } # Acceptable Django field types and corresponding acceptable OGR # counterparts. 
FIELD_TYPES = { models.AutoField: OFTInteger, models.IntegerField: (OFTInteger, OFTReal, OFTString), models.FloatField: (OFTInteger, OFTReal), models.DateField: OFTDate, models.DateTimeField: OFTDateTime, models.EmailField: OFTString, models.TimeField: OFTTime, models.DecimalField: (OFTInteger, OFTReal), models.CharField: OFTString, models.SlugField: OFTString, models.TextField: OFTString, models.URLField: OFTString, models.BigIntegerField: (OFTInteger, OFTReal, OFTString), models.SmallIntegerField: (OFTInteger, OFTReal, OFTString), models.PositiveSmallIntegerField: (OFTInteger, OFTReal, OFTString), } def __init__(self, model, data, mapping, layer=0, source_srs=None, encoding='utf-8', transaction_mode='commit_on_success', transform=True, unique=None, using=None): """ A LayerMapping object is initialized using the given Model (not an instance), a DataSource (or string path to an OGR-supported data file), and a mapping dictionary. See the module level docstring for more details and keyword argument usage. """ # Getting the DataSource and the associated Layer. if isinstance(data, six.string_types): self.ds = DataSource(data, encoding=encoding) else: self.ds = data self.layer = self.ds[layer] self.using = using if using is not None else router.db_for_write(model) self.spatial_backend = connections[self.using].ops # Setting the mapping & model attributes. self.mapping = mapping self.model = model # Checking the layer -- initialization of the object will fail if # things don't check out before hand. self.check_layer() # Getting the geometry column associated with the model (an # exception will be raised if there is no geometry column). 
if connections[self.using].features.supports_transform: self.geo_field = self.geometry_field() else: transform = False # Checking the source spatial reference system, and getting # the coordinate transformation object (unless the `transform` # keyword is set to False) if transform: self.source_srs = self.check_srs(source_srs) self.transform = self.coord_transform() else: self.transform = transform # Setting the encoding for OFTString fields, if specified. if encoding: # Making sure the encoding exists, if not a LookupError # exception will be thrown. from codecs import lookup lookup(encoding) self.encoding = encoding else: self.encoding = None if unique: self.check_unique(unique) transaction_mode = 'autocommit' # Has to be set to autocommit. self.unique = unique else: self.unique = None # Setting the transaction decorator with the function in the # transaction modes dictionary. self.transaction_mode = transaction_mode if transaction_mode == 'autocommit': self.transaction_decorator = None elif transaction_mode == 'commit_on_success': self.transaction_decorator = transaction.atomic else: raise LayerMapError('Unrecognized transaction mode: %s' % transaction_mode) # #### Checking routines used during initialization #### def check_fid_range(self, fid_range): "This checks the `fid_range` keyword." if fid_range: if isinstance(fid_range, (tuple, list)): return slice(*fid_range) elif isinstance(fid_range, slice): return fid_range else: raise TypeError else: return None def check_layer(self): """ This checks the Layer metadata, and ensures that it is compatible with the mapping information and model. Unlike previous revisions, there is no need to increment through each feature in the Layer. """ # The geometry field of the model is set here. # TODO: Support more than one geometry field / model. However, this # depends on the GDAL Driver in use. self.geom_field = False self.fields = {} # Getting lists of the field names and the field types available in # the OGR Layer. 
ogr_fields = self.layer.fields ogr_field_types = self.layer.field_types # Function for determining if the OGR mapping field is in the Layer. def check_ogr_fld(ogr_map_fld): try: idx = ogr_fields.index(ogr_map_fld) except ValueError: raise LayerMapError('Given mapping OGR field "%s" not found in OGR Layer.' % ogr_map_fld) return idx # No need to increment through each feature in the model, simply check # the Layer metadata against what was given in the mapping dictionary. for field_name, ogr_name in self.mapping.items(): # Ensuring that a corresponding field exists in the model # for the given field name in the mapping. try: model_field = self.model._meta.get_field(field_name) except FieldDoesNotExist: raise LayerMapError('Given mapping field "%s" not in given Model fields.' % field_name) # Getting the string name for the Django field class (e.g., 'PointField'). fld_nam
e = model_field.__class__.__name__ if isinstance(model_field, GeometryField): if self.geom_field: raise LayerMapError('LayerMapping does
not support more than one GeometryField per model.') # Getting the coordinate dimension of the geometry field. coord_dim = model_field.dim try: if coord_dim == 3: gtype = OGRGeomType(ogr_name + '25D') else: gtype = OGRGeomType(ogr_name) except GDAL
bennylope/django-site-broadcasts
test.py
Python
mit
1,601
0
#!/usr/bin/env python # -*- coding: utf-8 -*- import os import sys import organizations try: from setuptools import setup except ImportError: from distutils.core import setup if sys.argv[-1] == 'publish': os.system('python setup.py sdist upload') os.system('python setup.py bdist_wheel upload') sys.exit() readme = open('README.rst').read() history = open('HISTORY.rst').read().replace('.
. :changelog:', '')
setup( author="Ben Lopatin", author_email="ben@wellfire.co", name='django-organizations', version=organizations.__version__, description='Group accounts for Django', long_description=readme + '\n\n' + history, url='https://github.com/bennylope/django-organizations/', license='BSD License', platforms=['OS Independent'], packages=[ 'organizations', 'organizations.backends', 'organizations.south_migrations', 'organizations.templatetags', ], classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Framework :: Django', ], install_requires=[ 'Django>=1.4.2', ], test_suite='tests', include_package_data=True, zip_safe=False, )
tyler274/Recruitment-App
tests/test_functional.py
Python
bsd-3-clause
3,980
0.000251
# -*- coding: utf-8 -*- """Functional tests using WebTest. See: http://webtest.readthedocs.org/ """ from flask import url_for from recruit_app.user.models import User from .factories import UserFactory class TestLoggingIn: """Login.""" def test_can_log_in_returns_200(self, user, testapp): """Login successful.""" # Goes to homepage res = testapp.get('/') # Fills out login form in navbar form = res.forms['loginForm'] form['email'] = user.email form['password'] = 'myprecious' # Submits # res = form.submit().follow() res = form.submit() assert res.status_code == 200 def test_sees_alert_on_log_out(self, user, testapp): """Show alert on logout.""" res = testapp.get('/') # Fills out login form in navbar form = res.forms['loginForm'] form['email'] = user.email form['password'] = 'myprecious' # Submits res = form.submit() res = testapp.get(url_for('security.logout')).follow() # sees alert assert 'loginForm' in res def test_sees_error_message_if_password_is_incorrect(self, user, testapp): """Show error if password is incorrect.""" # Goes to homepage res = testapp.get('/') # Fills out login form, password incorrect form = res.forms['loginForm'] form['email'] = user.email form['password'] = 'wrong' # Submits res = form.submit() # sees error assert 'Invalid password' in res def test_sees_error_message_if_email_doesnt_exist(self, user, testapp): """Show error if email doesn't exist.""" # Goes to homepage res = testapp.get('/') # Fills out login form, password incorrect form = res.forms['loginForm'] form['email'] = 'unknown@unknown.com' form['password'] = 'myprecious' # Submits res = form.submit() # sees error assert 'Specified user does not exist' in res class TestRegistering: """Register a user.""" def test_can_register(self, user, testapp): """Register a new user.""" old_count = len(User.query.all()) # Goes to homepage res = testapp.get('/') # Clicks Create Account button res = res.click('Create account') # Fills out the form form = res.forms['registerForm'] form['email'] = 
'foo@bar.com' form['password'] = 'secret' form['password_confirm'] = 'secret' # Submits # res = form.submit().follow() res = form.submit().follow() assert res.status_code == 200 # A new user was created assert len(User.query.all()) == old_count + 1 def test_sees_error_message_if_passwords_dont_match(self, user, testapp): """Show error if passwords don't match.""" # Goes to registration page res = testapp.get(url_for('security.register')) # Fills out form, but passwords don't match form = res.forms['registerForm'] form['email'] = 'foo@bar.com' form['password'] = 'secret' form['password_confirm'] = 'secrets' # Submits res = form.submit() # sees error message assert 'Passwords do not match' in res def test_sees_error_message_if_user_already_registered(self, user, testapp): """Show error if user already registered.""" user = UserFactory(active=True) # A registered user user.save() # Goes to registration page res = testapp.
get(url_for('security.register')) # Fills out form, but email is already registered form = res.forms['registerForm'] form['email'] = user.email form['password'] = 'secret' form['password_confirm'] = 'secret' # Submits res = form.submit() # sees error assert
'is already associated with an account' in res
m5w/matxin-lineariser
matxin_lineariser/statistical_linearisation/Linearisation/NeuralNet.py
Python
gpl-3.0
431
0.006961
imp
ort sklearn.neural_network class NeuralNet: def __init__(self): self.regression = sklearn.neural_network.MLPRegressor(hidden_layer_sizes=100) def train(self, X, Y): self.regression.fit(X, Y) def score(self, X): return self.regression.predict(X) def set_param(self, param): self.regression.set_params(param) def get_param(self): return self.regression.get_param
s()
IndonesiaX/edx-platform
common/test/acceptance/tests/studio/test_studio_home.py
Python
agpl-3.0
5,862
0.001365
""" Acceptance tests for Home Page (My Courses / My Libraries). """ from bok_choy.web_app_test import WebAppTest from opaque_keys.edx.locator import LibraryLocator from ...fixtures import PROGRAMS_STUB_URL from ...fixtures.config import ConfigModelFixture from ...fixtures.programs import ProgramsFixture from ...pages.studio.auto_auth import AutoAuthPage from ...pages.studio.library import LibraryEditPage from ...pages.studio.index import DashboardPage, DashboardPageWithPrograms class CreateLibraryTest(WebAppTest): """ Test that we can create a new content library on the studio home page. ""
" def setUp(self): """
Load the helper for the home page (dashboard page) """ super(CreateLibraryTest, self).setUp() self.auth_page = AutoAuthPage(self.browser, staff=True) self.dashboard_page = DashboardPage(self.browser) def test_create_library(self): """ From the home page: Click "New Library" Fill out the form Submit the form We should be redirected to the edit view for the library Return to the home page The newly created library should now appear in the list of libraries """ name = "New Library Name" org = "TestOrgX" number = "TESTLIB" self.auth_page.visit() self.dashboard_page.visit() self.assertFalse(self.dashboard_page.has_library(name=name, org=org, number=number)) self.assertTrue(self.dashboard_page.has_new_library_button()) self.dashboard_page.click_new_library() self.assertTrue(self.dashboard_page.is_new_library_form_visible()) self.dashboard_page.fill_new_library_form(name, org, number) self.assertTrue(self.dashboard_page.is_new_library_form_valid()) self.dashboard_page.submit_new_library_form() # The next page is the library edit view; make sure it loads: lib_page = LibraryEditPage(self.browser, LibraryLocator(org, number)) lib_page.wait_for_page() # Then go back to the home page and make sure the new library is listed there: self.dashboard_page.visit() self.assertTrue(self.dashboard_page.has_library(name=name, org=org, number=number)) class DashboardProgramsTabTest(WebAppTest): """ Test the programs tab on the studio home page. """ def setUp(self): super(DashboardProgramsTabTest, self).setUp() ProgramsFixture().install_programs([]) self.auth_page = AutoAuthPage(self.browser, staff=True) self.dashboard_page = DashboardPageWithPrograms(self.browser) self.auth_page.visit() def set_programs_api_configuration(self, is_enabled=False, api_version=1, api_url=PROGRAMS_STUB_URL, js_path='/js', css_path='/css'): """ Dynamically adjusts the programs API config model during tests. 
""" ConfigModelFixture('/config/programs', { 'enabled': is_enabled, 'enable_studio_tab': is_enabled, 'enable_student_dashboard': is_enabled, 'api_version_number': api_version, 'internal_service_url': api_url, 'public_service_url': api_url, 'authoring_app_js_path': js_path, 'authoring_app_css_path': css_path, 'cache_ttl': 0 }).install() def test_tab_is_disabled(self): """ The programs tab and "new program" button should not appear at all unless enabled via the config model. """ self.set_programs_api_configuration() self.dashboard_page.visit() self.assertFalse(self.dashboard_page.is_programs_tab_present()) self.assertFalse(self.dashboard_page.is_new_program_button_present()) def test_tab_is_enabled_with_empty_list(self): """ The programs tab and "new program" button should appear when enabled via config. When the programs list is empty, a button should appear that allows creating a new program. """ self.set_programs_api_configuration(True) self.dashboard_page.visit() self.assertTrue(self.dashboard_page.is_programs_tab_present()) self.assertTrue(self.dashboard_page.is_new_program_button_present()) results = self.dashboard_page.get_program_list() self.assertEqual(results, []) self.assertTrue(self.dashboard_page.is_empty_list_create_button_present()) def test_tab_is_enabled_with_nonempty_list(self): """ The programs tab and "new program" button should appear when enabled via config, and the results of the program list should display when the list is nonempty. 
""" test_program_values = [('first program', 'org1'), ('second program', 'org2')] ProgramsFixture().install_programs(test_program_values) self.set_programs_api_configuration(True) self.dashboard_page.visit() self.assertTrue(self.dashboard_page.is_programs_tab_present()) self.assertTrue(self.dashboard_page.is_new_program_button_present()) results = self.dashboard_page.get_program_list() self.assertEqual(results, test_program_values) self.assertFalse(self.dashboard_page.is_empty_list_create_button_present()) def test_tab_requires_staff(self): """ The programs tab and "new program" button will not be available, even when enabled via config, if the user is not global staff. """ self.set_programs_api_configuration(True) AutoAuthPage(self.browser, staff=False).visit() self.dashboard_page.visit() self.assertFalse(self.dashboard_page.is_programs_tab_present()) self.assertFalse(self.dashboard_page.is_new_program_button_present())
vladimir-ipatov/ganeti
lib/tools/burnin.py
Python
gpl-2.0
42,651
0.00823
#!/usr/bin/python # # Copyright (C) 2006, 2007, 2008, 2009, 2010, 2011, 2012 Google Inc. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. """Burnin program """ import sys import optparse import time import socket import urllib from itertools import izip, islice, cycle from cStringIO import StringIO from ganeti import opcodes from ganeti import constants from ganeti import cli from ganeti import errors from ganeti import utils from ganeti import hypervisor from ganeti import compat from ganeti import pathutils from ganeti.confd import client as confd_client USAGE = ("\tburnin -o OS_NAME [options...] 
instance_name ...") MAX_RETRIES = 3 LOG_HEADERS = { 0: "- ", 1: "* ", 2: "", } #: Disk templates supporting a single node _SINGLE_NODE_DISK_TEMPLATES = compat.UniqueFrozenset([ constants.DT_DISKLESS, constants.DT_PLAIN, constants.DT_FILE, constants.DT_SHARED_FILE, constants.DT_EXT, constants.DT_RBD, ]) _SUPPORTED_DISK_TEMPLATES = compat.UniqueFrozenset([ constants.DT_DISKLESS, constants.DT_DRBD8, constants.DT_EXT, constants.DT_FILE, constants.DT_PLAIN, constants.DT_RBD, constants.DT_SHARED_FILE, ]) #: Disk templates for which import/export is tested _IMPEXP_DISK_TEMPLATES = (_SUPPORTED_DISK_TEMPLATES - frozenset([ constants.DT_DISKLESS, constants.DT_FILE, constants.DT_SHARED_FILE, ])) class InstanceDown(Exception): """The checked instance was not up""" class BurninFailure(Exception): """Failure detected during burning""" def Usage(): """Shows program usage information and exits the program.""" print >> sys.stderr, "Usage:" print >> sys.stderr, USAGE sys.exit(2) def Log(msg, *args, **kwargs): """Simple function that prints out its argument. """ if args: msg = msg % args indent = kwargs.get("indent", 0) sys.stdout.write("%*s%s%s\n" % (2 * indent, "", LOG_HEADERS.get(indent, " "), msg)) sys.stdout.flush() def Err(msg, exit_code=1): """Simple error logging that prints to stderr. 
""" sys.stderr.write(msg + "\n") sys.stderr.flush() sys.exit(exit_code) class SimpleOpener(urllib.FancyURLopener): """A simple url opener""" # pylint: disable=W0221 def prompt_user_passwd(self, host, realm, clear_cache=0): """No-interaction version of prompt_user_passwd.""" # we follow parent class' API # pylint: disable=W0613 return None, None def http_error_default(self, url, fp, errcode, errmsg, headers): """Custom error handling""" # make sure sockets are not left in CLOSE_WAIT, this is similar # but with a different exception to the BasicURLOpener class _ = fp.read() # throw away data fp.close() raise InstanceDown("HTTP error returned: code %s, msg %s" % (errcode, errmsg)) OPTIONS = [ cli.cli_option("-o", "--os", dest="os", default=None, help="OS to use during burnin", metavar="<OS>", completion_suggest=cli.OPT_COMPL_ONE_OS), cli.HYPERVISOR_OPT, cli.OSPARAMS_OPT, cli.cli_option("--disk-size", dest="disk_size", help="Disk size (determines disk count)", default="128m", type="string", metavar="<size,size,...>", completion_suggest=("128M 512M 1G 4G 1G,256M" " 4G,1G,1G 10G").split()), cli.cli_option("--disk-growth", dest="disk_growth", help="Disk growth", default="128m", type="string", metavar="<size,size,...>"), cli.cli_option("--mem-size", dest="mem_size", help="Memory size", default=None, type="unit", metavar="<size>", completion_suggest=("128M 256M 512M 1G 4G 8G" " 12G 16G").split()), cli.cli_option("--maxmem-size", dest="maxmem_size", help
="Max Memory size", default=256, type="unit", metavar="<size>
", completion_suggest=("128M 256M 512M 1G 4G 8G" " 12G 16G").split()), cli.cli_option("--minmem-size", dest="minmem_size", help="Min Memory size", default=128, type="unit", metavar="<size>", completion_suggest=("128M 256M 512M 1G 4G 8G" " 12G 16G").split()), cli.cli_option("--vcpu-count", dest="vcpu_count", help="VCPU count", default=3, type="unit", metavar="<count>", completion_suggest=("1 2 3 4").split()), cli.DEBUG_OPT, cli.VERBOSE_OPT, cli.NOIPCHECK_OPT, cli.NONAMECHECK_OPT, cli.EARLY_RELEASE_OPT, cli.cli_option("--no-replace1", dest="do_replace1", help="Skip disk replacement with the same secondary", action="store_false", default=True), cli.cli_option("--no-replace2", dest="do_replace2", help="Skip disk replacement with a different secondary", action="store_false", default=True), cli.cli_option("--no-failover", dest="do_failover", help="Skip instance failovers", action="store_false", default=True), cli.cli_option("--no-migrate", dest="do_migrate", help="Skip instance live migration", action="store_false", default=True), cli.cli_option("--no-move", dest="do_move", help="Skip instance moves", action="store_false", default=True), cli.cli_option("--no-importexport", dest="do_importexport", help="Skip instance export/import", action="store_false", default=True), cli.cli_option("--no-startstop", dest="do_startstop", help="Skip instance stop/start", action="store_false", default=True), cli.cli_option("--no-reinstall", dest="do_reinstall", help="Skip instance reinstall", action="store_false", default=True), cli.cli_option("--no-reboot", dest="do_reboot", help="Skip instance reboot", action="store_false", default=True), cli.cli_option("--no-renamesame", dest="do_renamesame", help="Skip instance rename to same name", action="store_false", default=True), cli.cli_option("--reboot-types", dest="reboot_types", help="Specify the reboot types", default=None), cli.cli_option("--no-activate-disks", dest="do_activate_disks", help="Skip disk activation/deactivation", 
action="store_false", default=True), cli.cli_option("--no-add-disks", dest="do_addremove_disks", help="Skip disk addition/removal", action="store_false", default=True), cli.cli_option("--no-add-nics", dest="do_addremove_nics", help="Skip NIC addition/removal", action="store_false", default=True), cli.cli_option("--no-nics", dest="nics", help="No network interfaces", action="store_const", const=[], default=[{}]), cli.cli_option("--no-confd", dest="do_confd_tests", help="Skip confd queries", action="store_false", default=constants.ENABLE_CONFD), cli.cli_option("--rename", dest="rename", default=None, help=("Give one unused instance name which is taken" " to start the renaming sequence"), metavar="<instance_name>"), cli.cli_option("-t", "--disk-template", dest="disk_template", choices=list(_SUPPORTED_DIS
rudikovrf/django_blog
generic/pagination.py
Python
mit
1,556
0.003213
from django.views.generic.base import ContextMixin class PaginationPages(ContextMixin): """ Class is for extending the context with pages_list and url_without_page for pagination. """ def get_context_data(self, **kwargs): """ Function extends the context with pages_list and url_without_page for pagination. """ from urllib.parse import urlencode context = super().get_context_data(**kwargs) paginator = context['paginator'] pages = list(paginator.page_range) current_page = context['page_obj'].number count = paginator.num_pages
start = current_page - self.page_dif if start < 1: start = 1 finish = current_page + self.page_dif pages_list = [] if start > 1: pages_list.append(1) if start > 2: pages_list.append('...') pages_list.extend(pages[start - 1:finish]) # Range don't start with 1,
# we get pages with numbers # from start to finish. if finish + 1 < count: pages_list.append('...') if finish < count: pages_list.append(count) context['pages_list'] = pages_list get = dict(self.request.GET.copy()) if 'page' in get: del get['page'] params = urlencode(get, doseq=True) context['url_without_page'] = self.request.path + '?' + params + '&page=' return context
tchellomello/home-assistant
tests/components/zoneminder/test_init.py
Python
apache-2.0
4,539
0.001763
"""Tests for init functions.""" from datetime import timedelta from zoneminder.zm import ZoneMinder from homeassistant import config_entries from homeassistant.components.zoneminder import const from homeassistant.components.zoneminder.common import is_client_in_data from homeassistant.config_entries import ( ENTRY_STATE_LOADED, ENTRY_STATE_NOT_LOADED, ENTRY_STATE_SETUP_RETRY, ) from homeassistant.const import ( A
TTR_ID, ATTR_NAME, CONF_HOS
T, CONF_PASSWORD, CONF_PATH, CONF_SOURCE, CONF_SSL, CONF_USERNAME, CONF_VERIFY_SSL, ) from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util from tests.async_mock import MagicMock, patch from tests.common import async_fire_time_changed async def test_no_yaml_config(hass: HomeAssistant) -> None: """Test empty yaml config.""" with patch( "homeassistant.components.zoneminder.common.ZoneMinder", autospec=ZoneMinder ) as zoneminder_mock: zm_client: ZoneMinder = MagicMock(spec=ZoneMinder) zm_client.get_zms_url.return_value = "http://host1/path_zms1" zm_client.login.return_value = True zm_client.get_monitors.return_value = [] zoneminder_mock.return_value = zm_client hass_config = {const.DOMAIN: []} await async_setup_component(hass, const.DOMAIN, hass_config) await hass.async_block_till_done() assert not hass.services.has_service(const.DOMAIN, const.SERVICE_SET_RUN_STATE) async def test_yaml_config_import(hass: HomeAssistant) -> None: """Test yaml config import.""" with patch( "homeassistant.components.zoneminder.common.ZoneMinder", autospec=ZoneMinder ) as zoneminder_mock: zm_client: ZoneMinder = MagicMock(spec=ZoneMinder) zm_client.get_zms_url.return_value = "http://host1/path_zms1" zm_client.login.return_value = True zm_client.get_monitors.return_value = [] zoneminder_mock.return_value = zm_client hass_config = {const.DOMAIN: [{CONF_HOST: "host1"}]} await async_setup_component(hass, const.DOMAIN, hass_config) await hass.async_block_till_done() assert hass.services.has_service(const.DOMAIN, const.SERVICE_SET_RUN_STATE) async def test_load_call_service_and_unload(hass: HomeAssistant) -> None: """Test config entry load/unload and calling of service.""" with patch( "homeassistant.components.zoneminder.common.ZoneMinder", autospec=ZoneMinder ) as zoneminder_mock: zm_client: ZoneMinder = MagicMock(spec=ZoneMinder) zm_client.get_zms_url.return_value = "http://host1/path_zms1" 
zm_client.login.side_effect = [True, True, False, True] zm_client.get_monitors.return_value = [] zm_client.is_available.return_value = True zoneminder_mock.return_value = zm_client await hass.config_entries.flow.async_init( const.DOMAIN, context={CONF_SOURCE: config_entries.SOURCE_USER}, data={ CONF_HOST: "host1", CONF_USERNAME: "username1", CONF_PASSWORD: "password1", CONF_PATH: "path1", const.CONF_PATH_ZMS: "path_zms1", CONF_SSL: False, CONF_VERIFY_SSL: True, }, ) await hass.async_block_till_done() config_entry = next(iter(hass.config_entries.async_entries(const.DOMAIN)), None) assert config_entry assert config_entry.state == ENTRY_STATE_SETUP_RETRY assert not is_client_in_data(hass, "host1") async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10)) await hass.async_block_till_done() assert config_entry.state == ENTRY_STATE_LOADED assert is_client_in_data(hass, "host1") assert hass.services.has_service(const.DOMAIN, const.SERVICE_SET_RUN_STATE) await hass.services.async_call( const.DOMAIN, const.SERVICE_SET_RUN_STATE, {ATTR_ID: "host1", ATTR_NAME: "away"}, ) await hass.async_block_till_done() zm_client.set_active_state.assert_called_with("away") await config_entry.async_unload(hass) await hass.async_block_till_done() assert config_entry.state == ENTRY_STATE_NOT_LOADED assert not is_client_in_data(hass, "host1") assert not hass.services.has_service(const.DOMAIN, const.SERVICE_SET_RUN_STATE)
RealDolos/volaparrot
volaparrot/commands/info.py
Python
mit
10,159
0.001084
""" The MIT License (MIT) Copyright © 2015 RealDolos Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
""" #pylint: disable=unused-argument import logging from time import time from datetime import datetime, timedelta from humanize import naturaldelta from cachetools import LRUCache from .._version import __version__, __fulltitle__ from ..utils import get_text, get_json from .command import Command, PulseCommand from .db import DBCommand __all__ = [ "NiggersCommand", "CheckModCommand", "AboutCommand", "SeenCommand", "AsleepCommand", ] LOGGER = logging.getLogger(__name__) class NiggersCommand(Command): handlers = "!niggers", "!obamas" def __init__(self, *args, **kw): self.blacks = kw.get("args").blacks self.obamas = kw.get("args").obamas super().__init__(*args, **kw) def handle_niggers(self, cmd, remainder, msg): if not self.allowed(msg): return False self.post("{}, the following black gentlemen cannot use this bot: {}", msg.nick, ", ".join(self.blacks)) return True def handle_obamas(self, cmd, remainder, msg): if not self.allowed(msg): return False self.post("{}, the following half-black gentlemen can only use this bot " "once every couple of minutes: {}", msg.nick, ", ".join(self.obamas)) return True class CheckModCommand(Command): handlers = ":check" def staff(self, user): if user.lower() in ("mercwmouth",): self.post( "Yes, unfortunately the literally brown pajeet hitler " "and pretend lawyer {} is still a marginally trusted user", user) elif user.lower() in ("thersanderia",): self.post("Yes, {} is a Trusted Jizzcook with not one but two Michelin stars!", user) elif user.lower() in ("kalyx", "mercwmouth", "davinci", "liquid"): self.post( "Yes, unfortunately the fag " "{} is still a marginally trusted user", user) else: self.post("Yes, {} is still a marginally trusted user", user) def admin(self, user): if user.lower() in ("mercwmouth",): self.post( "Yes, unfortunately the literally brown pajeet hitler " "and pretend lawyer {} is still a marginally mod user", user) elif user.lower() in ("kalyx", "mercwmouth", "davinci", "liquid"): self.post("Yes, unfortunately the 
fag {} is still a mod", user) elif user.lower() == "ptc": self.post("Sweet jewprince is well and alive, unlike Y!erizon") else: self.post("Yes, {} is still a mod", user) def user(self, user): if user.lower() in ("mercwmouth",): self.post( "The literally brown pajeet hitler and pretend lawyer " "{} is only a designated user", user) if user.lower() == "ptc": self.post("Rest in pieces, sweet jewprince") elif user.lower() == "liquid": self.post("pls, Liquid will never be a mod") else: self.post("{} is not trusted, at all!", user) def handle_check(self, cmd, remainder, msg): remainder = remainder.strip() user = remainder if remainder and " " not in remainder else "MercWMouth" LOGGER.debug("Getting user %s", user) try: text, exp = get_text("https://volafile.org/user/{}".format(user)) if time() - exp > 120: get_text.cache_clear() get_json.cache_clear() text, exp = get_text("https://volafile.org/user/{}".format(user)) if "Error 404" in text: LOGGER.info("Not a user %s", user) return False i = self.room.conn.make_api_call("getUserInfo", params=dict(name=user)) if i.get("staff"): self.staff(user) elif i.get("admin"): self.admin(user) else: self.user(user) return True except Exception: LOGGER.exception("huh?") return False class AboutCommand(Command): handlers = "!about", ".about", "!parrot" def handle_cmd(self, cmd, remainder, msg): if not self.allowed(msg): return False self.post( "{}, I am {}, watch me fly:\n{}", remainder or msg.nick, __fulltitle__, "https://github.com/RealDolos/volaparrot/") return True class SeenCommand(DBCommand, Command, PulseCommand): interval = 5 * 60 seen = LRUCache(maxsize=50) start = time() usermap = { "auxo's waifu": "triggu", "doiosodolos": "Daniel", "cirno": "Daniel", "haskell": "Daniel", "ekecheiria": "Daniel", "baronbone": "Daniel", "cyberia": "Daniel", "countcoccyx": "Daniel", "doc": "Dongmaster", "jewmobile": "TheJIDF", "jew": "TheJIDF", "thejew": "TheJIDF", "mrshlomo": "TheJIDF", "pedo": "Counselor", "pede": "Counselor", "briseis": 
"Counselor", "notnot": "Counselor", "counselorpedro": "Counselor", "marky": "SuperMarky", "mcgill": "SuperMarky", "voladolos": "SuperMarky", "affarisdolos": "RealDolos", "gnuwin7dolos": "RealDolos", "cuck": "RealDolos", "merc": "MercWMouth", "cunt": "MercWMouth", "kak": "MercWMouth", "dolosodolos": "MODChatBotGladio", "fakedolos": "kreg", "laindolos": "kreg", "fakekreg": "kreg", "DaVinci": "Ian", "CuckDolos": "Ian", "DolosCuck": "Ian", "apha": "Polish plebbit pedo", "wombatfucker": "NEPTVola", } def mapname(self, name): if name.startswith("Xsa"): return "Xsa" return self.usermap.get(name.lower(), na
me) def __init
__(self, *args, **kw): super().__init__(*args, **kw) self.conn.execute("CREATE TABLE IF NOT EXISTS seen (" "user TEXT PRIMARY KEY, " "time INT" ")") try: cur = self.conn.cursor() cur.execute("SELECT time FROM seen ORDER BY time ASC LIMIT 1") self.start = int(cur.fetchone()[0]) / 1000 except Exception: LOGGER.exception("Failed to get min seen") def handles(self, cmd): return True def onpulse(self, pulse): try: LOGGER.debug("Dumping seen to db") cur = self.conn.cursor() cur.executemany( "INSERT OR REPLACE INTO seen VALUES(?, ?)", list((u, int(v * 1000)) for u, v in self.seen.items()) ) except Exception: LOGGER.exception("Failed to update seen") def handle_cmd(self, cmd, remainder, msg): nick = self.mapname(msg.nick).casefold() self.seen[nick] = time() if msg.admin
we-inc/mms-snow-white-and-the-seven-pandas
webserver/apps/booths/migrations/0003_auto_20171116_0220.py
Python
mit
501
0.001996
# -*- c
oding: utf-8 -*- # Generated by Django 1.11.7 on 2017-11-15 19:20 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('booths', '0002_auto_20171116_0045'), ] operations = [ migrations.AlterField( model_name='booth', name='rental_fee', field=models.DecimalField(decimal_places=2, max_digits=20, verbose_name='Rental Fee'),
), ]
gsarma/PyOpenWorm
tests/ContextTest.py
Python
mit
12,077
0.000414
import rdflib from rdflib.term import URIRef, Variable from PyOpenWorm.dataObject import DataObject, InverseProperty from PyOpenWorm.context import Context from PyOpenWorm.context_store import ContextStore from .DataTestTemplate import _DataTest try: from unittest.mock import MagicMock, Mock except ImportError: from mock import MagicMock, Mock class ContextTest(_DataTest): def test_inverse_property_context(self): class A(DataObject): def __init__(self, **kwargs): super(A, self).__init__(**kwargs) self.a = A.ObjectProperty(value_type=B) class B(DataObject): def __init__(self, **kwargs): super(B, self).__init__(**kwargs) self.b = B.ObjectProperty(value_type=A) InverseProperty(B, 'b', A, 'a') ctx1 = Context(ident='http://example.org/context_1') ctx2 = Context(ident='http://example.org/context_2') a = ctx1(A)(ident='a') b = ctx2(B)(ident='b') a.a(b) expected = (URIRef('b'), URIRef('http://openworm.org/entities/B/b'), URIRef('a')) self.assertIn(expected, list(ctx1.contents_triples())) def test_defined(self): class A(DataObject): def __init__(self, **kwargs): super(A, self).__init__(**kwargs) self.a = A.ObjectProperty(value_type=B) def defined_augment(self): return self.a.has_defined_value() def identifier_augment(self): return self.make_identifier(self.a.onedef().identifier.n3()) class B(DataObject): def __init__(self, **kwargs): super(B, self).__init__(**kwargs) self.b = B.ObjectProperty(value_type=A) InverseProperty(B, 'b', A, 'a') ctx1 = Context(ident='http://example.org/context_1') ctx2 = Context(ident='http://example.org/context_2') a = ctx1(A)() b = ctx2(B)(ident='b') a.a(b) self.assertTrue(a.defined) def test_save_context_no_graph(self): ctx = Context() del ctx.conf['rdf.graph'] with self.assertRaisesRegexp(Exception, r'graph'): ctx.save_context() def test_context_store(self): class A(DataObject): pass ctx = Context(ident='http://example.com/context_1') ctx(A)(ident='anA') self.assertIn(URIRef('anA'), tuple(x.identifier for x in ctx.query(A)().load())) def 
test_decontextualize(self): class A(DataObject): pass ctx = Context(ident='http://example.com/context_1') ctxda = ctx(A)(ident='anA') self.assertIsNone(ctxda.decontextualize().context) def test_init_imports(self): ctx = Context(ident='http://example.com/context_1') self.assertEqual(len(list(ctx.imports)), 0) def test_zero_imports(self): ctx0 = Context(ident='http://example.com/context_0') ctx = Context(ident='http://example.com/context_1') ctx.save_imports(ctx0) self.assertEqual(len(ctx0), 0) def test_save_import(self): ctx0 = Context(ident='http://example.com/context_0') ctx = Context(ident='http://example.com/context_1') new_ctx = Context(ident='http://example.com/context_1') ctx.add_import(new_ctx) ctx.save_imports(ctx0) self.assertEqual(len(ctx0), 1) def test_add_import(self): ctx0 = Context(ident='http://example.com/context_0') ctx = Context(ident='http://example.com/context_1') ctx2 = Context(ident='http://example.com/context_2') ctx2_1 = Context(ident='http://example.com/context_2_1') ctx.add_import(ctx2) ctx.add_import(ctx2_1) ctx3 = Context(ident='http://example.com/context_3') ctx3.add_import(ctx) final_ctx = Context(ident='http://example.com/context_1', imported=(ctx3,)) final_ctx.save_imports(ctx0) self.assertEqual(len(ctx0), 4) def test_init_len(self): ctx = Context(ident='http://example.com/context_1') self.assertEqual(len(ctx), 0) def test_len(self): ident_uri = 'http://example.com/context_1' ctx = Context(ident=ident_uri) for i in range(5): ctx.add_statement(create_mock_statement(ident_uri, i)) self.assertEqual(len(ctx), 5) def test_add_remove_statement(self): ident_uri = 'http://example.com/context_1' ctx = Context(ident=ident_uri) stmt_to_remove = create_mock_statement(ident_uri, 42) for i in range(5): ctx.add_statement(create_mock_statement(ident_uri, i)) ctx.add_statement(stmt_to_remove) ctx.remove_statement(stmt_to_remove) self.assertEqual(len(ctx), 5) def test_add_statement_with_different_context(self): ctx = 
Context(ident='http://example.com/context_1') stmt1 = create_mock_statement('http://example.com/context_2', 1) with self.assertRaises(ValueError): ctx.add_statement(stmt1) def test_contents_triples(self): res_wanted = [] ident_uri = 'http://example.com/context_1' ctx = Context(ident=ident_uri) for i in range(5): stmt = create_mock_statement(ident_uri, i) ctx.add_statement(stmt) res_wanted.append(stmt.to_triple()) for triples in ctx.contents_triples(): self.assertTrue(triples in res_wanted) def test_clear(self): ident_uri = 'http://example.com/context_1' ctx = Context(ident=ident_uri) for i in range(5): ctx.add_statement(create_mock_statement(ident_uri, i)) ctx.clear() self.assertEqual(len(ctx), 0) def test_save_context(self): graph = set() ident_uri = 'http://example.com/context_1' ctx = Context(ident=ident_uri) for i in range(5): ctx.add_statement(create_mock_statement(ident_uri, i)) ctx.save_context(graph) self.assertEqual(len(graph), 5) def test_save_context_with_inline_imports(self): graph = set() ident_uri = 'http://example.com/context_1' ident_uri2 = 'http://example.com/context_2' ide
nt_uri2_1 = 'http://example.com/context_2_1' ident_uri3 = 'http://example.com/context_3' ident_uri4
= 'http://example.com/context_4' ctx = Context(ident=ident_uri) ctx2 = Context(ident=ident_uri2) ctx2_1 = Context(ident=ident_uri2_1) ctx.add_import(ctx2) ctx.add_import(ctx2_1) ctx3 = Context(ident=ident_uri3) ctx3.add_import(ctx) last_ctx = Context(ident=ident_uri4) last_ctx.add_import(ctx3) ctx.add_statement(create_mock_statement(ident_uri, 1)) ctx2.add_statement(create_mock_statement(ident_uri2, 2)) ctx2_1.add_statement(create_mock_statement(ident_uri2_1, 2.1)) ctx3.add_statement(create_mock_statement(ident_uri3, 3)) last_ctx.add_statement(create_mock_statement(ident_uri4, 4)) last_ctx.save_context(graph, True) self.assertEqual(len(graph), 5) def test_triples_saved(self): graph = set() ident_uri = 'http://example.com/context_1' ident_uri2 = 'http://example.com/context_2' ident_uri2_1 = 'http://example.com/context_2_1' ident_uri3 = 'http://example.com/context_3' ident_uri4 = 'http://example.com/context_4' ctx = Context(ident=ident_uri) ctx2 = Context(ident=ident_uri2) ctx2_1 = Context(ident=ident_uri2_1) ctx.add_import(ctx2) ctx.add_import(ctx2_1) ctx3 = Context(ident=ident_uri3) ctx3.add_import(ctx) last_ctx = Context(ident=ident_uri4) last_ctx.add_import(ctx3) ctx.add_statement(create_mock_statement(ident_uri, 1)) ctx2.add_statement(create_mock_statement(ident_uri2, 2)) ctx2_1.add_statement(create_mock_statement(ident_uri2_1, 2.1)) ctx3.add_statement(create_mock_statement(ident_uri3, 3)) last_ctx.add_statement(create_mock_statement(ident_uri4, 4)) last_ctx.sav
PetePriority/home-assistant
homeassistant/helpers/state.py
Python
apache-2.0
8,174
0
"""Helpers that help with state related things.""" import asyncio import datetime as dt import json import logging from collections import defaultdict from types import TracebackType from typing import ( # noqa: F401 pylint: disable=unused-import Awaitable, Dict, Iterable, List, Optional, Tuple, Type, Union) from homeassistant.loader import bind_hass import homeassistant.util.dt as dt_util from homeassistant.components.notify import ( ATTR_MESSAGE, SERVICE_NOTIFY) from homeassistant.components.sun import ( STATE_ABOVE_HORIZON, STATE_BELOW_HORIZON) from homeassistant.components.mysensors.switch import ( ATTR_IR_CODE, SERVICE_SEND_IR_CODE) from homeassistant.components.cover import ( ATTR_POSITION, ATTR_TILT_POSITION) from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_OPTION, SERVICE_ALARM_ARM_AWAY, SERVICE_ALARM_ARM_HOME, SERVICE_ALARM_DISARM, SERVICE_ALARM_TRIGGER, SERVICE_LOCK, SERVICE_TURN_OFF, SERVICE_TURN_ON, SERVICE_UNLOCK, SERVICE_OPEN_COVER, SERVICE_CLOSE_COVER, SERVICE_SET_COVER_POSITION, SERVICE_SET_COVER_TILT_POSITION, STATE_ALARM_ARMED_AWAY, STATE_ALARM_ARMED_HOME, STATE_ALARM_DISARMED, STATE_ALARM_TRIGGERED, STATE_CLOSED, STATE_HOME, STATE_LOCKED, STATE_NOT_HOME, STATE_OFF, STATE_ON, STATE_OPEN, STATE_UNKNOWN, STATE_UNLOCKED, SERVICE_SELECT_OPTION) from homeassistant.core import ( Context, State, DOMAIN as HASS_DOMAIN) from homeassistant.util.async_ import run_coroutine_threadsafe from .typing import HomeAssistantType _LOGGER = logging.getLogger(__name__) GROUP_DOMAIN = 'group' # Update this dict of lists when new services are added to HA. # Each item is a service with a list of required attributes. SERVICE_ATTRIBUTES = { SERVICE_NOTIFY: [ATTR_MESSAGE], SERVICE_SEND_IR_CODE: [ATTR_IR_CODE], SERVICE_SELECT_OPTION: [ATTR_OPTION], SERVICE_SET_COVER_POSITION: [ATTR_POSITION], SERVICE_SET_COVER_TILT_POSITION: [ATTR_TILT_POSITION] } # Update this dict when new services are added to HA. # Each item is a service with a corresponding state. 
# Maps a service name to the entity state that calling it should produce.
# Used by async_reproduce_state_legacy to pick the service for a target state.
SERVICE_TO_STATE = {
    SERVICE_TURN_ON: STATE_ON,
    SERVICE_TURN_OFF: STATE_OFF,
    SERVICE_ALARM_ARM_AWAY: STATE_ALARM_ARMED_AWAY,
    SERVICE_ALARM_ARM_HOME: STATE_ALARM_ARMED_HOME,
    SERVICE_ALARM_DISARM: STATE_ALARM_DISARMED,
    SERVICE_ALARM_TRIGGER: STATE_ALARM_TRIGGERED,
    SERVICE_LOCK: STATE_LOCKED,
    SERVICE_UNLOCK: STATE_UNLOCKED,
    SERVICE_OPEN_COVER: STATE_OPEN,
    SERVICE_CLOSE_COVER: STATE_CLOSED
}


class AsyncTrackStates:
    """
    Record the time when the with-block is entered.

    Add all states that have changed since the start time to the return list
    when with-block is exited.

    Must be run within the event loop.
    """

    def __init__(self, hass: HomeAssistantType) -> None:
        """Initialize a TrackStates block."""
        self.hass = hass
        # The list object returned by __enter__; it is filled in __exit__ so
        # callers holding the reference see the changed states after the block.
        self.states = []  # type: List[State]

    # pylint: disable=attribute-defined-outside-init
    def __enter__(self) -> List[State]:
        """Record time from which to track changes."""
        self.now = dt_util.utcnow()
        return self.states

    def __exit__(self,
                 exc_type: Optional[Type[BaseException]],
                 exc_value: Optional[BaseException],
                 traceback: Optional[TracebackType]) -> None:
        """Add changes states to changes list."""
        # Extend (not replace) so the list handed out by __enter__ is updated
        # in place.
        self.states.extend(get_changed_since(self.hass.states.async_all(),
                                             self.now))


def get_changed_since(states: Iterable[State],
                      utc_point_in_time: dt.datetime) -> List[State]:
    """Return list of states that have been changed since utc_point_in_time."""
    return [state for state in states
            if state.last_updated >= utc_point_in_time]


@bind_hass
def reproduce_state(hass: HomeAssistantType,
                    states: Union[State, Iterable[State]],
                    blocking: bool = False) -> None:
    """Reproduce given state.

    Synchronous wrapper: schedules async_reproduce_state on the event loop
    and blocks until it finishes. Must NOT be called from within the loop.
    """
    return run_coroutine_threadsafe(  # type: ignore
        async_reproduce_state(hass, states, blocking), hass.loop).result()


@bind_hass
async def async_reproduce_state(
        hass: HomeAssistantType,
        states: Union[State, Iterable[State]],
        blocking: bool = False,
        context: Optional[Context] = None) -> None:
    """Reproduce a list of states on multiple domains.

    States are grouped by entity domain; domains whose component exposes
    ``async_reproduce_states`` get the new-style call, all others fall back
    to the legacy service-matching path.
    """
    if isinstance(states, State):
        states = [states]

    to_call = defaultdict(list)  # type: Dict[str, List[State]]

    for state in states:
        to_call[state.domain].append(state)

    async def worker(domain: str, data: List[State]) -> None:
        # Dispatch one domain's states via the component if it supports the
        # modern hook, otherwise via the legacy implementation below.
        component = getattr(hass.components, domain)
        if hasattr(component, 'async_reproduce_states'):
            await component.async_reproduce_states(
                data,
                context=context)
        else:
            await async_reproduce_state_legacy(
                hass,
                domain,
                data,
                blocking=blocking,
                context=context)

    if to_call:
        # run all domains in parallel
        await asyncio.gather(*[
            worker(domain, data)
            for domain, data in to_call.items()
        ])


@bind_hass
async def async_reproduce_state_legacy(
        hass: HomeAssistantType,
        domain: str,
        states: Iterable[State],
        blocking: bool = False,
        context: Optional[Context] = None) -> None:
    """Reproduce given state.

    Legacy path: for each state, pick a service of the domain whose required
    attributes / target state match, then batch identical service calls.
    """
    # Keyed by (service, serialized attributes) so identical calls for many
    # entities collapse into one service call.
    to_call = defaultdict(list)  # type: Dict[Tuple[str, str], List[str]]

    if domain == GROUP_DOMAIN:
        # Group entities are driven through the core 'homeassistant' domain
        # services rather than a 'group' service domain.
        service_domain = HASS_DOMAIN
    else:
        service_domain = domain

    for state in states:

        if hass.states.get(state.entity_id) is None:
            _LOGGER.warning("reproduce_state: Unable to find entity %s",
                            state.entity_id)
            continue

        domain_services = hass.services.async_services().get(service_domain)

        if not domain_services:
            _LOGGER.warning(
                "reproduce_state: Unable to reproduce state %s (1)", state)
            continue

        # Last matching service wins, except that an exact SERVICE_TO_STATE
        # match stops the search immediately.
        service = None
        for _service in domain_services.keys():
            if (_service in SERVICE_ATTRIBUTES and
                    all(attr in state.attributes
                        for attr in SERVICE_ATTRIBUTES[_service]) or
                    _service in SERVICE_TO_STATE and
                    SERVICE_TO_STATE[_service] == state.state):
                service = _service
            if (_service in SERVICE_TO_STATE and
                    SERVICE_TO_STATE[_service] == state.state):
                break

        if not service:
            _LOGGER.warning(
                "reproduce_state: Unable to reproduce state %s (2)", state)
            continue

        # We group service calls for entities by service call
        # json used to create a hashable version of dict with maybe lists in it
        key = (service,
               json.dumps(dict(state.attributes), sort_keys=True))
        to_call[key].append(state.entity_id)

    domain_tasks = []  # type: List[Awaitable[Optional[bool]]]
    for (service, service_data), entity_ids in to_call.items():
        data = json.loads(service_data)
        data[ATTR_ENTITY_ID] = entity_ids

        domain_tasks.append(
            hass.services.async_call(service_domain, service, data, blocking,
                                     context)
        )

    if domain_tasks:
        await asyncio.wait(domain_tasks, loop=hass.loop)


def state_as_number(state: State) -> float:
    """
    Try to coerce our state to a number.

    Raises ValueError if this is not possible.
    """
    # Imported here (not at module top) — NOTE(review): presumably to avoid a
    # circular import with the climate component; confirm before moving.
    from homeassistant.components.climate import (
        STATE_HEAT, STATE_COOL, STATE_IDLE)

    # "Active"-style states map to 1, "inactive"-style to 0; anything else
    # must itself be numeric or float() raises ValueError.
    if state.state in (STATE_ON, STATE_LOCKED, STATE_ABOVE_HORIZON,
                       STATE_OPEN, STATE_HOME, STATE_HEAT, STATE_COOL):
        return 1
    if state.state in (STATE_OFF, STATE_UNLOCKED, STATE_UNKNOWN,
                       STATE_BELOW_HORIZON, STATE_CLOSED, STATE_NOT_HOME,
                       STATE_IDLE):
        return 0

    return float(state.state)
ESOedX/edx-platform
openedx/core/lib/api/authentication.py
Python
agpl-3.0
4,363
0.002521
"""
Common Authentication Handlers used across projects.
"""
from __future__ import absolute_import

import logging

import django.utils.timezone
from oauth2_provider import models as dot_models
from provider.oauth2 import models as dop_models
from rest_framework.exceptions import AuthenticationFailed
from rest_framework_oauth.authentication import OAuth2Authentication

# Machine-readable error codes attached to AuthenticationFailed responses.
OAUTH2_TOKEN_ERROR = u'token_error'
OAUTH2_TOKEN_ERROR_EXPIRED = u'token_expired'
OAUTH2_TOKEN_ERROR_MALFORMED = u'token_malformed'
OAUTH2_TOKEN_ERROR_NONEXISTENT = u'token_nonexistent'
OAUTH2_TOKEN_ERROR_NOT_PROVIDED = u'token_not_provided'

log = logging.getLogger(__name__)


class OAuth2AuthenticationAllowInactiveUser(OAuth2Authentication):
    """
    This is a temporary workaround while the is_active field on the user is
    coupled with whether or not the user has verified ownership of their claimed email address.
    Once is_active is decoupled from verified_email, we will no longer need this
    class override.

    But until then, this authentication class ensures that the user is logged in,
    but does not require that their account "is_active".

    This class can be used for an OAuth2-accessible endpoint that allows users to access
    that endpoint without having their email verified.  For example, this is used
    for mobile endpoints.
    """

    def authenticate(self, *args, **kwargs):
        """
        Returns two-tuple of (user, token) if access token authentication
        succeeds, raises an AuthenticationFailed (HTTP 401) if authentication
        fails or None if the user did not try to authenticate using an access
        token.
        """
        try:
            return super(OAuth2AuthenticationAllowInactiveUser, self).authenticate(*args, **kwargs)
        except AuthenticationFailed as exc:
            # Normalize the failure into a dict with a machine-readable
            # error_code.  DRF may raise with either a dict detail (already
            # structured) or a plain string message.
            if isinstance(exc.detail, dict):
                developer_message = exc.detail['developer_message']
                error_code = exc.detail['error_code']
            else:
                developer_message = exc.detail
                # Classify by matching the upstream library's message text —
                # NOTE(review): brittle if the library wording changes.
                if 'No credentials provided' in developer_message:
                    error_code = OAUTH2_TOKEN_ERROR_NOT_PROVIDED
                elif 'Token string should not contain spaces' in developer_message:
                    error_code = OAUTH2_TOKEN_ERROR_MALFORMED
                else:
                    error_code = OAUTH2_TOKEN_ERROR
            raise AuthenticationFailed({
                u'error_code': error_code,
                u'developer_message': developer_message
            })

    def authenticate_credentials(self, request, access_token):
        """
        Authenticate the request, given the access token.

        Overrides base class implementation to discard failure if user is
        inactive.
        """
        token = self.get_access_token(access_token)
        if not token:
            raise AuthenticationFailed({
                u'error_code': OAUTH2_TOKEN_ERROR_NONEXISTENT,
                u'developer_message': u'The provided access token does not match any valid tokens.'
            })
        elif token.expires < django.utils.timezone.now():
            raise AuthenticationFailed({
                u'error_code': OAUTH2_TOKEN_ERROR_EXPIRED,
                u'developer_message': u'The provided access token has expired and is no longer valid.',
            })
        else:
            # Note: intentionally no token.user.is_active check (see class
            # docstring).
            return token.user, token

    def get_access_token(self, access_token):
        """
        Return a valid access token that exists in one of our OAuth2 libraries,
        or None if no matching token is found.
        """
        # django-oauth-toolkit (DOT) is consulted first, then
        # django-oauth2-provider (DOP).
        return self._get_dot_token(access_token) or self._get_dop_token(access_token)

    def _get_dop_token(self, access_token):
        """
        Return a valid access token stored by django-oauth2-provider (DOP), or
        None if no matching token is found.
        """
        token_query = dop_models.AccessToken.objects.select_related('user')
        return token_query.filter(token=access_token).first()

    def _get_dot_token(self, access_token):
        """
        Return a valid access token stored by django-oauth-toolkit (DOT), or
        None if no matching token is found.
        """
        token_query = dot_models.AccessToken.objects.select_related('user')
        return token_query.filter(token=access_token).first()
alexandermendes/pybossa-github-builder
tests/__init__.py
Python
bsd-3-clause
554
0.001805
# -*- coding: utf8 -*-
# Test-package bootstrap: wires this plugin into the PyBossa test suite.
import sys
import os

import pybossa_github_builder as plugin
from mock import patch

# Use the PyBossa test suite
# NOTE: the sys.path change must happen before the `from default import ...`
# below — `default` lives inside ./pybossa/test, not on the normal path.
sys.path.append(os.path.abspath("./pybossa/test"))
from default import with_context


def setUpPackage():
    """Setup the plugin.

    nose package-level fixture: loads the test settings into the PyBossa
    Flask app and registers this plugin, once, before any test in the
    package runs.
    """
    from default import flask_app
    with flask_app.app_context():
        settings = os.path.abspath('./settings_test.py')
        flask_app.config.from_pyfile(settings)
        # The plugin is initialized from its installed package directory.
        plugin_dir = os.path.dirname(plugin.__file__)
        plugin.PyBossaGitHubBuilder(plugin_dir).setup()
inuyasha2012/tornado-cat-example
example/main.py
Python
mit
8,865
0.002237
# coding=utf-8 from psycopg2.extras import NamedTupleCursor, Json from tornado.web import Application, HTTPError from tornado import gen from tornado.ioloop import IOLoop from tornado.httpserver import HTTPServer from tornado.options import parse_command_line import momoko import os from bank import SelectQuestion, get_level_one_item from base import BaseHandler, SessionBaseHandler from settings import MAX_ANSWER_COUNT, DSN, COOKIE_SECRET from utils import Flow, get_quiz_stage, Que, session_reset, CheckChoice class QuestionnaireListHandler(BaseHandler): @gen.coroutine def get(self): # 问卷列表 cursor = yield self.db.execute("SELECT id, name FROM questionnaire;") q_list = cursor.fetchall() self.render('index.html', q_list=q_list) class QuestionHandler(SessionBaseHandler): @gen.coroutine def _check_q_exist_n_get_q_a(self, q_id): """ :param q_id: :raise gen.Return: 返回去q_a,q是questionnaire,a是answer """ session_key = self.session_key cursor = yield self.db.execute( """ SELECT answer.id as aid, answer.score_answer, answer.old_answer, answer.order_answer, answer.try_count, answer.has_finished, questionnaire.id, questionnaire.type, questionnaire.second, questionnaire.flow, questionnaire.level_one_count from answer INNER JOIN questionna
ire ON ans
wer.questionnaire_id = questionnaire.id WHERE answer.questionnaire_id=%s AND answer.session_key=%s; """, (q_id, session_key) ) # q_a的意思是questionnaire and answer q_a = cursor.fetchone() if not q_a: cursor = yield self.db.execute("SELECT id, type, flow, level_one_count, second " "FROM questionnaire WHERE id=%s;", (q_id,)) q = cursor.fetchone() if q: cursor = yield self.db.execute("INSERT INTO answer (questionnaire_id, session_key, " "score_answer, order_answer, old_answer) VALUES (%s, %s, %s, %s, %s)" "RETURNING id AS aid, score_answer, " "order_answer, old_answer, try_count, " "has_finished;", (q_id, session_key, Json({}), Json({}), Json({}))) ans = cursor.fetchone() raise gen.Return((q, ans)) else: raise HTTPError(404) else: raise gen.Return((q_a, q_a)) @gen.coroutine def get(self, q_id): session = self.session q_a = yield self._check_q_exist_n_get_q_a(q_id) q, ans = q_a # 下面是session的键值 is_re_start = 'is_%s_re_start' % q_id step = '%s_step' % q_id stage = '%s_stage' % q_id next_item = '%s_next_item' % q_id step_count = '%s_step_count' % q_id # 被试答题的过程 flow = Flow(flow=q.flow, name=session.session_key) # 如果session不存在is_X_start_id,说明被试可能关闭了浏览器,所以重新启动测验 if not session.get(is_re_start, True): # 判断测验的第一阶段是否处于结束位置 if session[stage] == 1: next_item_list = session[next_item] que = Que(*next_item_list.pop(0)) else: next_item = session[next_item] que = Que(*next_item) # 将是否重新测验设定为真,则若关闭浏览器或刷新页面,则重启测验 session[is_re_start] = True session[step] += 1 session[stage] = get_quiz_stage(session[step], session[stage], flow) else: # 开始测验或重启测验,session恢复出厂设置 session_reset(session, q_id) # 测验作答次数+1 if ans.try_count > (MAX_ANSWER_COUNT - 1): raise HTTPError(403) # 之前的旧答案存入old_answer中 if ans.score_answer: ans.old_answer.update(ans.score_answer) ans.score_answer.clear() ans.order_answer.clear() # 第一阶段需要回答的题量 count = flow.get_level_item_count(1) # 给用户展现的第一道试题 que = yield get_level_one_item(ans, session, q, count, self.db) yield self.db.execute( "UPDATE answer SET has_finished = false, 
try_count = try_count + 1, score_answer=%s, order_answer=%s, " "old_answer=%s WHERE id=%s", (Json(ans.score_answer), Json(ans.order_answer), Json(ans.old_answer), ans.aid) ) # 总共答题量 session[step_count] = flow.total_item_count yield self.db.execute("UPDATE question SET count = count + 1 WHERE id=%s", (que.id, )) total_step_count = session[step_count] current_step = session[step] current_progress = int((current_step * 1.0 / total_step_count) * 100) second = q.second session['q_%s_id' % q_id] = que yield self.save() self.render('cat.html', que=que, current_progress=current_progress, total_step_count=total_step_count, current_step=current_step, q_id=q_id, second=second) @gen.coroutine def post(self, q_id): session = self.session q_a = yield self._check_q_exist_n_get_q_a(q_id) q, ans = q_a q_type = q.type que = Que(*session.get('q_%s_id' % q_id)) que_choice = self.get_argument('question') check_choice = CheckChoice(que_choice, que) if check_choice.is_valid(): # 保存作答结果 value = check_choice.value session['%s_score' % q_id].append(int(value)) ans.score_answer[str(que.id)]['score'] = value ans.score_answer[str(que.id)]['choice'] = que_choice # 生成重定向URL SelectQuestionClass = getattr(SelectQuestion, q_type) url = yield SelectQuestionClass(session=session, q=q, que_id=que.id, ans=ans, db=self.db).get_que_then_redirect() yield self.save() self.redirect(url) else: # 数据不合格则返回原作答页面 current_step = session['%s_step' % q_id] total_step_count = session['%s_step_count' % q_id] current_progress = int((current_step * 1.0 / total_step_count) * 100) second = q.second self.render('cat.html', que=que, current_progress=current_progress, total_step_count=total_step_count, current_step=current_step, q_id=q_id, second=second) class ResultHandler(BaseHandler): @gen.coroutine def _check_result_exist_n_get_q_a(self, q_id): session_key = self.get_cookie('sessionid') if not session_key: raise HTTPError(404) cursor = yield self.db.execute( """ SELECT answer.score_answer, answer.order_answer, 
answer.has_finished from answer INNER JOIN questionnaire ON answer.questionnaire_id = questionnaire.id WHERE answer.questionnaire_id=%s AND answer.session_key=%s; """, (q_id, session_key) ) # q_a的意思是questionnaire and answer q_a = cursor.fetchone() if (not q_a) or (not q_a.has_finished): raise HTTPError(404) else: raise gen.Return(q_a) @gen.coroutine def get(self, q_id): q_a = yield self._check_result_exist_n_get_q_a(q_id) self.render('result.html', q_a=q_a, q_id=q_id) if __name__ == "__main__": parse_command_line() ioloop = IOLoop.instance() application = Application([ (r"/", QuestionnaireListHandler), (r"/cat/(\d+)", QuestionHandler), (r"/result/(\d+)", ResultHandler) ], template_path=os.path.join(os.path.dirname(__file__), "templates"), static_path=os.path.join(os.path.dirname(__file__), "static"), cookie_secret=COOKIE_SECRET, debug=True, xsrf_cookies=True, ) application.db = momoko.Pool( dsn=DSN, si
denys-zarubin/sweetheart_test
quiz/migrations/0012_auto_20170407_1442.py
Python
unlicense
547
0
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-04-07 14:42
from __future__ import unicode_literals

from django.conf import settings
from django.db import migrations


class Migration(migrations.Migration):
    """Replace the unique constraints on ``SelectedAnswer``.

    Auto-generated: sets unique_together to {(quiz, user), (question, answer)}
    on the ``selectedanswer`` model; no data migration is performed.
    """

    dependencies = [
        # Required because the model references the swappable user model.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('quiz', '0011_auto_20170407_1440'),
    ]

    operations = [
        # AlterUniqueTogether replaces the model's whole unique_together set.
        migrations.AlterUniqueTogether(
            name='selectedanswer',
            unique_together=set([('quiz', 'user'), ('question', 'answer')]),
        ),
    ]
FIDATA/database-draft
predefined-data/import.py
Python
gpl-3.0
4,368
0.027033
#!/usr/bin/env python
# -*- coding: utf-8 -*-

# FIDATA. Open-source system for analysis of financial and economic data
# Copyright © 2013 Basil Peace

# This file is part of FIDATA.
#
# FIDATA is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# FIDATA is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with FIDATA.  If not, see <http://www.gnu.org/licenses/>.

# Importer of predefined reference data (scripts, countries, markets,
# data providers) from CSV files into the FIDATA database.  Each section
# reads one semicolon-delimited CSV, normalizes empty strings to None,
# and writes rows through the FIDATA ORM-like API.

from FIDATA import *
initArgParser('Importer of predefined data', defLogFilename = 'import.log')
initFIDATA()

from csv import DictReader
from os import path
from PIL import Image

# Model classes whose tables were (re)filled; passed to FIDATA.analyze at the end.
classes = []

logging.info('Import of predefined data started')

# logging.info('Importing langs')
# reader = DictReader(open('langs.csv', 'r', encoding = 'UTF8'), delimiter = ';')
# for row in reader:
#     Lang(FIDATA, row = row, write = True, tryGetFromDB = False)
# del reader
# commit()
# classes += [Lang]

logging.info('Importing scripts')
reader = DictReader(open('scripts.csv', 'r', encoding = 'UTF8'), delimiter = ';')
for row in reader:
    Script(FIDATA, row = row, write = True, tryGetFromDB = False)
del reader
commit()
classes += [Script]

logging.info('Importing countries')
reader = DictReader(open('countries.csv', 'r', encoding = 'UTF8'), delimiter = ';')
for row in reader:
    # parent_country
    # associated_with
    if row['alpha2_code'] == '':
        row['alpha2_code'] = None
    else:
        # NOTE(review): backslash makes this a Windows-style path; on POSIX
        # path.exists will never match 'flags\xx.png' — confirm platform.
        flagFilename = 'flags\{:s}.png'.format(row['alpha2_code'].lower())
        if path.exists(flagFilename):
            row['flag'] = Image.open(flagFilename)
    if row['gov_website'] == '':
        row['gov_website'] = None
    if row['stats_website'] == '':
        row['stats_website'] = None
    FIDATA.country(row = row, write = True, tryGetFromDB = False)
del reader
commit()
classes += [Country]

# logging.info('Importing issuers')
# reader = DictReader(open('issuers.csv', 'r', encoding = 'UTF8'), delimiter = ';')
# for row in reader:
#     FIDATA.issuer(row = row, write = True, tryGetFromDB = False)
# del reader
# commit()
# classes += [Issuer]

# logging.info('Importing currencies')
# reader = DictReader(open('currencies.csv', 'r', encoding = 'UTF8'), delimiter = ';')
# for row in reader:
#     row['instr_type'] = InstrumentType.Currency
#     FIDATA.instrument(row = row, write = True, tryGetFromDB = False)
# del reader
# commit()

# logging.info('Importing instruments')
# reader = DictReader(open('instruments.csv', 'r', encoding = 'UTF8'), delimiter = ';')
# for row in reader:
#     FIDATA.instrument(row = row, write = True, tryGetFromDB = False)
# del reader
# commit()
# classes += [Instrument]

logging.info('Importing markets')
reader = DictReader(open('markets.csv', 'r', encoding = 'UTF8'), delimiter = ';')
# Markets that reference a trade organizer are deferred until all markets
# are read, so the organizer can be resolved by symbol afterwards.
child_markets = list()
for row in reader:
    if row['country_alpha2_code'] == '':
        row['country'] = None
    else:
        row['country'] = FIDATA.country(row = {
            'alpha2_code': row['country_alpha2_code'],
            'name'       : row['country_name']
        })
    if row['acronym'] == '':
        row['acronym'] = None
    if row['website'] == '':
        row['website'] = None
    if row['trade_organizer_symbol'] == '':
        FIDATA.market(row = row, write = True, tryGetFromDB = False)
    else:
        child_markets.append((FIDATA.market(row = row, write = False, tryGetFromDB = False), row['trade_organizer_symbol']))
del reader
# Second pass: link each deferred market to its trade organizer and persist.
for (market, trade_organizer_symbol) in child_markets:
    market.tradeOrganizer = FIDATA.market(row = {'symbol': trade_organizer_symbol})
    market.write()
del child_markets
commit()
classes += [Market]

logging.info('Importing data providers')
reader = DictReader(open('data_providers.csv', 'r', encoding = 'UTF8'), delimiter = ';')
for row in reader:
    if row['trade_organizer_symbol'] == '':
        row['trade_organizer'] = None
    else:
        row['trade_organizer'] = FIDATA.market(row = {'symbol': row['trade_organizer_symbol']})
    FIDATA.dataProvider(row = row, write = True, tryGetFromDB = False)
del reader
commit()
classes += [DataProvider]

logging.info('Import of predefined data finished')
FIDATA.analyze(classes)
larsbegas/rip-master
sites/_minux_tst.py
Python
gpl-2.0
10,576
0.023166
#!/usr/bin/python from sys import exit import sys from site_imgsrc import imgsrc from site_imgur import imgur from site_deviantart import deviantart from site_photobucket import photobucket from site_flickr import flickr from site_twitter import twitter from site_tumblr import tumblr from site_instagram import instagram from site_imagefap import imagefap from site_imagebam import imagebam from site_imagearn import imagearn from site_xhamster import xhamster from site_getgonewild import getgonewild from site_anonib import anonib from site_motherless import motherless from site_4chan import fourchan from site_occ import occ from site_minus import minus from site_gifyo import gifyo from site_imgsrc import imgsrc from site_five00px import five00px from site_chickupload import chickupload from site_cghub import cghub from site_teenplanet import teenplanet from site_chansluts import chansluts from site_gonearch import gonearch from site_chanarchive import chanarchive from site_seenive import seenive try: #i = imgur('http://imgur.com/a/8vmpo/noscript') #i = imgur('http://scopolamina.imgur.com/') #i = imgur('http://fuckmyusername.imgur.com') #i = imgur('http://imgur.com/a/brixs') #i = imgur('http://imgur.com/a/nvE9y') #i = imgur('http://spicymustard.imgur.com/') # empty user acct #i = imagefap('http://www.imagefap.com/pictures/2885204/Kentucky-Craigslist') #i = imagefap('http://www.imagefap.com/pictures/3958759/Busty-Selfshooter') #i = imagefap('http://www.imagefap.com/pictures/3960306/teen-fun/') #i = imagebam('http://www.imagebam.com/gallery/3e4u10fk034871hs6idcil6txauu3ru6/') #i = imagebam('http://www.imagebam.com/image/1ca1ab109274357') #i = imagebam('http://www.imagebam.com/gallery/g23rwux1oz1g6n9gzjqw2k4e6yblqxdu') #i = deviantart('http://angelsfalldown1.deviantart.com/gallery/2498849') #i = deviantart('http://angelsfalldown1.deviantart.com/gallery/2498856') #i = deviantart('http://dreamersintheskies.deviantart.com/gallery/') # Gets more than gmi-total #i = 
deviantart('http://dreambaloon.deviantart.com/gallery/') #i = deviantart('http://easy-shutter.deviantart.com/gallery/42198389') #i = deviantart('http://garv23.deviantart.com') #i = deviantart('http://wrouinr.deviantart.com/') #i = photobucket('http://s579.beta.photobucket.com/user/merkler/library/') #i = photobucket('http://s1131.beta.photobucket.com/user/Beth_fan/library/') #i = photobucket('http://s1069.beta.photobucket.com/user/mandymgray/library/Album%203') #i = photobucket('http://s1
216.beta.photobucket.com/user/Liebe_Dich/prof
ile/') #i = flickr('http://www.flickr.com/photos/beboastar/sets/72157630130722172/') #i = flickr('https://secure.flickr.com/photos/peopleofplatt/sets/72157624572361792/with/6344610705/') #i = flickr('http://www.flickr.com/photos/rphotoit/sets/72157631879138251/with/8525941976/') #i = flickr('http://www.flickr.com/photos/29809540@N04/') #i = twitter('https://twitter.com/darrow_ashley') #i = twitter('https://twitter.com/lemandicandi') #i = twitter('https://twitter.com/MrNMissesSmith') #i = twitter('https://twitter.com/PBAprilLewis') # GONE #i = twitter('https://twitter.com/EversSecrets') # GONE #i = tumblr('http://caramiaphotography.tumblr.com/tagged/me') #i = tumblr('http://1fakeyfake.tumblr.com') #i = tumblr('http://mourning-sex.tumblr.com/tagged/me') #i = tumblr('http://i-was-masturbating-when-i.tumblr.com/') #i = instagram('http://web.stagram.com/n/glitterypubez/') #i = imagearn('http://imagearn.com/gallery.php?id=128805') #i = imagearn('http://imagearn.com/gallery.php?id=29839') #i = imagearn('http://imagearn.com/image.php?id=5046077') #i = xhamster('http://xhamster.com/photos/gallery/1306566/lovely_teen_naked_for_self_shots.html') #i = xhamster('http://xhamster.com/photos/gallery/1443114/cute_teens.html') #i = xhamster('http://xhamster.com/photos/gallery/1742221/amateur_black_girls_volume_4-2.html') #i = getgonewild('http://getgonewild.com/profile/EW2d') #i = getgonewild('http://getgonewild.com/s/miss_ginger_biscuit') #i = getgonewild('http://getgonewild.com/profile/yaymuffinss') #i = anonib('http://www.anonib.com/t/res/1780.html') #i = anonib('http://www.anonib.com/t/res/5019.html') #i = anonib('http://www.anonib.com/tblr/res/12475.html') #i = anonib('http://www.anonib.com/t/res/1780+50.html') #i = anonib('http://www.anonib.com/tblr/res/12475+50.html') #i = motherless('http://motherless.com/GI39ADA2C') #i = motherless('http://motherless.com/GABDCF08') #i = motherless('http://motherless.com/G7DC1B74') #i = motherless('http://motherless.com/GV9719092') #i = 
fourchan('http://boards.4chan.org/s/res/14035564') #i = occ('http://forum.oneclickchicks.com/showthread.php?t=137808') #i = occ('http://forum.oneclickchicks.com/showthread.php?t=102994') #i = occ('http://forum.oneclickchicks.com/album.php?albumid=12579') #i = occ('http://forum.oneclickchicks.com/showthread.php?t=146037') #i = minus('http://minus.com') #i = minus('http://.minus.com') #i = minus('http://i.minus.com') #i = minus('http://www.minus.com') #i = minus('http://zuzahgaming.minus.com/mF31aoo7kNdiM') #i = minus('https://nappingdoneright.minus.com/mu6fuBNNdfPG0') #i = minus('http://nappingdoneright.minus.com/mu6fuBNNdfPG0') #i = minus('https://nappingdoneright.minus.com/') #i = minus('https://nappingdoneright.minus.com') #i = minus('https://nappingdoneright.minus.com/uploads') #i = gifyo('http://gifyo.com/ccrystallinee/') #i = gifyo('http://gifyo.com/deanandhepburn/') # private #i = imgsrc('http://imgsrc.ru/main/pic.php?ad=774665') #i = imgsrc('http://imgsrc.ru/jp101091/26666184.html?pwd=&lang=en#') #i = imgsrc('http://imgsrc.ru/hugo004/21447611.html') #i = imgsrc('http://imgsrc.ru/fotoivanov/a661729.html') #i = imagefap('http://www.imagefap.com/pictures/1561127/young-porn-girlie-masterbating') #i = imagefap('http://www.imagefap.com/pictures/3883233/Maya-Black-Hot-Ts-2013') #i = xhamster('http://xhamster.com/photos/gallery/635024/kira_the_beautiful_busty_redhead_xxx.html') #i = five00px('http://500px.com/xxxsweetxxx') #i = imgur('http://imgur.com/r/realgirls/new/day/') #i = imgur('http://imgur.com/r/amateurarchives/top/all/') #i = chickupload('http://chickupload.com/gallery/106023/Z64FYY7Q') #i = deviantart('http://depingo.deviantart.com/gallery/') #i = teenplanet('http://photos.teenplanet.org/atomicfrog/Dromeus/Skinny_Babe_vs_Bfs_Cock') #i = cghub('http://wacomonkey.cghub.com/images/', urls_only=True) #i = fourchan('http://boards.4chan.org/s/res/14177077', urls_only=True) #i = anonib('http://www.anonib.com/azn/res/74347.html', urls_only=True) #i = 
chickupload('http://chickupload.com/gallery/30621/OMTDRPYU', urls_only=True) #i = deviantart('http://kindi-k.deviantart.com/gallery/', urls_only=True) #i = five00px('http://500px.com/xxxsweetxxx', urls_only=True) #i = getgonewild('http://getgonewild.com/profile/twoholes101', urls_only=True) #i = gifyo('http://gifyo.com/ccrystallinee/', urls_only=True) #i = imagearn('http://imagearn.com/gallery.php?id=226220', urls_only=True) #i = imagebam('http://www.imagebam.com/gallery/3e4u10fk034871hs6idcil6txauu3ru6/', urls_only=True) #i = imagefap('http://www.imagefap.com/pictures/2885204/Kentucky-Craigslist', urls_only=True) #i = imgsrc('http://imgsrc.ru/fotoivanov/a661729.html', urls_only=True) #i = imgur('http://imgur.com/a/brixs', urls_only=True) #i = instagram('http://web.stagram.com/n/glitterypubez/', urls_only=True) #i = minus('http://zuzahgaming.minus.com/mF31aoo7kNdiM', urls_only=True) #i = motherless('http://motherless.com/G7DC1B74', urls_only=True) #i = tumblr('http://caramiaphotography.tumblr.com/tagged/me', urls_only=True) #i = twitter('h
beqa2323/learntosolveit
languages/python/design_stack.py
Python
bsd-3-clause
507
0.013807
""" Implementation of stack data structure in Python. """ class Stack: def __init__(self,*vargs): self.stack
= list(vargs) def __repr__(self): return str(self.stack) def top(self): return self.stack[0] def push(self,elem): self.stack.insert(0,elem) def pop(self)
: return self.stack.pop(0) if __name__ == '__main__': stk = Stack(1,2,3,4) print stk print stk.top() stk.push(10) print stk print stk.pop() print stk
sburnett/seattle
seattlelib/tests/test_dylink_include.py
Python
mit
1,136
0.033451
""" Author: Armon Dadgar Description: This test checks that the dylink pre-processor methods are working properly by "including" the sockettimeout library. We then check that the functions work. This test uses the old "include" directive """ # Import the sockettimeout library include sockettimeout def new_conn(ip,port,sock,ch1,ch2): # Wait 3 seconds, then send data sleep(2) sock.send("Ping! Ping!") sock.close() if callfunc == "initialize": # Check that we have the basic openconn,waitforconn and stop
comm # This will throw an Attribute error if these are not set check = timeout_openconn check = timeout_waitforconn check = timeout_stopcomm # Get our ip ip = getmyip() port = 12345 # Setup a waitforconn waith = timeout_waitforconn(ip,port,new_conn) # Try to do a timeout openconn sock = timeout_openconn(ip,port,timeout=2)
# Set the timeout to 1 seconds, and try to read sock.settimeout(1) try: data = sock.recv(16) # We should timeout print "Bad! Got data: ",data except: pass # Close the socket, and shutdown sock.close() timeout_stopcomm(waith)
deepmind/acme
acme/jax/losses/impala.py
Python
apache-2.0
3,849
0.001299
# Copyright 2018 DeepMind Technologies Limited. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Loss function for IMPALA (Espeholt et al., 2018) [1]. [1] https://arxiv.org/abs/1802.01561 """ from typing import Callable from acme.agents.jax.impala import types from acme.jax import utils import haiku as hk import jax.numpy as jnp import numpy as np import reverb import rlax import tree def impala_loss( unroll_fn: types.PolicyValueFn, *, discount: float, max_abs_reward: float = np.inf, baseline_cost: float = 1., entropy_cost: float = 0., ) -> Callable[[hk.Params, reverb.ReplaySample], jnp.DeviceArray]: """Builds the standard entropy-regularised IMPALA loss function. Args: unroll_fn: A `hk.Transformed` object containing a callable which maps (params, observations_seque
nce, initial_state) -> ((logits, value), state) discount: The standard geometric discount rate to apply. max_abs_reward: Optional symmetric reward clipping to apply. b
aseline_cost: Weighting of the critic loss relative to the policy loss. entropy_cost: Weighting of the entropy regulariser relative to policy loss. Returns: A loss function with signature (params, data) -> loss_scalar. """ def loss_fn(params: hk.Params, sample: reverb.ReplaySample) -> jnp.DeviceArray: """Batched, entropy-regularised actor-critic loss with V-trace.""" # Extract the data. data = sample.data observations, actions, rewards, discounts, extra = (data.observation, data.action, data.reward, data.discount, data.extras) initial_state = tree.map_structure(lambda s: s[0], extra['core_state']) behaviour_logits = extra['logits'] # Apply reward clipping. rewards = jnp.clip(rewards, -max_abs_reward, max_abs_reward) # Unroll current policy over observations. (logits, values), _ = unroll_fn(params, observations, initial_state) # Compute importance sampling weights: current policy / behavior policy. rhos = rlax.categorical_importance_sampling_ratios(logits[:-1], behaviour_logits[:-1], actions[:-1]) # Critic loss. vtrace_returns = rlax.vtrace_td_error_and_advantage( v_tm1=values[:-1], v_t=values[1:], r_t=rewards[:-1], discount_t=discounts[:-1] * discount, rho_tm1=rhos) critic_loss = jnp.square(vtrace_returns.errors) # Policy gradient loss. policy_gradient_loss = rlax.policy_gradient_loss( logits_t=logits[:-1], a_t=actions[:-1], adv_t=vtrace_returns.pg_advantage, w_t=jnp.ones_like(rewards[:-1])) # Entropy regulariser. entropy_loss = rlax.entropy_loss(logits[:-1], jnp.ones_like(rewards[:-1])) # Combine weighted sum of actor & critic losses, averaged over the sequence. mean_loss = jnp.mean(policy_gradient_loss + baseline_cost * critic_loss + entropy_cost * entropy_loss) # [] return mean_loss return utils.mapreduce(loss_fn, in_axes=(None, 0))
Fablab-Sevilla/ghPython101
Día_002/01_EJ/T_002/Peter random walk final.py
Python
mit
1,275
0.026709
import rhinoscriptsyntax as rs #Importamos random import random as rn #Creamos seed y listas para los puntos, lineas y triangulos rn.seed(s) pts = [pt0] lines = [] triangles = [] newptList = [pt0] #Iteramos para la creacion de cada punto linea y triangulo for i in range(It): #los puntos en polares con angulos y pasos controladp angulos = rn.randint(Amin,AMAX) steps = rn.randint(Rmin,RMAX) NewPts = rs.Polar((pts[-1]),angulos,steps) pts.append(NewPts) #Una vez creado los puntos creamos los triangulos, #Primero la linea y el punto medio a = pts[i] b = pts[i+1] line = rs.AddLine(a,b) lines.append(line) #Sacamos el vector normal a la recta y lo escalam
os por un random rnleng = ((rn.randint(3,5))/10) z = rs.CurveN
ormal(line) vector = rs.VectorCreate(a,b) nor = rs.VectorCrossProduct(vector,z) normal = rs.VectorScale(nor,rnleng) trans1 = rs.XformTranslation(normal) trans2 = rs.XformTranslation(rs.VectorReverse(normal)) #Desplazamos los puntos medios el vector normal escalado newpts1 = rs.PointTransform(a,trans1) newpts2 = rs.PointTransform(a,trans2) tri = rs.AddPolyline([b,newpts1,newpts2,b]) triangles.append(tri) ptList = pts
ares/robottelo
robottelo/cli/org.py
Python
gpl-3.0
6,156
0
# -*- encoding: utf-8 -*- """ Usage:: hammer organization [OPTIONS] SUBCOMMAND [ARG] ... Parameters:: SUBCOMMAND subcommand [ARG] ... subcommand arguments Subcommands:: add-computeresource Associate a resource add-configtemplate Associate a resource add-domain Associate a resource add-environment Associate a resource add-hostgroup Associate a resource add-location Associate a location add-medium Associate a resource add-smartproxy Associate a resource add-subnet Associate a resource add-user Associate a resource create Create an organization delete Delete an organization delete-parameter Delete parameter for an organization. info Show an organization list List all organizations remove_computeresource Disassociate a resource remove_configtemplate Disassociate a resource remove_domain Disassociate a resource remove_environment Disassociate a resource remove_hostgroup Disassociate a resource remove-location Disassociate a location remove_medium Disassociate a resource remove_smartproxy Disassociate a resource remove_subnet Disassociate a resource remove_user Disassociate a resource set-parameter Create or update parameter for an organization. 
update Update an organization """ from robottelo.cli.base import Base class Org(Base): """Manipulates Foreman's Organizations""" command_base = 'organization' @classmethod def add_compute_resource(cls, options=None): """Adds a computeresource to an org""" cls.command_sub = 'add-compute-resource' return cls.execute(cls._construct_command(options)) @classmethod def remove_compute_resource(cls, options=None): """Removes a computeresource from an org""" cls.command_sub = 'remove-compute-resource' return cls.execute(cls._construct_command(options)) @classmethod def add_config_template(cls, options=None): """Adds a configtemplate to an org""" cls.command_sub = 'add-config-template' return cls.execute(cls._construct_command(options)) @classmethod def remove_config_template(cls, options=None): """Removes a configtemplate from an org""" cls.command_sub = 'remove-config-template' return cls.execute(cls._construct_command(options)) @classmethod def add_domain(cls, options=None): """Adds a domain to an org""" cls.command_sub = 'add-domain' return cls.execute(cls._construct_command(options)) @classmethod def remove_domain(cls, options=None): """Removes a domain from an org""" cls.command_sub = 'remove-domain' return cls.execute(cls._construct_command(options)) @classmethod def add_environment(cls, options=None): """Adds an environment to an org""" cls.command_sub = 'add-environment' return cls.execute(cls._construct_command(options
)) @classmethod def remove_environment(cls, options=None): """Removes an environment from an org""" cls.command_sub = 'remove-environment' return cls.execute(cls._construct_command(options)) @classmethod def add_hostgroup(cls, options=None): """Adds a hostgroup to an org""" cls.command_sub = 'add-hostgroup' return cls.execute(cls._construct_command(options)) @classmethod def remove_hostgroup(cls, options=None): "
""Removes a hostgroup from an org""" cls.command_sub = 'remove-hostgroup' return cls.execute(cls._construct_command(options)) @classmethod def add_location(cls, options=None): """Adds a location to an org""" cls.command_sub = 'add-location' return cls.execute(cls._construct_command(options)) @classmethod def remove_location(cls, options=None): """Removes a location from an org""" cls.command_sub = 'remove-location' return cls.execute(cls._construct_command(options)) @classmethod def add_medium(cls, options=None): """Adds a medium to an org""" cls.command_sub = 'add-medium' return cls.execute(cls._construct_command(options)) @classmethod def remove_medium(cls, options=None): """Removes a medium from an org""" cls.command_sub = 'remove-medium' return cls.execute(cls._construct_command(options)) @classmethod def add_smart_proxy(cls, options=None): """Adds a smartproxy to an org""" cls.command_sub = 'add-smart-proxy' return cls.execute(cls._construct_command(options)) @classmethod def remove_smart_proxy(cls, options=None): """Removes a smartproxy from an org""" cls.command_sub = 'remove-smart-proxy' return cls.execute(cls._construct_command(options)) @classmethod def add_subnet(cls, options=None): """Adds existing subnet to an org""" cls.command_sub = 'add-subnet' return cls.execute(cls._construct_command(options)) @classmethod def remove_subnet(cls, options=None): """Removes a subnet from an org""" cls.command_sub = 'remove-subnet' return cls.execute(cls._construct_command(options)) @classmethod def add_user(cls, options=None): """Adds an user to an org""" cls.command_sub = 'add-user' return cls.execute(cls._construct_command(options)) @classmethod def remove_user(cls, options=None): """Removes an user from an org""" cls.command_sub = 'remove-user' return cls.execute(cls._construct_command(options))
mjirik/imtools
imtools/gt_lar_smooth.py
Python
mit
14,966
0.00254
#! /usr/bin/python # -*- coding: utf-8 -*- """ Generator of histology report """ import logging logger = logging.getLogger(__name__) # import funkcí z jiného adresáře import sys import os.path path_to_script = os.path.dirname(os.path.abspath(__file__)) sys.path.append(os.path.join(path_to_script, "../../lar-cc/lib/py/")) sys.path.append(os.path.join(path_to_script, "../lisa/extern")) sys.path.append(os.path.join(path_to_script, "../../pyplasm/src/pyplasm")) import numpy as np from scipy import mat, cos, sin from larcc import VIEW, MKPOL, AA, INTERVALS, STRUCT, MAP, PROD from larcc import UNITVECT, VECTPROD, PI, SUM, CAT, IDNT, UNITVECT from splines import BEZIER, S1, S2, COONSPATCH # from splines import * # import mapper #import hpc #import pyplasm.hpc import geometry3d as g3 import interpolation_pyplasm as ip import skelet3d import skelet3d.gt_lar_smooth from skelet3d.gt_lar_smooth import GTLarSmooth logger.warning("Module is moved to package skelet3d.gt_vtk. This placeholder will be removed in future") # class GTLarSmooth: # # def __init__(self, gtree=None): # """ # gtree is information about input data structure. 
# endDistMultiplicator: make cylinder shorter by multiplication of radius # """ # # input of geometry and topology # self.V = [] # self.CV = [] # self.joints = {} # self.joints_lar = [] # self.gtree = gtree # self.endDistMultiplicator = 2 # self.use_joints = True # #dir(splines) # pass # # def add_cylinder(self, nodeA, nodeB, radius, cylinder_id): # # try: # idA = tuple(nodeA) # self.gtree.tree_data[cylinder_id]['nodeIdA'] # idB = tuple(nodeB) # self.gtree.tree_data[cylinder_id]['nodeIdB'] # except: # idA = 0 # idB = 0 # self.use_joints = False # # vect = np.array(nodeA) - np.array(nodeB) # u = g3.perpendicular_vector(vect) # u = u / np.linalg.norm(u) # u = u.tolist() # vect = vect.tolist() # # # # c1 = self.__circle(nodeA, radius, vect) # c2 = self.__circle(nodeB, radius, vect) # tube = BEZIER(S2)([c1,c2]) # domain = PROD([ INTERVALS(2*PI)(36), INTERVALS(1)(4) ]) # tube = MAP(tube)(domain) # # # self.joints_lar.append(tube) # # # #self.__draw_circle(nodeB, vect, radius) # # ##vector = (np.array(nodeA) - np.array(nodeB)).tolist() # # # mov circles to center of cylinder by size of radius because of joint # ##nodeA = g3.translate(nodeA, vector, # ## -radius * self.endDistMultiplicator) # ##nodeB = g3.translate(nodeB, vector, # ## radius * self.endDistMultiplicator) # # ##ptsA, ptsB = g3.cylinder_circles(nodeA, nodeB, radius, element_number=32) # ##CVlistA = self.__construct_cylinder_end(ptsA, idA, nodeA) # ##CVlistB = self.__construct_cylinder_end(ptsB, idB, nodeB) # # ##CVlist = CVlistA + CVlistB # # ##self.CV.append(CVlist) # # # lar add ball # # ball0 = mapper.larBall(radius, angle1=PI, angle2=2*PI)([10, 16]) # # V, CV = ball0 # # # mapper.T # # # ball = STRUCT(MKPOLS(ball0)) # # # # # mapper.T(1)(nodeA[0])(mapper.T(2)(nodeA[1])(mapper.T(3)(nodeA[1])(ball))) # # # # lenV = len(self.V) # # # # self.V = self.V + (np.array(V) + np.array(nodeA)).tolist() # # self.CV = self.CV + (np.array(CV) + lenV).tolist() # # def __circle(self, 
center=[0,0,0],radius=1,normal=[0,0,1],sign=1,shift=0): # N = UNITVECT(normal) # if N == [0,0,1] or N == [0,0,-1]: Q = mat(IDNT(3)) # else: # QX = UNITVECT((VECTPROD([[0,0,1],N]))) # QZ = N # QY = VECTPROD([QZ,QX]) # Q = mat([QX,QY,QZ]).T # def circle0(p): # u = p[0] # x = radius*cos(sign*u+shift) #
y
= radius*sin(sign*u+shift) # z = 0 # return SUM([ center, CAT((Q*[[x],[y],[z]]).tolist()) ]) # return circle0 # # # def __construct_cylinder_end(self, pts, id, node): # """ # creates end of cylinder and prepares for joints # """ # CVlist = [] # # base # ln = len(self.V) # # for i, pt in enumerate(pts): # self.V.append(pt) # CVlist.append(ln + i) # # try: # self.joints[id].append([node, CVlist]) # except: # self.joints[id] = [[node, CVlist]] # # return CVlist # # def __add_old_cylinder(self, nodeA, nodeB, radius): # """ # deprecated simple representation of cylinder # """ # nodeA = np.array(nodeA) # nodeB = np.array(nodeB) # # ln = len(self.V) # self.V.append(nodeB.tolist()) # self.V.append((nodeB + [2, 0, 0]).tolist()) # self.V.append((nodeB + [2, 2, 0]).tolist()) # self.V.append((nodeB + [2, 2, 2]).tolist()) # self.V.append((nodeA + [0, 0, 0]).tolist()) # self.CV.append([ln, ln + 1, ln + 2, ln + 3, ln + 4]) # # def finish(self): # print 'use joints? ', self.use_joints # if self.use_joints: # for joint in self.joints.values(): # # There is more then just one circle in this joint, so it # # is not end of vessel # if len(joint) > 1: # self.__generate_joint(joint) # # # def __half_plane(self, perp, plane_point, point): # cdf = (np.array(point) - np.array(plane_point)) # out = perp[0] * cdf[0] +\ # perp[1] * cdf[1] + \ # perp[2] * cdf[2] # return out > 0 # # def __get_vessel_connection_curve(self, vessel_connection, perp, vec0, vec1): # """ # perp is perpendicular to plane given by centers of circles # vec1, vec0 are vectors from circle centers # """ # curve_t = [] # curve_d = [] # curve_pts_indexes_t = [] # curve_pts_indexes_d = [] # brake_point_t = None # brake_point_d = None # center, circle = vessel_connection # # # left to right # perp_lr = np.cross(perp, vec1) # # print 'center ', center # print 'circle ', circle # for vertex_id in circle: # if ((len(curve_pts_indexes_t) > 0) and # (vertex_id - curve_pts_indexes_t[-1]) > 1): # brake_point_t = 
len(curve_pts_indexes_t) # if ((len(curve_pts_indexes_d) > 0) and # (vertex_id - curve_pts_indexes_d[-1]) > 1): # brake_point_d = len(curve_pts_indexes_d) # # #hp = self.__half_plane(perp_lr, center, self.V[vertex_id]) # hp = self.__half_plane(perp, center, self.V[vertex_id]) # # # if(hp): # curve_t.append(self.V[vertex_id]) # curve_pts_indexes_t.append(vertex_id) # else: # curve_d.append(self.V[vertex_id]) # curve_pts_indexes_d.append(vertex_id) # # ordered_curve_t = curve_t[brake_point_t:] + curve_t[:brake_point_t] # ordered_pts_indexes_t = \ # curve_pts_indexes_t[brake_point_t:] +\ # curve_pts_indexes_t[:brake_point_t] # # ordered_curve_d = curve_d[brake_point_d:] + curve_d[:brake_point_d] # ordered_pts_indexes_d = \ # curve_pts_indexes_d[brake_point_t:] +\ # curve_pts_indexes_d[:brake_point_d] # #print ' hp v id ', curve_pts_indexes_t # #print 'ord hp v id ', ordered_pts_indexes_t # # #print 'hp circle ', curve_one # # # add point from oposit half-circle # first_pt_d = ordered_curve_d[0] # last_pt_d = ordered_curve_d[-1] # first_pt_t = ordered_curve_t[0] # last_pt_t = ordered_curve_t[-1] # # ordered_curve_t.append(first_pt_d) # ordered_curve_t.insert(0, last_pt_d) # # ordered_curve_d.append(first_pt_t) # ordered_curve_d.insert(0, last_pt_t) # #
IsmaelRLG/simpbot
simpbot/irc.py
Python
mit
16,036
0.000873
# -*- coding: utf-8 -*- # Simple Bot (SimpBot) # Copyright 2016-2017, Ismael Lugo (kwargs) import re import sys import ssl import time import logging import socket from six import binary_type from six import string_types from six import PY3 as python3 from six.moves import _thread from six.moves import queue from . import buffer from . import features from . import __version__ from .bottools import text from . import localedata from . import envvars from .schedule import basic_scheduler as scheduler i18n = localedata.get() Logger = logging.getLogger('simpbot') regexmsg = re.compile( ':(?P<mask>(?P<nick>.+)!(?P<user>.+)@(?P<host>[^ ]+)) ' '(?P<type>PRIVMSG|NOTICE) (?P<target>[^ ]+) :(?P<message>.+)', 2) sche_name = lambda nw, nm: '{network}-{name}'.format(network=nw, name=nm) lg_format = [] class client: dispatcher_added = False dispatcher_
dict = {} def __init__(self, netw, addr, port, nick, user, nickserv=None, sasl=None, timeout=240, msgps=.5, wtime=30, servpass=None, prefix='!', lang=None, plaintext={ 'recv': ['msg', 'jpqk', 'mode'], 'send': ['msg', 'jpqk', 'mode']}): # msg PRIVMSG, NOTICE # jpqk JOIN, PART, QUIT, KICK # mode CHANNEL AND USER MODES # plane plain text self.logger = logging.getLogger(net
w) if envvars.daemon is True: fs = '%(asctime)s %(levelname)s: %(message)s' handler = logging.FileHandler(envvars.logs.join(netw).lower(), 'a') else: fs = '%(levelname)s: irc-client(%(name)s): %(message)s' handler = logging.StreamHandler(sys.stdout) if not netw in lg_format: handler.setFormatter(logging.Formatter(fs, None)) self.logger.addHandler(handler) self.logger.propagate = 0 lg_format.append(netw) self.connection_status = 'n' self.input_alive = False self.output_alive = False self.lock = False self.socket = None self.input_buffer = None self.output_buffer = queue.Queue() self.features = features.FeatureSet() self.plaintext = plaintext self.default_lang = lang self.dbstore = None self.request = None self.commands = None self.autoconnect = False self.conf_path = None self.max_chars = 256 # IRC - Default self.servname = netw self.addr = addr self.ssl = False if isinstance(port, string_types): if port.startswith('+'): self.ssl = True port = port.replace('+', '') if port.isdigit(): port = int(port) else: port = 6667 elif isinstance(port, float) or isinstance(port, int): port = int(port) self.port = port # IRC - Extra self.servpass = servpass self.nickserv = nickserv self.usens = bool(self.nickserv) self.sasl = sasl self.timeout = timeout self.msgps = msgps self.wtime = wtime self.prefix = prefix std = sche_name(netw, 'std') self.scheduler_std = scheduler(std, self, envvars.jobs.join(std)) self.scheduler_std.load() self.scheduler_std.start() ban = sche_name(netw, 'ban') self.scheduler_ban = scheduler(ban, self, envvars.jobs.join(ban)) self.scheduler_ban.load() self.scheduler_ban.start() if nick == "" or nick[0].isdigit(): nick = text.randphras(l=7, upper=False, nofd=True) if user == "" or user[0].isdigit(): user = text.randphras(l=7, upper=False, nofd=True) self.nickname = nick self.username = user def __del__(self): self.disconnect() self.set_status('s') if self.dbstore: self.dbstore.save() def set_status(self, modes): """ mode: n: No connected c: Connected r: 
Connected and loged p: Concection lost d: Disconnected """ self.connection_status = modes[0] def connect(self, servpass=None, attempts=0): if not self.connection_status in 'np': return attempt = 0 while attempt <= attempts: try: self.socket = socket.socket() self.socket.settimeout(self.timeout) self.input_buffer = buffer.LineBuffer() if self.ssl: self.socket = ssl.wrap_socket(self.socket) self.socket.connect((self.addr, self.port)) self.set_status('c') break except Exception as error: self.logger.error(i18n['connection failure'], self.addr, self.port, str(error)) self.logger.info(i18n['retrying connect'] % self.wtime) time.sleep(self.wtime) if attempts == 1: attempt += 2 elif attempts > 0: attempt += 1 else: return True remote_addr = self.socket.getpeername()[0] self.logger.info(i18n['connected'], self.addr, remote_addr, self.port) if servpass is not None: self.servpass = servpass if self.servpass is not None: self.passwd(self.servpass) if self.nickserv is None: return elif self.sasl: # Simple Authentication and Security Layer (SASL) - RFC 4422 # Copyright (C) The Internet Society (2006). 
pw = '{0}\0{0}\0{1}'.format(self.nickserv[0], self.nickserv[1]) self.send_raw('AUTHENTICATE PLAIN') self.send_raw('AUTHENTICATE ' + pw.encode('base64')) def check_plaintext(self, pos, opt): if pos in self.plaintext: if 'all' in self.plaintext[pos]: return False return opt in self.plaintext[pos] else: return False @property def connected(self): return self.connection_status == 'c' or self.connection_status == 'r' def reconnect(self, msg=""): self.disconnect(msg) if self.connection_status == 'd': self.set_status('n') self.try_connect() def disconnect(self, msg=""): if self.connection_status in 'cr': self.quit(msg) time.sleep(2.5) self.set_status('d') if self.socket: try: self.socket.close() except: pass if self.request: self.request.reset() self.output_buffer.put(0) def login(self): self.user(self.username, self.nickname) self.nick(self.nickname) def output(self): while self.connection_status in 'crp': self.output_alive = True try: text = self.output_buffer.get(timeout=self.timeout) except queue.Empty: if self.connection_status in 'cr': self.set_status('p') self.try_connect() if text == 0: break if isinstance(text, string_types): if python3: message = text.encode() + b'\r\n' else: message = text + '\r\n' else: self.logger.warning(i18n['invalid message']) continue if len(text) > 512: self.logger.warrning(i18n['invalid message size']) continue try: self.socket.send(message) except socket.error: if self.connection_status in 'cr': self.set_status('p') self.try_connect() if 'send' in self.plaintext and 'all' in self.plaintext['send']: self.logger.info(i18n['output'], text) time.sleep(self.msgps) else: self.input_alive =
jcftang/ansible
lib/ansible/module_utils/ios.py
Python
gpl-3.0
2,926
0.005126
# This code is part of Ansible, but is an independent component. # This particular file snippet, and this file snippet only, is BSD licensed. # Modules you write using this snippet, which is embedded dynamically by Ansible # still belong to the author of the module, and may assign their own license # to the complete work. # # (c) 2016 Red Hat Inc. # # Redistribution and use in source and binary forms, with or without modification, # are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND # ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. # IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE # USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
# from ansible.module_utils.network_common import to_list _DEVICE_CONFIGS = {} def get_config(module, flags=[]): cmd = 'show running-config ' cmd += ' '.join(flags) cmd = cmd.strip() try: return _DEVICE_CONFIGS[cmd] except KeyError: rc, out, err = module.exec_command(cmd) if rc != 0: module.fail_json(msg='unable to retrieve current config', stderr=err) cfg = str(out).strip() _DEVICE_CONFIGS[cmd] = cfg return cfg def run_commands(module, commands, check_rc=True): responses = list() for cmd in to_list(commands): rc, out, err = module.exec_command(cmd) if check_rc and rc != 0: module.fail_json(msg=err, rc=rc) responses.append(out) ret
urn responses def load_config(module, commands): assert isinstance(commands, list), 'commands must be a list' rc, out, err = module.exec_command('configure terminal') if rc != 0: module.fail_json(msg='unable to enter configuration mode', err=err) for command in commands: if command == 'end': continue rc, out, err = module.ex
ec_command(command) if rc != 0: module.fail_json(msg=err, command=command, rc=rc) module.exec_command('end')
david-hoffman/scripts
test_imreg_dph.py
Python
apache-2.0
575
0
import numpy as np from imreg_dph import AffineTransform from nose.tools import * def test_translation(): """make sure tranlation works""" # AffineTransform Tests af1 = AffineTransform(translation=(1, 2)) af2 = AffineTransform(translation=(5, 3)) af3 = af1 @ af2 assert np.array_equal(af3.translation, (6, 5)) assert af3 ==
af2 @ af1 def test_rotation(): """Test that rotation works""" af1 = AffineTransform(rotation=2) af2 = AffineTransform(rotation=1) af3 = af1 @ af2
assert af3.rotation == 3 assert af3 == af2 @ af1
allenai/allennlp
allennlp/training/scheduler.py
Python
apache-2.0
3,040
0.003289
from typing import Dict, Any import torch class Scheduler: """ A `Scheduler` is a generalization of PyTorch learning rate schedulers. A scheduler can be used to update any field in an optimizer's parameter groups, not just the learning rate. During training using the AllenNLP `Trainer`, this is the API and calling sequence for `step` and `step_batch`:: scheduler = ... # creates scheduler batch_num_total = 0 for epoch in range(num_epochs): for batch in batchs_in_epoch: # compute loss, update parameters with current learning rates # call step_batch AFTER updating parameters batch_num_total += 1 scheduler.step_batch(batch_num_total) # call step() at the END of each epoch scheduler.step(validation_metrics, epoch) """ def __init__( self, optimizer: torch.optim.Optimizer, param_group_field: str, last_epoch
: int = -1 ) -> None: self.optimizer = optimizer self.param_group_field = param_group_field self._initial_param_group_field = f"initial_{param_group_field}" if last_epoch == -1: for i, group in enumerate(self.optimizer.param_groups):
if param_group_field not in group: raise KeyError(f"{param_group_field} missing from param_groups[{i}]") group.setdefault(self._initial_param_group_field, group[param_group_field]) else: for i, group in enumerate(self.optimizer.param_groups): if self._initial_param_group_field not in group: raise KeyError( f"{self._initial_param_group_field} missing from param_groups[{i}]" ) self.base_values = [ group[self._initial_param_group_field] for group in self.optimizer.param_groups ] self.last_epoch = last_epoch def state_dict(self) -> Dict[str, Any]: """ Returns the state of the scheduler as a `dict`. """ return {key: value for key, value in self.__dict__.items() if key != "optimizer"} def load_state_dict(self, state_dict: Dict[str, Any]) -> None: """ Load the schedulers state. # Parameters state_dict : `Dict[str, Any]` Scheduler state. Should be an object returned from a call to `state_dict`. """ self.__dict__.update(state_dict) def get_values(self): raise NotImplementedError def step(self, metric: float = None) -> None: self.last_epoch += 1 self.metric = metric for param_group, value in zip(self.optimizer.param_groups, self.get_values()): param_group[self.param_group_field] = value def step_batch(self, batch_num_total: int = None) -> None: """ By default, a scheduler is assumed to only update every epoch, not every batch. So this does nothing unless it's overriden. """ return
luxnovalabs/enjigo_door
web_interface/keyedcache/test_app/manage.py
Python
unlicense
668
0.005988
#!/usr/bin/env python import os.path import sys DIRNAME = os.path.dirname(__file__) if not DIRNAME in sys.path: sys.path.append(DIRNAME) from django.core.management import execute_manager try: import settings # Assumed to be in the s
ame directory. except ImportError: import sys sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'l
l have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__) sys.exit(1) if __name__ == "__main__": execute_manager(settings)
chenke91/ihaveablog
app/__init__.py
Python
mit
1,126
0.000888
f
rom flask import Flask, render_template from flask.ext.bootstrap import Bootstrap from flask.ext.moment import Moment from flask.ext.sqlalchemy import SQLAlchemy from flask.ext.login import LoginManager from flask.ext.uploads import UploadSet, configure_uploads, IMAGES from config import config bootstrap = Bootstrap() moment = Moment() db = SQLAlchemy() login_manager = LoginManager() login_manager.session_protection = 'strong' login_manager.login_view =
'auth.login' avatars = UploadSet('avatars', IMAGES) def create_app(config_name): app = Flask(__name__) app.config.from_object(config[config_name]) config[config_name].init_app(app) bootstrap.init_app(app) moment.init_app(app) db.init_app(app) login_manager.init_app(app) configure_uploads(app, (avatars)) from .main import main as main_blueprint from .auth import auth as auth_blueprint from .admin import admin as admin_blueprint app.register_blueprint(main_blueprint) app.register_blueprint(auth_blueprint, url_prefix='/auth') app.register_blueprint(admin_blueprint, url_prefix='/admin') return app
alex/mongoengine
tests/document.py
Python
mit
19,227
0.002236
import unittest from datetime import datetime import pymongo from mongoengine import * from mongoengine.base import BaseField from mongoengine.connection import _get_db class DocumentTest(unittest.TestCase): def setUp(self): connect(db='mongoenginetest') self.db = _get_db() class Person(Document): name = StringField() age = IntField() self.Person = Person def test_drop_collection(self): """Ensure that the collection may be dropped from the database. """ self.Person(name='Test').save() collection = self.Person._meta['collection'] self.assertTrue(collection in self.db.collection_names()) self.Person.drop_collection() self.assertFalse(collection in self.db.collection_names()) def test_definition(self): """Ensure that document may be defined using fields. """ name_field = StringField() age_field = IntField() class Person(Document): name = name_field age = age_field non_field = True self.assertEqual(Person._fields['name'], name_field) self.assertEqual(Person._fields['age'], age_field) self.assertFalse('non_field' in Person._fields) self.assertTrue('id' in Person._fields) # Test iteration over fields fields = list(Person()) self.assertTrue('name' in fields and 'age' in fields) # Ensure Document isn't treated like an actual document self.assertFalse(hasattr(Document, '_fields')) def test_get_superclasses(self): """Ensure that the correct list of superclasses is assembled. """ class Animal(Document): pass class Fish(Animal): pass class Mammal(Animal): pass class Human(Mammal): pass class Dog(Mammal): pass mammal_superclasses = {'Animal':
Animal} self.assertEqual(Mammal._superclasses, mammal_superclasses) dog_superclasses = { 'Animal': Animal, 'Animal.Mammal': Mammal, } self.assertEqual(Dog._superclasses, dog_superclasses) def test_get_subclasses(self): """Ensure
that the correct list of subclasses is retrieved by the _get_subclasses method. """ class Animal(Document): pass class Fish(Animal): pass class Mammal(Animal): pass class Human(Mammal): pass class Dog(Mammal): pass mammal_subclasses = { 'Animal.Mammal.Dog': Dog, 'Animal.Mammal.Human': Human } self.assertEqual(Mammal._get_subclasses(), mammal_subclasses) animal_subclasses = { 'Animal.Fish': Fish, 'Animal.Mammal': Mammal, 'Animal.Mammal.Dog': Dog, 'Animal.Mammal.Human': Human } self.assertEqual(Animal._get_subclasses(), animal_subclasses) def test_polymorphic_queries(self): """Ensure that the correct subclasses are returned from a query""" class Animal(Document): pass class Fish(Animal): pass class Mammal(Animal): pass class Human(Mammal): pass class Dog(Mammal): pass Animal().save() Fish().save() Mammal().save() Human().save() Dog().save() classes = [obj.__class__ for obj in Animal.objects] self.assertEqual(classes, [Animal, Fish, Mammal, Human, Dog]) classes = [obj.__class__ for obj in Mammal.objects] self.assertEqual(classes, [Mammal, Human, Dog]) classes = [obj.__class__ for obj in Human.objects] self.assertEqual(classes, [Human]) Animal.drop_collection() def test_inheritance(self): """Ensure that document may inherit fields from a superclass document. """ class Employee(self.Person): salary = IntField() self.assertTrue('name' in Employee._fields) self.assertTrue('salary' in Employee._fields) self.assertEqual(Employee._meta['collection'], self.Person._meta['collection']) # Ensure that MRO error is not raised class A(Document): pass class B(A): pass class C(B): pass def test_allow_inheritance(self): """Ensure that inheritance may be disabled on simple classes and that _cls and _types will not be used. 
""" class Animal(Document): meta = {'allow_inheritance': False} name = StringField() Animal.drop_collection() def create_dog_class(): class Dog(Animal): pass self.assertRaises(ValueError, create_dog_class) # Check that _cls etc aren't present on simple documents dog = Animal(name='dog') dog.save() collection = self.db[Animal._meta['collection']] obj = collection.find_one() self.assertFalse('_cls' in obj) self.assertFalse('_types' in obj) Animal.drop_collection() def create_employee_class(): class Employee(self.Person): meta = {'allow_inheritance': False} self.assertRaises(ValueError, create_employee_class) # Test the same for embedded documents class Comment(EmbeddedDocument): content = StringField() meta = {'allow_inheritance': False} def create_special_comment(): class SpecialComment(Comment): pass self.assertRaises(ValueError, create_special_comment) comment = Comment(content='test') self.assertFalse('_cls' in comment.to_mongo()) self.assertFalse('_types' in comment.to_mongo()) def test_collection_name(self): """Ensure that a collection with a specified name may be used. """ collection = 'personCollTest' if collection in self.db.collection_names(): self.db.drop_collection(collection) class Person(Document): name = StringField() meta = {'collection': collection} user = Person(name="Test User") user.save() self.assertTrue(collection in self.db.collection_names()) user_obj = self.db[collection].find_one() self.assertEqual(user_obj['name'], "Test User") user_obj = Person.objects[0] self.assertEqual(user_obj.name, "Test User") Person.drop_collection() self.assertFalse(collection in self.db.collection_names()) def test_capped_collection(self): """Ensure that capped collections work properly. 
""" class Log(Document): date = DateTimeField(default=datetime.now) meta = { 'max_documents': 10, 'max_size': 90000, } Log.drop_collection() # Ensure that the collection handles up to its maximum for i in range(10): Log().save() self.assertEqual(len(Log.objects), 10) # Check that extra documents don't increase the size Log().save() self.assertEqual(len(Log.objects), 10) options = Log.objects._collection.options() self.assertEqual(options['capped'], True) self.assertEqual(options['max'], 10) self.assertEqual(options['size'], 90000) # Check that the document cannot be redefined with different options def recreate_log_document(): class Log(Document): date = DateTimeField(default=datetime.now) meta = { 'max_documents': 11, } # Create the collection by accessing Document.objects Log.objects self.assertRaises(InvalidCollectionError, recreate_log_document) Log.drop_collection() def test_indexes(self): """Ensure that indexes are used when meta[indexes] is specified. """ class BlogPost(Document): date = DateTimeField(db_field='addDate', default=datetime.now) category = StringField() tags = ListField(St
kernel-sanders/arsenic-mobile
Dependencies/Twisted-13.0.0/doc/names/examples/testdns.py
Python
gpl-3.0
1,653
0
#!/usr/bin/env python # Copyright (c) Twisted Matrix Laboratories. # See LICENSE for details. """ Prints the results of an Address record lookup, Mail-Exchanger record lookup, and Nameserver record loo
kup for the given hostname for a given hostname. To run this script: $ python testdns.py <hostname> e.g.: $ python testdns.py www.google.com """ import sys from twisted.names import client from twisted.internet import defer, reactor from twisted.names import dns, error r = client.Resolver('/etc/resolv.conf') def formatResult(a, heading): answer, authority, additional = a lines = ['# ' + heading] for a in answer: line = [ a.name, dns.QU
ERY_CLASSES.get(a.cls, 'UNKNOWN (%d)' % (a.cls,)), a.payload] lines.append(' '.join(str(word) for word in line)) return '\n'.join(line for line in lines) def printError(f): f.trap(defer.FirstError) f = f.value.subFailure f.trap(error.DomainError) print f.value.__class__.__name__, f.value.message.queries def printResults(res): for r in res: print r print if __name__ == '__main__': domainname = sys.argv[1] d = defer.gatherResults([ r.lookupAddress(domainname).addCallback( formatResult, 'Addresses'), r.lookupMailExchange(domainname).addCallback( formatResult, 'Mail Exchangers'), r.lookupNameservers(domainname).addCallback( formatResult, 'Nameservers'), ], consumeErrors=True) d.addCallbacks(printResults, printError) d.addBoth(lambda ign: reactor.stop()) reactor.run()
openmotics/gateway
src/gateway/apartment_controller.py
Python
agpl-3.0
9,287
0.0028
# Copyright (C) 2021 OpenMotics BV # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. """ apartment controller manages the apartment objects that are known in the system """ import logging from gateway.events import EsafeEvent, EventError from gateway.exceptions import ItemDoesNotExistException, StateException from gateway.models import Apartment, Database from gateway.mappers import ApartmentMapper from gateway.dto import ApartmentDTO from gateway.pubsub import PubSub from ioc import INJECTED, Inject, Injectable, Singleton if False: # MyPy from typing import List, Optional, Dict, Any from esafe.rebus import RebusController logger = logging.getLogger(__name__) @Injectable.named('apartment_controller') @Singleton class ApartmentController(object): def __init__(self): self.rebus_controller = None # type: Optional[RebusController] def set_rebus_controller(self, rebus_controller): self.rebus_controller = rebus_controller @staticmethod @Inject def send_config_change_event(msg, error=EventError.ErrorTypes.NO_ERROR, pubsub=INJECTED): # type: (str, Dict[str, Any], PubSub) -> None event = EsafeEvent(EsafeEvent.Types.CONFIG_CHANGE, {'type': 'apartment', 'msg': msg}, error=error) pubsub.publish_esafe_event(PubSub.EsafeTopics.CONFIG, event) @staticmethod def load_apartment(apartment_id): # type: (int) -> Optional[ApartmentDTO] apartment_orm = Apartment.select().where(Apartment.id == 
apartment_id).first() if apartment_orm is None: return None apartment_dto = ApartmentMapper.orm_to_dto(apartment_orm) return apartment_dto @staticmethod def load_apartment_by_mailbox_id(mailbox_id): # type: (int) -> Optional[ApartmentDTO] apartment_o
rm = Apartment.select().where(Apartment.mailbox_rebus_id == mailbox_id).first() if apartment_orm is None: return None apartment_dto = ApartmentMapper.orm_to_dto(apartment_orm) return apartment_dto @staticmethod def load_apartment_by_doorbell_id(doorbell_id): # type: (int) -> Optional[ApartmentDTO] apartment_orm = Apartment.select().where(Apartment.doorbell_rebus_id == do
orbell_id).first() if apartment_orm is None: return None apartment_dto = ApartmentMapper.orm_to_dto(apartment_orm) return apartment_dto @staticmethod def load_apartments(): # type: () -> List[ApartmentDTO] apartments = [] for apartment_orm in Apartment.select(): apartment_dto = ApartmentMapper.orm_to_dto(apartment_orm) apartments.append(apartment_dto) return apartments @staticmethod def get_apartment_count(): # type: () -> int return Apartment.select().count() @staticmethod def apartment_id_exists(apartment_id): # type: (int) -> bool apartments = ApartmentController.load_apartments() ids = (x.id for x in apartments) return apartment_id in ids def _check_rebus_ids(self, apartment_dto): if self.rebus_controller is None: raise StateException("Cannot save apartment: Rebus Controller is None") if 'doorbell_rebus_id' in apartment_dto.loaded_fields and \ not self.rebus_controller.verify_device_exists(apartment_dto.doorbell_rebus_id): raise ItemDoesNotExistException("Cannot save apartment: doorbell ({}) does not exists".format(apartment_dto.doorbell_rebus_id)) if 'mailbox_rebus_id' in apartment_dto.loaded_fields and \ not self.rebus_controller.verify_device_exists(apartment_dto.mailbox_rebus_id): raise ItemDoesNotExistException("Cannot save apartment: mailbox ({}) does not exists".format(apartment_dto.mailbox_rebus_id)) def save_apartment(self, apartment_dto, send_event=True): # type: (ApartmentDTO, bool) -> ApartmentDTO self._check_rebus_ids(apartment_dto) apartment_orm = ApartmentMapper.dto_to_orm(apartment_dto) apartment_orm.save() if send_event: ApartmentController.send_config_change_event('save') return ApartmentMapper.orm_to_dto(apartment_orm) def save_apartments(self, apartments_dto): apartments_dtos = [] for apartment in apartments_dto: apartment_saved = self.save_apartment(apartment, send_event=False) apartments_dtos.append(apartment_saved) self.send_config_change_event('save') return apartments_dtos def update_apartment(self, apartment_dto, send_event=True): # 
type: (ApartmentDTO, bool) -> ApartmentDTO self._check_rebus_ids(apartment_dto) if 'id' not in apartment_dto.loaded_fields or apartment_dto.id is None: raise RuntimeError('cannot update an apartment without the id being set') try: apartment_orm = Apartment.get_by_id(apartment_dto.id) loaded_apartment_dto = ApartmentMapper.orm_to_dto(apartment_orm) for field in apartment_dto.loaded_fields: if field == 'id': continue if hasattr(apartment_dto, field): setattr(loaded_apartment_dto, field, getattr(apartment_dto, field)) apartment_orm = ApartmentMapper.dto_to_orm(loaded_apartment_dto) apartment_orm.save() if send_event: ApartmentController.send_config_change_event('update') return ApartmentMapper.orm_to_dto(apartment_orm) except Exception as e: raise RuntimeError('Could not update the user: {}'.format(e)) def update_apartments(self, apartment_dtos): # type: (List[ApartmentDTO]) -> Optional[List[ApartmentDTO]] apartments = [] with Database.get_db().transaction() as transaction: try: # First clear all the rebus fields in order to be able to swap 2 fields for apartment in apartment_dtos: apartment_orm = Apartment.get_by_id(apartment.id) # type: Apartment if 'mailbox_rebus_id' in apartment.loaded_fields: apartment_orm.mailbox_rebus_id = None if 'doorbell_rebus_id' in apartment.loaded_fields: apartment_orm.doorbell_rebus_id = None apartment_orm.save() # Then check if there is already an apartment with an mailbox or doorbell rebus id that is passed # This is needed for when an doorbell or mailbox gets assigned to another apartment. Then the first assignment needs to be deleted. 
for apartment_orm in Apartment.select(): for apartment_dto in apartment_dtos: if apartment_orm.mailbox_rebus_id == apartment_dto.mailbox_rebus_id and apartment_orm.mailbox_rebus_id is not None: apartment_orm.mailbox_rebus_id = None apartment_orm.save() if apartment_orm.doorbell_rebus_id == apartment_dto.doorbell_rebus_id and apartment_orm.doorbell_rebus_id is not None: apartment_orm.doorbell_rebus_id = None apartment_orm.save() for apartment in apartment_dtos: updated = self.update_apartment(apartment, send_event=False) if updated is not None: apartments.append(updated) self.se
ReconCell/smacha
smacha/src/smacha/templates/Base.tpl.py
Python
bsd-3-clause
2,545
0.070334
{% block meta %} name: Base description: SMACH base template. language: Python framework: SMACH type: Base tags: [core] includes: [] extends: [] variables: - - manifest: description: ROS manifest name. type: str - - node_name: description: ROS node name for the state machine. type: str - outcomes: description: A list of possible outcomes of the state machine. type: list - - userdata: description: Definitions for userdata needed by child states. type: dict - - function_name: descriptio
n: A name for the main executable state machine function. type: str input_keys: [] output_keys: [] {% endblock met
a %} {% from "Utils.tpl.py" import import_module, render_outcomes, render_userdata %} {% set defined_headers = [] %} {% set local_vars = [] %} {% block base_header %} #!/usr/bin/env python {{ base_header }} {% endblock base_header %} {% block imports %} {{ import_module(defined_headers, 'smach') }} {{ imports }} {% endblock imports %} {% block defs %} {{ defs }} {% endblock defs %} {% block class_defs %} {{ class_defs }} {% endblock class_defs %} {% block cb_defs %} {{ cb_defs }} {% endblock cb_defs %} {% if name is defined %}{% set sm_name = name | lower() %}{% else %}{% set sm_name = 'sm' %}{% endif %} {% block main_def %} def {% if function_name is defined %}{{ function_name | lower() }}{% else %}main{% endif %}(): {{ main_def | indent(4) }} {% endblock main_def %} {% block body %} {{ sm_name }} = smach.StateMachine({{ render_outcomes(outcomes) }}) {# Container header insertion variable indexed by container state name #} {% if name in header %}{{ header[name] | indent(4, true) }}{% endif %} {# Render container userdata #} {% if userdata is defined %}{{ render_userdata(name | lower(), userdata) | indent(4) }}{% endif %} {# Render state userdata #} {% if name in header_userdata %}{{ header_userdata[name] | indent(4, true) }}{% endif %} with {{ sm_name }}: {# Container body insertion variable #} {{ body | indent(8) }} {% endblock body %} {% block footer %} {{ footer | indent(8) }} {% endblock footer %} {% block execute %} {{ execute | indent(4) }} outcome = {{ sm_name }}.execute() {% endblock execute %} {% block base_footer %} {{ base_footer | indent(4) }} {% endblock base_footer %} {% block main %} if __name__ == '__main__': {{ '' | indent(4, true) }}{% if function_name is defined %}{{ function_name | lower() }}{% else %}main{% endif %}() {% endblock main %}
davisjoe/joesrobotchallenge
expermients/hello Joe.py
Python
mit
171
0.035088
Python 3.4.2 (default, Oct 19 2
014, 13:31:11) [GCC 4.9.1] on linux Type "copyright", "credits" or
"license()" for more information. >>> print("hello Joe") hello Joe >>>
balohmatevz/steamapi
steamapi/app.py
Python
mit
6,268
0.002234
__author__ = 'SmileyBarry' from .core import APIConnection, SteamObject, store from .decorators import cached_property, INFINITE class SteamApp(SteamObject): def __init__(self, appid, name=None, owner=None): self._id = appid if name is not None: import time self._cache = dict() self._cache['name'] = (name, time.time()) # Normally, the associated userid is also the owner. # That would not be the case if the game is borrowed, though. In that case, the object creator # usually defines attributes accordingly. However, at this time we can't ask the API "is this # game borrowed?", unless it's the actively-played game, so this distinction isn't done in the # object's context, but in the object creator's context. self._owner = owner self._userid = self._owner @cached_property(ttl=INFINITE) def _schema(self): return APIConnection().call("ISteamUserStats", "GetSchemaForGame", "v2", appid=self._id) @property def appid(self): return self._id @cached_property(ttl=INFINITE) def achievements(self): global_percentages = APIConnection().call("ISteamUserStats", "GetGlobalAchievementPercentagesForApp", "v0002", gameid=self._id) if self._userid is not None: # Ah-ha, this game is associated to a user! 
userid = self._userid unlocks = APIConnection().call("ISteamUserStats", "GetUserStatsForGame", "v2", appid=self._id, steamid=userid) if 'achievements' in unlocks.playerstats: unlocks = [associated_achievement.name for associated_achievement in unlocks.playerstats.achievements if associated_achievement.achieved != 0] else: userid = None unlocks = None achievements_list = [] for achievement in self._schema.game.availableGameStats.achievements: achievement_obj = SteamAchievement(self._id, achievement.name, achievement.displayName, userid) achievement_obj._cache = {} if achievement.hidden == 0: store(achievement_obj, "is_hidden", False) else: store(achievement_obj, "is_hidden", True) for global_achievement in global_percentages.achievementpercentages.achievements: if global_achievement.name == achievement.name: achievement_obj.unlock_percentage = global_achievement.percent achievements_list += [achievement_obj] if unlocks is not None: for achievement in achievements_list: if achievement.apiname in unlocks: store(achievement, "is_achieved", True) else: store(achievement, "is_achieved", False) return achievements_list @cached_property(ttl=INFINITE) def name(self): return self._schema.game.gameName @cached_property(ttl=INFINITE) def owner(self): if self._owner is None: return self._userid else: return self._owner def __str__(self): return self.name def __hash__(self): # Don't just use the ID so ID collision between different types of objects wouldn't cause a match. return hash(('app', self.id)) class SteamAchievement(SteamObject): def __init__(self, linked_appid, apiname, displayname, linked_userid=None): """ Initialise a new instance of SteamAchievement. You shouldn't create one yourself, but from "SteamApp.achievements" instead. :param linked_appid: The AppID associated with this achievement. :type linked_appid: int :param apiname: The API-based name of this achievement. Usually a string. 
:type apiname: str or unicode :param displayname: The achievement's user-facing name. :type displayname: str or unicode :param linked_userid: The user ID this achievement is linked to. :type linked_userid: int :return: A new SteamAchievement instance. """ self._appid = linked_appid self._id = apiname self._displayname = displayname self._userid = linked_userid self.unlock_percentage = 0.0 def __hash__(self): # Don't just use the ID so ID collision between different types of objects wouldn't ca
use a match. return hash((self.id, self._appid)) @property def appid(self): return self._appid @property def name(self): return self._
displayname @property def apiname(self): return self._id @cached_property(ttl=INFINITE) def is_hidden(self): response = APIConnection().call("ISteamUserStats", "GetSchemaForGame", "v2", appid=self._appid) for achievement in response.game.availableGameStats.achievements: if achievement.name == self._id: if achievement.hidden == 0: return False else: return True @cached_property(ttl=INFINITE) def is_unlocked(self): if self._userid is None: raise ValueError("No Steam ID linked to this achievement!") response = APIConnection().call("ISteamUserStats", "GetPlayerAchievements", "v1", steamid=self._userid, appid=self._appid, l="English") for achievement in response.playerstats.achievements: if achievement.apiname == self._id: if achievement.achieved == 1: return True else: return False # Cannot be found. return False
ESOedX/edx-platform
lms/djangoapps/grades/migrations/0001_initial.py
Python
agpl-3.0
2,347
0.002983
# -*- coding: utf-8 -*- from __future__ import absolute_import, unicode_literals import django.utils.timezone import model_utils.fields from django.db import migrations, models from opaque_keys.edx.django.models import CourseKeyField, UsageKeyField from lms.djangoapps.courseware.fields import UnsignedBigIntAutoField class Migration(migrations.Migration): dependencies = [ ] operations = [ migrations.CreateModel( name='PersistentSubsectionGrade', fields=[ ('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, verbose_name='created', editable=False)), ('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, verbose_name='modified', editable=False)), ('id', UnsignedBigIntAutoField(serialize=False, primary_key=True)), ('user_id', models.IntegerField()), ('course_id', CourseKeyField(max_length=255)), ('usage_key', UsageKeyField(max_length=255)), ('subtree_edited_date', models.DateTimeField(verbose_name=b'last content edit timestamp')), ('course_version', models.CharField(max_length=255, verbose_name=b'guid of latest course version', blank=True)), ('earned_all', models.FloatField()), ('possible_all', models.FloatField()), ('earned_graded', models.FloatField()), ('possible_graded', models.FloatField()), ], ), migrations.CreateModel( name='VisibleBlocks', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, aut
o_created=True, primary_key=True)), ('blocks_json', models.TextField()), ('hashed', models.CharField(unique=True, max_length=100)), ], ), m
igrations.AddField( model_name='persistentsubsectiongrade', name='visible_blocks', field=models.ForeignKey(to='grades.VisibleBlocks', db_column=b'visible_blocks_hash', to_field=b'hashed', on_delete=models.CASCADE), ), migrations.AlterUniqueTogether( name='persistentsubsectiongrade', unique_together=set([('course_id', 'user_id', 'usage_key')]), ), ]
Stanford-Online/edx-platform
cms/djangoapps/contentstore/features/courses.py
Python
agpl-3.0
2,137
0.001872
# pylint: disable=missing-docstring # pylint: disable=redefined-outer-name # pylint: disable=unused-argument from lettuce import step, world from common import * ############### ACTIONS #################### @step('There are no courses$') def no_courses(step): world.clear_courses() create_studio_user() @step('I click the New Course button$') def i_click_new_course(step): world.css_click('.new-course-button') @step('I fill in the new course information$') def i_fill_in_a_new_course_information(step): fill_in_course_info() @step('I create a course with "([^"]*)", "([^"]*)", "([^"]*)", and "([^"]*)"') def i_create_course(step, name, org, number, run): fill_in_course_info(name=name, org=org, num=number, run=run) @step('I create a new course$') def i_create_a_course(step): create_a_course() @step('I click the course link in Studio Home$') def i_click_the_course_link_in_studio_home(step): # pylint: disable=invalid-name course_css = 'a.course-link' world.css_click(course_css) @step('I see an error about the length of the org/course/run tuple') def i_see_error_about_length(step): assert world.css_has_text( '#course_creation_error', 'The combined length of the organization, course number, ' 'and course run fields cannot be more than 65 characters.' ) ############ ASSERTIONS ################### @step('the Courseware page has loaded in Studio$') def courseware_page_has_loaded_in_studio(step): course_title_css = 'span.course-title' assert world.is_css_present(course_title_css) @step('I see the course listed in Studio Home$') def i_see_the_course_in_studio_home(step): course_css = 'h3.class-title' assert world.css_has_text(course_css, world.scenario_dict['COURSE'].display_name) @step('I am on the "([^"]*)" tab$') def i_am_on_tab(step, tab_name): header_css = 'div.inner-wrapper h1' assert world.css_has_text(header_css, tab_nam
e) @step('I see a link for adding a new section$') def i_see_new_section
_link(step): link_css = '.outline .button-new' assert world.css_has_text(link_css, 'New Section')
AbhilashReddyM/GeometricMultigrid
example_FMG_defcor.py
Python
mit
3,299
0.049712
""" This is an example showing how to use the mgd2d solver. A 4th order accurate solution is obtained with the 5pt stencil, by using deferred correction. """ import numpy as np import time from mgd2d import FMG,V_cycle #analytical solution def Uann(x,y,n): return np.sin(2*n*np.pi*x)*np.sin(2*n*np.pi*y) #RHS corresponding to above def source(x,y,n): return -8 * (np.pi)**2 * n**2 * np.sin(2*n*np.pi*x) * np.sin(2*n*np.pi*y) #input #input #FMG is a direct solver. tolerance and iterations are not used #nv = 1 # nv : Number of V-cycles within FMG. nv=1 will give solution a to within discretization error. # Increase this to get a higher accuracy solution (upto roundoff limit) # of the discrete problem.(residual on the fine grid =round off limit) # Here I am using nv=2 for the first solve and nv=6 for the second solve. nlevels = 7 #total number of grid levels. 1 means no multigrid, 2 means one coarse grid
. etc # Number of points is based on the number of multigrid levels as # N=A*2**(num_levels-1) where A is an integer >=4. Smaller A is better # This is a cell centered discretization NX = 4*2**(nlevels-1) NY = 4*2**(nlevels-1) #the grid has one layer of ghost cells to help apply the boundary conditions uann=np.zeros([NX+2,NY+2])#analytical solution u =np.zeros([NX+2,NY+2])#approximation f =np.zeros([NX+2,NY+2])#RHS #for deferred correction uxx
= np.zeros_like(u) corr = np.zeros_like(u) #calcualte the RHS and exact solution DX=1.0/NX DY=1.0/NY n=1 # number of waves in the solution xc=np.linspace(0.5*DX,1-0.5*DX,NX) yc=np.linspace(0.5*DY,1-0.5*DY,NY) XX,YY=np.meshgrid(xc,yc,indexing='ij') uann[1:NX+1,1:NY+1]=Uann(XX,YY,n) f[1:NX+1,1:NY+1]=source(XX,YY,n) print('mgd2d.py : Two Dimensional geometric multigrid solver') print('NX:',NX,', NY:',NY,', levels: ',nlevels) #start solving tb=time.time() u,res=FMG(NX,NY,nlevels,f,2) error=np.abs(uann[1:NX+1,1:NY+1]-u[1:NX+1,1:NY+1]) print(' 2nd Order::L_inf (true error): ',np.max(np.max(error))) print(' Elapsed time: ',time.time()-tb,' seconds') print('Improving approximation using deferred correction') #deferred correction #refer Leveque, p63 Ax=1.0/DX**2 Ay=1.0/DY**2 for i in range(1,NX+1): for j in range(1,NY+1): uxx[i,j]=(u[i+1,j]+u[i-1,j] - 2*u[i,j])/DX**2 # we should be using one-sided difference formulae for values # near the boundary. For simplicity I am just applying the # condition known from the analytical form for these terms. uxx[ 0,:] = -uxx[ 1,:] uxx[-1,:] = -uxx[-2,:] uxx[:, 0] = -uxx[:, 1] uxx[:,-1] = -uxx[:,-2] f[ 0,:] = -f[ 1,:] f[-1,:] = -f[-2,:] f[:, 0] = -f[:, 1] f[:,-1] = -f[:,-2] #correction term # del2(f)-2*uxxyy for i in range(1,NX+1): for j in range(1,NY+1): corr[i,j]=(Ax*(f[i+1,j]+f[i-1,j])+Ay*(f[i,j+1]+f[i,j-1])-2.0*(Ax+Ay)*f[i,j])-2*(uxx[i,j+1]+uxx[i,j-1] - 2*uxx[i,j])/DY**2 #adjust the RHS to cancel the leading order terms for i in range(1,NX+1): for j in range(1,NY+1): f[i,j]+= 1.0/12*DX**2*(corr[i,j]) ##solve once again with the new RHS u,res=FMG(NX,NY,nlevels,f,5) tf=time.time() error=np.abs(uann[1:NX+1,1:NY+1]-u[1:NX+1,1:NY+1]) print(' 4nd Order::L_inf (true error): ',np.max(np.max(error))) print('Elapsed time: ',tf-tb,' seconds')
andresriancho/collector
aws_collector/utils/collect.py
Python
gpl-2.0
4,386
0.000912
import os import logging import time import ConfigParser from fabric.operations import get from fabric.api import sudo, local, lcd, cd, shell_env from aws_collector.config.config import MAIN_CFG, S3_BUCKET OUTPUT_FILE_FMT = '%s-%s-collect-output.tar' S3_UPLOAD_CMD = 'aws s3 cp --region us-east-1 %s s3://%s/%s' def collect(conf, performance_results, output, version, instance): """ Copy the files from the remote EC2 instance to the local file system for later analysis. :param performance_results: The expression (/tmp/*.cpu) that output files of the performance test will match, and the ones we need to copy to our host. :param output: The local directory where we'll copy the remote files """ version = version.replace('/', '-') output_file = OUTPUT_FILE_FMT % (int(time.time()),
version) logging.info('Output statisti
cs:') sudo('ls -lah %s' % performance_results) sudo('du -sh %s' % performance_results) logging.info('Compressing output...') # performance_results looks like /tmp/collector/w3af-* path, file_glob = os.path.split(performance_results) with cd(path): sudo('tar -cpvf /tmp/%s %s' % (output_file, file_glob)) # Append config information to tar sudo('tar -C /tmp/ -rpvf /tmp/%s config' % output_file) # Compress tar file sudo('bzip2 -9 /tmp/%s' % output_file) output_file = '%s.bz2' % output_file remote_path = '/tmp/%s' % output_file sudo('ls -lah %s' % remote_path) # Uploading to S3 try: target_bucket = conf.get(MAIN_CFG, S3_BUCKET) except KeyError: pass else: aws_access, aws_secret = get_aws_credentials() if aws_access and aws_secret: logging.debug('Uploading data to S3...') s3_upload = S3_UPLOAD_CMD % (remote_path, target_bucket, output_file) # Needed to upload sudo('sudo pip install --upgrade awscli') with cd('/tmp/'): with shell_env(AWS_ACCESS_KEY_ID=aws_access, AWS_SECRET_ACCESS_KEY=aws_secret): sudo(s3_upload) else: logging.info('Failed to upload data to S3: No AWS credentials' ' were configured in AWS_ACCESS_KEY AWS_SECRET_KEY') # Downloading to my workstation logging.info('Downloading performance information, might take a while...') # Create the output directory if it doesn't exist output = os.path.expanduser(output) local_path = os.path.join(output, version) # # Before I stored the output in ~/performance_info/<version>/<instance-id> # but that did not help with the analysis phase, since I had to remember # those "long" EC2 instance IDs and... it had nothing to do with the # analysis itself. 
# # Now I just use ~/performance_info/<version>/<unique-incremental-id> # where unique-incremental-id is just a number that starts from 0 and # increments # i = -1 while True: i += 1 potential_output_path = os.path.join(local_path, '%s' % i) if not os.path.exists(potential_output_path): os.makedirs(potential_output_path) local_path = potential_output_path break # Get the remote file with all the data local_file_path = os.path.join(local_path, output_file) get(remote_path=remote_path, local_path=local_file_path) logging.debug('Decompress downloaded data...') with lcd(local_path): local('tar -jxpvf %s' % output_file) os.unlink(local_file_path) def get_aws_credentials(): """ :return: AWS_ACCESS_KEY AWS_SECRET_KEY from environment variables or ~/.boto """ if os.environ.get('AWS_ACCESS_KEY') and os.environ.get('AWS_SECRET_KEY'): return os.environ.get('AWS_ACCESS_KEY'), os.environ.get('AWS_SECRET_KEY') elif os.path.exists(os.path.expanduser('~/.boto')): config = ConfigParser.ConfigParser() config.read(os.path.expanduser('~/.boto')) aws_access = config.get('Credentials', 'aws_access_key_id', None) aws_secret = config.get('Credentials', 'aws_secret_access_key', None) return aws_access, aws_secret return None, None
rosenvladimirov/addons
partner_vat_search/models/__init__.py
Python
agpl-3.0
120
0
# -*- coding: utf-8 -*- # License AGPL-3.0 or later
(http://www.gnu.org/licenses/agpl.html). from . import r
es_partner
lpramuk/robottelo
tests/foreman/endtoend/test_cli_endtoend.py
Python
gpl-3.0
12,481
0.001683
"""Smoke tests for the ``CLI`` end-to-end scenario. :Requirement: Cli Endtoend :CaseAutomation: Automated :CaseLevel: Acceptance :CaseComponent: Hammer :Assignee: gtalreja :TestType: Functional :CaseImportance: High :Upstream: No """ import pytest from fauxfactory import gen_alphanumeric from fauxfactory import gen_ipaddr from robottelo import manifests from robottelo import ssh from robottelo.cli.activationkey import ActivationKey from robottelo.cli.computeresource import ComputeResource from robottelo.cli.contentview import ContentView from robottelo.cli.domain import Domain from robottelo.cli.factory import make_user from robottelo.cli.host import Host from robottelo.cli.hostgroup import HostGroup from robottelo.cli.lifecycleenvironment import LifecycleEnvironment from robottelo.cli.location import Location from robottelo.cli.org import Org from robottelo.cli.product import Product from robottelo.cli.repository import Repository from robottelo.cli.repository_set import RepositorySet from robottelo.cli.subnet import Subnet from robottelo.cli.subscription import Subscription from robottelo.cli.user import User from robottelo.config import setting_is_set from robottelo.config import settings from robottelo.constants import DEFAULT_LOC from robottelo.constants import DEFAULT_ORG from robottelo.constants import DEFAULT_SUBSCRIPTION_NAME from robottelo.constants import PRDS from robottelo.constants import REPOS from robottelo.constants import REPOSET from robottelo.constants.repos import CUSTOM_RPM_REPO AK_CONTENT_LABEL = 'rhel-6-server-rhev-agent-rpms' @pytest.fixture(scope='module') def fake_manifest_is_set(): return setting_is_set('fake_manifest') @pytest.mark.tier1 @pytest.mark.upgrade def test_positive_cli_find_default_org(): """Check if 'Default Organization' is present :id: 95ffeb7a-134e-4273-bccc-fe8a3a336b2a :expectedresults: 'Default Organization' is found """ result = Org.info({'name': DEFAULT_ORG}) assert result['name'] == DEFAULT_ORG @pytest.mark.tier1 
@pytest.mark.upgrade def test_positive_cli_find_default_loc(): """Check if 'Default Location' is present :id: 11cf0d06-78ff-47e8-9d50-407a2ea31988 :expectedresults: 'Default Location' is found """ result = Location.info({'name': DEFAULT_LOC}) assert result['name'] == DEFAULT_LOC @pytest.mark.tier1 @pytest.mark.upgrade def test_positive_cli_find_admin_user(): """Check if Admin User is present :id: f6755189-05a6-4d2f-a3b8-98be0cfacaee :expectedresults: Admin User is found and has Admin role """ result = User.info({'login': 'admin'}) assert result['login'] == 'admin' assert result['admin'] == 'yes' @pytest.mark.skip_if_not_set('libvirt') @pytest.mark.tier4 @pytest.mark.upgrade @pytest.mark.skipif((not settings.robottelo.REPOS_HOSTING_URL), reason='Missing repos_hosting_url') def test_positive_cli_end_to_end(fake_manifest_is_set, rhel6_contenthost, default_sat): """Perform end to end smoke tests using RH and custom repos. 1. Create a new user with admin permissions 2. Using the new user from above 1. Create a new organization 2. Clone and upload manifest 3. Create a new lifecycle environment 4. Create a custom product 5. Create a custom YUM repository 6. Enable a Red Hat repository 7. Synchronize the three repositories 8. Create a new content view 9. Associate the YUM and Red Hat repositories to new content view 10. Publish content view 11. Promote content view to the lifecycle environment 12. Create a new activation key 13. Add the products to t
he activation key 14. Create a new libvirt compute resou
rce 15. Create a new subnet 16. Create a new domain 17. Create a new hostgroup and associate previous entities to it 18. Provision a client ** NOT CURRENTLY PROVISIONING :id: 8c8b3ffa-0d54-436b-8eeb-1a3542e100a8 :expectedresults: All tests should succeed and Content should be successfully fetched by client. """ # step 1: Create a new user with admin permissions password = gen_alphanumeric() user = make_user({'admin': 'true', 'password': password}) user['password'] = password # step 2.1: Create a new organization org = _create(user, Org, {'name': gen_alphanumeric()}) # step 2.2: Clone and upload manifest if fake_manifest_is_set: with manifests.clone() as manifest: ssh.upload_file(manifest.content, manifest.filename) Subscription.upload({'file': manifest.filename, 'organization-id': org['id']}) # step 2.3: Create a new lifecycle environment lifecycle_environment = _create( user, LifecycleEnvironment, {'name': gen_alphanumeric(), 'organization-id': org['id'], 'prior': 'Library'}, ) # step 2.4: Create a custom product product = _create(user, Product, {'name': gen_alphanumeric(), 'organization-id': org['id']}) repositories = [] # step 2.5: Create custom YUM repository yum_repo = _create( user, Repository, { 'content-type': 'yum', 'name': gen_alphanumeric(), 'product-id': product['id'], 'publish-via-http': 'true', 'url': CUSTOM_RPM_REPO, }, ) repositories.append(yum_repo) # step 2.6: Enable a Red Hat repository if fake_manifest_is_set: RepositorySet.enable( { 'basearch': 'x86_64', 'name': REPOSET['rhva6'], 'organization-id': org['id'], 'product': PRDS['rhel'], 'releasever': '6Server', } ) rhel_repo = Repository.info( { 'name': REPOS['rhva6']['name'], 'organization-id': org['id'], 'product': PRDS['rhel'], } ) repositories.append(rhel_repo) # step 2.7: Synchronize the three repositories for repo in repositories: Repository.with_user(user['login'], user['password']).synchronize({'id': repo['id']}) # step 2.8: Create content view content_view = _create( user, ContentView, 
{'name': gen_alphanumeric(), 'organization-id': org['id']} ) # step 2.9: Associate the YUM and Red Hat repositories to new content view for repo in repositories: ContentView.add_repository( { 'id': content_view['id'], 'organization-id': org['id'], 'repository-id': repo['id'], } ) # step 2.10: Publish content view ContentView.with_user(user['login'], user['password']).publish({'id': content_view['id']}) # step 2.11: Promote content view to the lifecycle environment content_view = ContentView.with_user(user['login'], user['password']).info( {'id': content_view['id']} ) assert len(content_view['versions']) == 1 cv_version = ContentView.with_user(user['login'], user['password']).version_info( {'id': content_view['versions'][0]['id']} ) assert len(cv_version['lifecycle-environments']) == 1 ContentView.with_user(user['login'], user['password']).version_promote( {'id': cv_version['id'], 'to-lifecycle-environment-id': lifecycle_environment['id']} ) # check that content view exists in lifecycle content_view = ContentView.with_user(user['login'], user['password']).info( {'id': content_view['id']} ) assert len(content_view['versions']) == 1 cv_version = ContentView.with_user(user['login'], user['password']).version_info( {'id': content_view['versions'][0]['id']} ) assert len(cv_version['lifecycle-environments']) == 2 assert cv_version['lifecycle-environments'][-1]['id'] == lifecycle_environment['id'] # step 2.12: Create a new activation key activation_key = _create( user, ActivationKey, { 'content-view-id': content_view['id'], 'lifecycle-environment-id': lifecycle_environment['id'], 'name': ge
tolimit/tp-qemu
generic/tests/jumbo.py
Python
gpl-2.0
7,693
0.00052
import logging import commands import random from autotest.client.shared import error from autotest.client import utils from virttest import utils_misc from virttest import utils_test from virttest import utils_net @error.context_aware def run(test, params, env): """ Test the RX jumbo frame function of vnics: 1) Boot the VM. 2) Change the MTU of guest nics and host taps depending on the NIC model. 3) Add the static ARP entry for guest NIC. 4) Wait for the MTU ok. 5) Verify the path MTU using ping. 6) Ping the guest with large frames. 7) Increment size ping. 8) Flood ping the guest with large frames. 9) Verify the path MTU. 10) Recover the MTU. :param test: QEMU test object. :param params: Dictionary with the test parameters. :param env: Dictionary with test environment. """ timeout = int(params.get("login_timeout", 360)) mtu = params.get("mtu", "1500") def_max_icmp_size = int(mtu) - 28 max_icmp_pkt_size = int(params.get("max_icmp_pkt_size", def_max_icmp_size)) flood_time = params.get("flood_time", "300") os_type = params.get("os_type") os_variant = params.get("os_variant") vm = env.get_vm(params["main_vm"]) vm.verify_alive() session = vm.wait_for_login(timeout=timeout) session_serial = vm.wait_for_serial_login(timeout=timeout) ifname = vm.get_ifname(0) guest_ip = vm.get_address(0) if guest_ip is None: raise error.TestError("Could not get the guest ip address") try: error.context("Changing the MTU of guest", logging.info) # Environment preparation mac = vm.get_mac_address(0) if os_type == "linux": ethname = utils_net.get_linux_ifname(session, mac) guest_mtu_cmd = "ifconfig %s mtu %s" % (ethname, mtu) else: connection_id = utils_net.get_windows_nic_attribute(session, "macaddress", mac, "netconnectionid") index = utils_net.get_windows_nic_attribute(session, "netconnectionid", connection_id, "index") if os_variant == "winxp": pnpdevice_id = utils_net.get_windows_nic_attribute(session, "netconnectionid", connection_id, "pnpdeviceid") cd_num = 
utils_misc.get_winutils_vol(session) copy_cmd = r"xcopy %s:\devcon\wxp_x86\devcon.exe c:\ " % cd_num session.cmd(copy_cmd) reg_set_mtu_pattern = params.get("reg_mtu_cmd") mtu_key_word = params.get("mtu_key", "MTU") reg_set_mtu = reg_set_mtu_pattern % (int(index), mtu_key_word, int(mtu)) guest_mtu_cmd = "%s " % reg_set_mtu session.cmd(guest_mtu_cmd) if os_type == "windows": mode = "netsh" if os_variant == "winxp": connection_id = pnpdevice_id.split("&")[-1] mode = "devcon" utils_net.restart_windows_guest_network(session_serial, connection_id, mode=mode) error.context("Chaning the MTU of host tap ...", logging.info) host_mtu_cmd = "ifconfig %s mtu %s" # Before change macvtap mtu, must set the base interface mtu if params.get("nettype") == "macvtap": base_if = utils_net.get_macvtap_base_iface(params.get("netdst")) utils.run(host_mtu_cmd % (base_if, mtu)) utils.run(host_mtu_cmd % (ifname, mtu)) error.context("Add a temporary static ARP entry ...", logging.info) arp_add_cmd = "arp -s %s %s -i %s" % (guest_ip, mac, ifname) utils.run(arp_add_cmd) def is_mtu_ok(): status, _ = utils_test.ping(guest_ip, 1, interface=ifname, packetsize=max_icmp_pkt_size, hint="do", timeout=2) return status == 0 def verify_mtu(): logging.info("Verify the path MTU") status, output = utils_test.ping(guest_ip, 10, interface=ifname, packetsize=max_icmp_pkt_size, hint="do", timeout=15) if status != 0: logging.error(output) raise error.TestFail("Path MTU is not as expected") if utils_test.get_loss_ratio(output) != 0: logging.error(output) raise error.TestFail("Packet loss ratio during MTU " "verification is not zero") def flood_ping(): logging.info("Flood with large frames") utils_test.ping(guest_ip, interface=ifname, packetsize=max_icmp_pkt_size, flood=True, timeout=float(flood_time)) def large_frame_ping(count=100): logging.info("Large frame ping") _, output = utils_test.ping(guest_ip, count, interface=ifname, packetsize=max_icmp_pkt_size, timeout=float(count) * 2) ratio = 
utils_test.get_loss_ratio(output) if ratio != 0: raise error.TestFail("Loss ratio of large frame ping is %s" % ratio) def size_increase_ping(step=random.randrange(90, 110)): logging.info("Size increase ping") for size in range(0, max_icmp_pkt_size + 1, step): logging.info("Ping %s with size %s", guest_ip, size) status, output = utils_test.ping(guest_ip, 1, interface=ifname, packetsize=size, hint="do", timeout=1) if status != 0: status, output = utils_test.ping(guest_ip, 10, interface=ifname, packetsize=size, adaptive=True, hint="do", timeout=20) fail_ratio = int(params.get("fail_ratio", 50)) if utils_test.get_loss_ratio(output) > fail_ratio: raise error.TestFail("Ping loss ratio is greater " "than 50% for size %s" % size) logging.info("Waiting for the MTU to be OK") wait_mtu_ok = 10 if not utils_misc.wait_for(is_mtu_ok, wait_mtu_ok, 0, 1): logging.debug(commands.getoutput("ifconfig -a")) raise error.TestError("MTU is not as expected even after %s " "seconds" % wait_mtu_ok) # Functional Test error.context("Checking whether MTU change is ok", logging.info) verify_mtu()
large_frame_ping() size_increase_ping() # S
tress test flood_ping() verify_mtu() finally: # Environment clean if session: session.close() if utils.system("grep '%s.*%s' /proc/net/arp" % (guest_ip, ifname)) == '0': utils.run("arp -d %s -i %s" % (guest_ip, ifname)) logging.info("Removing the temporary ARP entry successfully")
anton-golubkov/Garland
src/test/test_splitblock.py
Python
lgpl-2.1
4,345
0.008055
#------------------------------------------------------------------------------- # Copyright (c) 2011 Anton Golubkov. # All rights reserved. This program and the accompanying materials # are made available under the terms of the GNU Lesser Public License v2.1 # which accompanies this distribution, and is available at # http://www.gnu.org/licenses/old-licenses/gpl-2.0.html # # Contributors: # Anton Golubkov - initial API and implementation #------------------------------------------------------------------------------- #!/usr/bin/python # -*- coding: utf-8 -*- import unittest import cv import os, sys cmd_folder, f = os.path.split(os.path.dirname(os.path.abspath(__file__))) if cmd_folder not in sys.path: sys.path.insert(0, cmd_folder) import ipf.ipfblock.split class TestSplitBlock(unittest.TestCase): def setUp(self): self.block = ipf.ipfblock.split.Split() self.test_image = cv.LoadImage("files/test.png") self.block.input_ports["input_image"].pass_value(self.test_image) def test_output_image_channels(self): """ Test return to output ports 3 one-channel images of same size """ self.block.process() image_1 = self.block.output_ports["output_image_1"].get_value() image_2 = self.block.output_ports["output_image_2"].get_value() image_3 = self.block.output_ports["output_image_3"].get_value() self.assertEqual(image_1.nChannels, 1) self.assertEqual(image_2.nChannels, 1) self.assertEqual(image_3.nChannels, 1) def test_output_image_size(self): """ Test return to output ports images of same size """ self.block.process() image_1 = self.block.output_ports["output_image_1"].get_value() image_2 = self.block.output_ports["output_image_2"].get_value() image_3 = self.block.output_ports["output_image_3"].get_value() self.assertEqual((image_1.width, image_1.height), (self.test_image.width, self.test_image.height)) self.assertEqual((image_2.width, image_2.height), (self.test_image.width, self.test_image.height)) self.assertEqual((image_3.width, image_3.height), (self.test_image.width, 
self.test_image.height)) def test_output_image(self): self.block.process() output_image_1 = self.block.output_ports["output_image_1"].get_value() output_image_2 = self.block.output_ports["output_image_2"].get_value() output_image_3 = self.block.output_ports["output_image_3"].get_value() cv.SaveImage("files/test_split_out_1.png", output_image_1) cv.SaveImage("files/test_split_out_2.png", output_image_2) cv.SaveImage("files/test_split_out_3.png", output_image_3) loaded_image = cv.LoadImage("files/test_split_out_1.png") test_loaded_image = cv.LoadImage("files/test_split_1.png") sel
f.assertEqual(loaded_image.tostring(), test_loaded_image.tostring()) loaded_image = cv.LoadImage("files/test_split_out_2.png") test_loaded_image = cv.LoadImage("files/test_split_2.png") self.assertEqual(loaded_image.tostring(), test_loaded_image.tostring()) loaded_image = cv.LoadImage("files/test_split_out_3.png") test_loaded_image = cv.LoadImage("files/test_split_3.png") self.assertEqual(loaded_image.tostring(), test_l
oaded_image.tostring()) def test_zero_image(self): zero_image = cv.CreateImage( (0, 0), cv.IPL_DEPTH_8U, 3) self.block.input_ports["input_image"].pass_value(zero_image) self.block.process() zero_image_1c = cv.CreateImage( (0, 0), cv.IPL_DEPTH_8U, 1) output_image_1 = self.block.output_ports["output_image_1"].get_value() output_image_2 = self.block.output_ports["output_image_2"].get_value() output_image_3 = self.block.output_ports["output_image_3"].get_value() self.assertEqual(output_image_1.tostring(), zero_image_1c.tostring()) self.assertEqual(output_image_2.tostring(), zero_image_1c.tostring()) self.assertEqual(output_image_3.tostring(), zero_image_1c.tostring()) if __name__ == '__main__': unittest.main()
SUSE/azure-sdk-for-python
azure-mgmt-sql/azure/mgmt/sql/models/encryption_protector_paged.py
Python
mit
918
0
# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. # -------------------------------------------------------------------------- from msrest.paging import Paged class EncryptionProtectorPaged(Paged): """ A paging con
tainer for iterating over a list of EncryptionProtector object """ _attribute_map = { 'next_link': {'key': 'nextLink', 'type': 'str'}, 'current_page': {'key': 'value', 'type': '[EncryptionPr
otector]'} } def __init__(self, *args, **kwargs): super(EncryptionProtectorPaged, self).__init__(*args, **kwargs)
eric/whisper-rb
lib/whisper/py/whisper.py
Python
mit
21,690
0.026418
#!/usr/bin/env python # Copyright 2008 Orbitz WorldWide # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # # This module is an implementation of the Whisper database API # Here is the basic layout of a whisper data file # # File = Header,Data # Header = Metadata,ArchiveInfo+ # Metadata = lastUpdate,maxRetention,xFilesFactor,archiveCount # ArchiveInfo = Offset,SecondsPerPoint,Points # Data = Archive+ # Archive = Point+ # Point = timestamp,value # # NOTE: the lastUpdate field is deprecated, do not use it! 
import os, struct, time try: import fcntl CAN_LOCK = True except ImportError: CAN_LOCK = False LOCK = False CACHE_HEADERS = False __headerCache = {} longFormat = "!L" longSize = struct.calcsize(longFormat) floatFormat = "!f" floatSize = struct.calcsize(floatFormat) timestampFormat = "!L" timestampSize = struct.calcsize(timestampFormat) valueFormat = "!d" valueSize = struct.calcsize(valueFormat) pointFormat = "!Ld" pointSize = struct.calcsize(pointFormat) metadataFormat = "!2LfL" metadataSize = struct.calcsize(metadataFormat) archiveInfoFormat = "!3L" archiveInfoSize = struct.calcsize(archiveInfoFormat) debug = startBlock = endBlock = lambda *a,**k: None class WhisperException(Exception): """Base class for whisper exceptions.""" class InvalidConfiguration(WhisperException): """Invalid configuration.""" class InvalidTimeInterval(WhisperException): """Invalid time interval.""" class TimestampNotCovered(WhisperException): """Timestamp not covered by any archives in this database.""" def enableDebug(): global open, debug, startBlock, endBlock class open(file): def __init__(self,*args,**kwargs): file.__init__(self,*args,**kwargs) self.writeCount = 0 self.readCount = 0 def write(self,data): self.writeCount += 1 debug('WRITE %d bytes #%d' % (len(data),self.writeCount)) return file.write(self,data) def read(self,bytes):
self.readCount += 1 debug('READ %d bytes #%d' % (bytes,self.readCount)) return file.read(self,bytes)
def debug(message): print 'DEBUG :: %s' % message __timingBlocks = {} def startBlock(name): __timingBlocks[name] = time.time() def endBlock(name): debug("%s took %.5f seconds" % (name,time.time() - __timingBlocks.pop(name))) def __readHeader(fh): info = __headerCache.get(fh.name) if info: return info #startBlock('__readHeader') originalOffset = fh.tell() fh.seek(0) packedMetadata = fh.read(metadataSize) (lastUpdate,maxRetention,xff,archiveCount) = struct.unpack(metadataFormat,packedMetadata) archives = [] for i in xrange(archiveCount): packedArchiveInfo = fh.read(archiveInfoSize) (offset,secondsPerPoint,points) = struct.unpack(archiveInfoFormat,packedArchiveInfo) archiveInfo = { 'offset' : offset, 'secondsPerPoint' : secondsPerPoint, 'points' : points, 'retention' : secondsPerPoint * points, 'size' : points * pointSize, } archives.append(archiveInfo) fh.seek(originalOffset) info = { #'lastUpdate' : lastUpdate, # Deprecated 'maxRetention' : maxRetention, 'xFilesFactor' : xff, 'archives' : archives, } if CACHE_HEADERS: __headerCache[fh.name] = info #endBlock('__readHeader') return info def __changeLastUpdate(fh): return #XXX Make this a NOP, use os.stat(filename).st_mtime instead startBlock('__changeLastUpdate()') originalOffset = fh.tell() fh.seek(0) #Based on assumption that first field is lastUpdate now = int( time.time() ) packedTime = struct.pack(timestampFormat,now) fh.write(packedTime) fh.seek(originalOffset) endBlock('__changeLastUpdate()') def create(path,archiveList,xFilesFactor=0.5): """create(path,archiveList,xFilesFactor=0.5) path is a string archiveList is a list of archives, each of which is of the form (secondsPerPoint,numberOfPoints) xFilesFactor specifies the fraction of data points in a propagation interval that must have known values for a propagation to occur """ #Validate archive configurations... 
if not archiveList: raise InvalidConfiguration("You must specify at least one archive configuration!") archiveList.sort(key=lambda a: a[0]) #sort by precision (secondsPerPoint) for i,archive in enumerate(archiveList): if i == len(archiveList) - 1: break next = archiveList[i+1] if not (archive[0] < next[0]): raise InvalidConfiguration("You cannot configure two archives " "with the same precision %s,%s" % (archive,next)) if (next[0] % archive[0]) != 0: raise InvalidConfiguration("Higher precision archives' precision " "must evenly divide all lower precision archives' precision %s,%s" \ % (archive[0],next[0])) retention = archive[0] * archive[1] nextRetention = next[0] * next[1] if not (nextRetention > retention): raise InvalidConfiguration("Lower precision archives must cover " "larger time intervals than higher precision archives %s,%s" \ % (archive,next)) #Looks good, now we create the file and write the header if os.path.exists(path): raise InvalidConfiguration("File %s already exists!" 
% path) fh = open(path,'wb') if LOCK: fcntl.flock( fh.fileno(), fcntl.LOCK_EX ) lastUpdate = struct.pack( timestampFormat, int(time.time()) ) oldest = sorted([secondsPerPoint * points for secondsPerPoint,points in archiveList])[-1] maxRetention = struct.pack( longFormat, oldest ) xFilesFactor = struct.pack( floatFormat, float(xFilesFactor) ) archiveCount = struct.pack(longFormat, len(archiveList)) packedMetadata = lastUpdate + maxRetention + xFilesFactor + archiveCount fh.write(packedMetadata) headerSize = metadataSize + (archiveInfoSize * len(archiveList)) archiveOffsetPointer = headerSize for secondsPerPoint,points in archiveList: archiveInfo = struct.pack(archiveInfoFormat, archiveOffsetPointer, secondsPerPoint, points) fh.write(archiveInfo) archiveOffsetPointer += (points * pointSize) zeroes = '\x00' * (archiveOffsetPointer - headerSize) fh.write(zeroes) fh.close() def __propagate(fh,timestamp,xff,higher,lower): lowerIntervalStart = timestamp - (timestamp % lower['secondsPerPoint']) lowerIntervalEnd = lowerIntervalStart + lower['secondsPerPoint'] fh.seek(higher['offset']) packedPoint = fh.read(pointSize) (higherBaseInterval,higherBaseValue) = struct.unpack(pointFormat,packedPoint) if higherBaseInterval == 0: higherFirstOffset = higher['offset'] else: timeDistance = lowerIntervalStart - higherBaseInterval pointDistance = timeDistance / higher['secondsPerPoint'] byteDistance = pointDistance * pointSize higherFirstOffset = higher['offset'] + (byteDistance % higher['size']) higherPoints = lower['secondsPerPoint'] / higher['secondsPerPoint'] higherSize = higherPoints * pointSize relativeFirstOffset = higherFirstOffset - higher['offset'] relativeLastOffset = (relativeFirstOffset + higherSize) % higher['size'] higherLastOffset = relativeLastOffset + higher['offset'] fh.seek(higherFirstOffset) if higherFirstOffset < higherLastOffset: #we don't wrap the archive seriesString = fh.read(higherLastOffset - higherFirstOffset) else: #We do wrap the archive higherEnd = 
higher['offset'] + higher['size'] seriesString = fh.read(higherEnd - higherFirstOffset) fh.seek(higher['offset']) seriesString += fh.read(higherLastOffset - higher['offset']) #Now we unpack the series data we just read byteOrder,pointTypes = pointFormat[0],pointFormat[1:] points = len(seriesString) / pointSize seriesFormat = byteOrder + (pointTypes * points) unpackedSeries = struct.unpack(seriesForm
beeftornado/sentry
src/sentry/integrations/jira/client.py
Python
bsd-3-clause
8,782
0.001822
from __future__ import absolute_import import datetime import jwt import re import logging from six.moves.urllib.parse import parse_qs, urlparse, urlsplit from sentry.integrations.atlassian_connect import get_query_hash from sentry.shared_integrations.exceptions import ApiError from sentry.integrations.client import ApiClient from sentry.utils.http import absolute_uri logger = logging.getLogger("sentry.integrations.jira") JIRA_KEY = "%s.jira" % (urlparse(absolute_uri()).hostname,) ISSUE_KEY_RE = re.compile(r"^[A-Za-z][A-Za-z0-9]*-\d+$") class JiraCloud(object): """ Contains the jira-cloud specifics that a JiraClient needs in order to communicate with jira """ def __init__(self, shared_secret): self.shared_secret = shared_secret @property def cache_prefix(self): return "sentry-jira-2:" def request_hook(self, method, path, data, params, **kwargs): """ Used by Jira Client to apply the jira-cloud authentication """ # handle params that are already part of the path url_params = dict(parse_qs(urlsplit(path).query)) url_params.update(params or {}) path = path.split("?")[0] jwt_payload = { "iss": JIRA_KEY, "iat": datetime.datetime.utcnow(), "exp": datetime.datetime.utcnow() + datetime.timedelta(seconds=5 * 60), "qsh": get_query_hash(path, method.upper(), url_params), } encoded_jwt = jwt.encode(jwt_payload, self.shared_secret) params = dict(jwt=encoded_jwt, **(url_params or {})) request_spec = kwargs.copy() request_spec.update(dict(method=method, path=path, data=data, params=params)) return request_spec def user_id_field(self):
""" Jira-Cloud requires GDPR compliant API usage so we have to use accountId """ return "accountId" def user_query_param(self): """ Jira-Cloud requires GDPR compliant API usage so we have to use query """ return "query
" def user_id_get_param(self): """ Jira-Cloud requires GDPR compliant API usage so we have to use accountId """ return "accountId" class JiraApiClient(ApiClient): # TODO: Update to v3 endpoints COMMENTS_URL = "/rest/api/2/issue/%s/comment" COMMENT_URL = "/rest/api/2/issue/%s/comment/%s" STATUS_URL = "/rest/api/2/status" CREATE_URL = "/rest/api/2/issue" ISSUE_URL = "/rest/api/2/issue/%s" META_URL = "/rest/api/2/issue/createmeta" PRIORITIES_URL = "/rest/api/2/priority" PROJECT_URL = "/rest/api/2/project" SEARCH_URL = "/rest/api/2/search/" VERSIONS_URL = "/rest/api/2/project/%s/versions" USERS_URL = "/rest/api/2/user/assignable/search" USER_URL = "/rest/api/2/user" SERVER_INFO_URL = "/rest/api/2/serverInfo" ASSIGN_URL = "/rest/api/2/issue/%s/assignee" TRANSITION_URL = "/rest/api/2/issue/%s/transitions" EMAIL_URL = "/rest/api/3/user/email" integration_name = "jira" # This timeout is completely arbitrary. Jira doesn't give us any # caching headers to work with. Ideally we want a duration that # lets the user make their second jira issue with cached data. cache_time = 240 def __init__(self, base_url, jira_style, verify_ssl, logging_context=None): self.base_url = base_url # `jira_style` encapsulates differences between jira server & jira cloud. # We only support one API version for Jira, but server/cloud require different # authentication mechanisms and caching. self.jira_style = jira_style super(JiraApiClient, self).__init__(verify_ssl, logging_context) def get_cache_prefix(self): return self.jira_style.cache_prefix def request(self, method, path, data=None, params=None, **kwargs): """ Use the request_hook method for our specific style of Jira to add authentication data and transform parameters. """ request_spec = self.jira_style.request_hook(method, path, data, params, **kwargs) if "headers" not in request_spec: request_spec["headers"] = {} # Force adherence to the GDPR compliant API conventions. 
# See # https://developer.atlassian.com/cloud/jira/platform/deprecation-notice-user-privacy-api-migration-guide request_spec["headers"]["x-atlassian-force-account-id"] = "true" return self._request(**request_spec) def user_id_get_param(self): return self.jira_style.user_id_get_param() def user_id_field(self): return self.jira_style.user_id_field() def user_query_param(self): return self.jira_style.user_query_param() def get_issue(self, issue_id): return self.get(self.ISSUE_URL % (issue_id,)) def search_issues(self, query): # check if it looks like an issue id if ISSUE_KEY_RE.match(query): jql = 'id="%s"' % query.replace('"', '\\"') else: jql = 'text ~ "%s"' % query.replace('"', '\\"') return self.get(self.SEARCH_URL, params={"jql": jql}) def create_comment(self, issue_key, comment): return self.post(self.COMMENTS_URL % issue_key, data={"body": comment}) def update_comment(self, issue_key, comment_id, comment): return self.put(self.COMMENT_URL % (issue_key, comment_id), data={"body": comment}) def get_projects_list(self): return self.get_cached(self.PROJECT_URL) def get_project_key_for_id(self, project_id): if not project_id: return "" projects = self.get_projects_list() for project in projects: if project["id"] == project_id: return project["key"].encode("utf-8") return "" def get_create_meta_for_project(self, project): params = {"expand": "projects.issuetypes.fields", "projectIds": project} metas = self.get_cached(self.META_URL, params=params) # We saw an empty JSON response come back from the API :( if not metas: logger.info( "jira.get-create-meta.empty-response", extra={"base_url": self.base_url, "project": project}, ) return None # XXX(dcramer): document how this is possible, if it even is if len(metas["projects"]) > 1: raise ApiError(u"More than one project found matching {}.".format(project)) try: return metas["projects"][0] except IndexError: logger.info( "jira.get-create-meta.key-error", extra={"base_url": self.base_url, "project": project}, ) return None 
def get_versions(self, project): return self.get_cached(self.VERSIONS_URL % project) def get_priorities(self): return self.get_cached(self.PRIORITIES_URL) def get_users_for_project(self, project): # Jira Server wants a project key, while cloud is indifferent. project_key = self.get_project_key_for_id(project) return self.get_cached(self.USERS_URL, params={"project": project_key}) def search_users_for_project(self, project, username): # Jira Server wants a project key, while cloud is indifferent. project_key = self.get_project_key_for_id(project) return self.get_cached( self.USERS_URL, params={"project": project_key, self.user_query_param(): username} ) def search_users_for_issue(self, issue_key, email): return self.get_cached( self.USERS_URL, params={"issueKey": issue_key, self.user_query_param(): email} ) def get_user(self, user_id): user_id_get_param = self.user_id_get_param() return self.get_cached(self.USER_URL, params={user_id_get_param: user_id}) def create_issue(self, raw_form_data): data = {"fields": raw_form_data} return self.post(self.CREATE_URL, data=data) def get_server_info(self): return self.get(self.SERVER_INFO_URL) def get_valid_statuses(self): return self.get_cached(self.STATUS_URL) def get_tra
ScenK/Dev_Blog2
blog/dispatcher/__init__.py
Python
bsd-3-clause
18,473
0.000109
# -*- coding: utf-8 -*- import json import datetime import PyRSS2Gen from werkzeug.security import generate_password_hash from mongoengine.errors import NotUniqueError, ValidationError from flask import make_response from tasks.email_tasks import send_email_task from config import Config from model.models import (User, Diary, Category, Page, Tag, Comment, CommentEm, StaticPage) from utils.helper import SiteHelpers class UserDispatcher(object): """User dispatcher. Return author profile """ def get_profile(self): """Return User object.""" return User.objects.first() def generate_user(self, username, password): """Generate User""" user = User(name=username) user.password = generate_password_hash(password=password) return user.save() def
delete_user(self): """Delete User""" return User.objects().first().delete() def get_by_name(self, username): "
""Get user by username Args: username: string Return: user: user object """ return User.objects(name=username).first() class CommentDispatcher(object): """Comment dispatcher. Retuen comments functons helper. """ def add_comment(self, author, diary_id, email, content): diary = Diary.objects(pk=diary_id) diary_title = diary.first().title comment_em = CommentEm( author=author, content=content, email=email ) diary.update_one(push__comments=comment_em) comment = Comment(content=content) comment.diary = diary.first() comment.email = email comment.author = author comment.save(validate=False) try: send_email_task(Config.EMAIL, Config.MAIN_TITLE + u'收到了新的评论, 请查收', content, diary_id, author, diary_title) response = make_response(json.dumps({'success': 'true'})) response.set_cookie('guest_name', author) response.set_cookie('guest_email', email) return response except Exception as e: return str(e) def reply_comment(self, author, diary_id, email, content): diary = Diary.objects(pk=diary_id) diary_title = diary.first().title comment_em = CommentEm( author=u'博主回复', content=content, ) diary.update_one(push__comments=comment_em) ''' Save in Comment model for admin manage''' comment = Comment(content=content) comment.diary = diary.first() comment.author = UserDispatcher().get_profile().name comment.save(validate=False) try: send_email_task(email, u'您评论的文章《' + diary_title + u'》收到了来自\ 博主的回复, 请查收', content, diary_id, author, diary_title) return json.dumps({'success': 'true'}) except Exception as e: return json.dumps({'success': 'false', 'reason': str(e)}) def get_all_comments(self, order='-publish_time'): """Return Total diaries objects.""" return Comment.objects.order_by(order) def del_comment_by_id(self, comment_id): """Comment delete by id. Also remove comment from diary detail Args: comment_id: Object_id. 
Return: None """ comment = Comment.objects.get_or_404(pk=comment_id) diary = Diary.objects(pk=comment.diary.pk) diary.update_one(pull__comments={'content': comment.content}) return comment.delete() def get_comment_list(self, start=0, end=20, order='-publish_time'): """Comment list. default query 20 comments and return if there should be next or prev page. Args: start: num defalut 0 end: num defalut 20 order: str defalut '-publish_time' Return: next: boolean prev: boolean comments: diaries list """ size = end - start prev = next = False comments = Comment.objects.order_by(order)[start:end + 1] if len(comments) - size > 0: next = True if start != 0: prev = True return prev, next, comments[start:end] class DiaryDispatcher(object): """ Diary dispatcher. Return diary collection objects. """ def get_all_diaries(self, order='-publish_time'): """Return Total diaries objects.""" return Diary.objects.order_by(order) def get_by_id(self, diary_id): """Diary detail. Only return diary detail by diary_id. Args: diary_id: objectID Return: diary: diary object """ try: diary = Diary.objects(pk=diary_id).first() except ValidationError: diary = None return diary def get_diary_width_navi(self, diary_id): """Diary Detail Width page navi boolean. get diary detail and if there should be prev or next page. 
Args: diary_id: objectID Return: diary: diary object prev: boolean, can be used as 'prev' logic next: boolean, can be used as 'next' logic """ prev = next = True diary = self.get_by_id(diary_id) if diary == self.get_first_diary(): next = False if diary == self.get_last_diary(): prev = False return prev, next, diary def get_first_diary(self): """Return First Diary object.""" return Diary.objects.order_by('-publish_time').first() def get_last_diary(self): """Return Last Diary object.""" return Diary.objects.order_by('publish_time').first() def get_prev_diary(self, pub_time): """Return Previous Diary object.""" return Diary.objects(publish_time__lt=pub_time ).order_by('-publish_time').first() def get_next_diary(self, pub_time): """Return Next Diary object.""" return Diary.objects(publish_time__gt=pub_time ).order_by('-publish_time').first() def get_next_or_prev_diary(self, prev_or_next, diary_id): """Diary route prev or next function. Use publish_time to determin what`s the routed diary. Args: prev_or_next: string 'prev' or 'next' diary_id: objectID Return: next_diary: routed diary object """ diary = self.get_by_id(diary_id) if prev_or_next == 'prev': next_diary = self.get_prev_diary(diary.publish_time) else: next_diary = self.get_next_diary(diary.publish_time) return next_diary def get_diary_count(self): """Return Diaries total number.""" return Diary.objects.count() def get_diary_list(self, start=0, end=10, order='-publish_time'): """Diary list. default query 10 diaries and return if there should be next or prev page. 
Args: start: num defalut 0 end: num defalut 10 order: str defalut '-publish_time' Return: next: boolean prev: boolean diaries: diaries list """ size = end - start prev = next = False diaries = Diary.objects.order_by(order)[start:end + 1] if len(diaries) - size > 0: next = True if start != 0: prev = True return prev, next, diaries[start:end] def edit_diary(self, diary_id, title, html, category, tags): """ Edit diary from admin receives title, content(html), tags and cagetory save title, content(html), pure content(further use), tags and cagetory also auto save author as current_user. this method will auto save new Category or Tag if not exist otherwise save in existed none with push only diary_object Args: diary_id: diary_id
pcmoritz/ray-1
python/ray/tune/tests/test_tune_server.py
Python
apache-2.0
5,470
0
import os import requests import socket import subprocess import unittest import json import ray from ray.rllib import _register_all from ray.tune.trial import Trial, Resources from ray.tune.web_server import TuneClient from ray.tune.trial_runner import TrialRunner def get_valid_port(): port = 4321 while True: try: print("Trying port", port) port_test_socket = socket.socket() port_test_socket.bind(("127.0.0.1", port)) port_test_socket.close() break except socket.error: port += 1 return port class TuneServerSuite(unittest.TestCase): def basicSetup(self): # Wait up to five seconds for placement groups when starting a trial os.environ["TUNE_PLACEMENT_GROUP_WAIT_S"] = "5" # Block for results even when placement groups are pending os.environ["TUNE_TRIAL_STARTUP_GRACE_PERIOD"] = "0" ray.init(num_cpus=4, num_gpus=1) port = get_valid_port() self.runner = TrialRunner(server_port=port) runner = self.runner kwargs = { "stopping_criterion": { "training_iteration": 3 }, "resources": Resources(cpu=1, gpu=1), } trials = [Trial("__fake", **kwargs), Trial("__fake", **kwargs)] for t in trials: runner.add_trial(t) client = TuneClient("localhost", port) return runner, client def tearDown(self): print("Tearing down....") try: self.runner._server.shutdown() self.runner = None except Exception as e: print(e) ray.shutdown() _register_all() def testAddTrial(self): runner, client = self.basicSetup() for i in range(3): runner.step() spec = { "run": "__fake", "stop": { "training_iteration": 3 }, "resources_per_trial": { "cpu": 1, "gpu": 1 }, } client.add_trial("test", spec) runner.step() all_trials = client.get_all_trials()["trials"] runner.step() self.assertEqual(len(all_trials), 3) def testGetTrials(self): runner, client = self.basicSetup() for i in range(3): runner.step() all_trials = client.get_all_trials()["trials"] self.assertEqual(len(all_trials), 2) tid = all_trials[0]["id"] client.get_trial(tid) runner.step() self.assertEqual(len(all_trials), 2) def testGetTrialsWithFunction(self): 
runner, client = self.basicSetup() test_trial = Trial( "__fake", trial_id="function_trial", stopping_criterion={"training_iteration": 3}, config={"callbacks": { "on_episode_start": lambda x: None }}) runner.add_trial(test_trial) for i in range(3): runner.step() all_trials = client.get_all_trials()["trials"] self.assertEqual(len(all_trials), 3) client.get_trial("function_trial") runner.step() self.assertEqual(len(all_trials), 3) def testStopTrial(self): """Check if Stop Trial works.""" runner, client = self.basicSetup() for i in range(2): runner.step() all_trials = client.get_all_trials()["trials"] self.assertEqual( len([t for t in all_t
rials if t["status"] == Trial.RUNNING]), 1)
tid = [t for t in all_trials if t["status"] == Trial.RUNNING][0]["id"] client.stop_trial(tid) runner.step() all_trials = client.get_all_trials()["trials"] self.assertEqual( len([t for t in all_trials if t["status"] == Trial.RUNNING]), 0) def testStopExperiment(self): """Check if stop_experiment works.""" runner, client = self.basicSetup() for i in range(2): runner.step() all_trials = client.get_all_trials()["trials"] self.assertEqual( len([t for t in all_trials if t["status"] == Trial.RUNNING]), 1) client.stop_experiment() runner.step() self.assertTrue(runner.is_finished()) self.assertRaises( requests.exceptions.ReadTimeout, lambda: client.get_all_trials(timeout=1)) def testCurlCommand(self): """Check if Stop Trial works.""" runner, client = self.basicSetup() for i in range(2): runner.step() stdout = subprocess.check_output( "curl \"http://{}:{}/trials\"".format(client.server_address, client.server_port), shell=True) self.assertNotEqual(stdout, None) curl_trials = json.loads(stdout.decode())["trials"] client_trials = client.get_all_trials()["trials"] for curl_trial, client_trial in zip(curl_trials, client_trials): self.assertEqual(curl_trial.keys(), client_trial.keys()) self.assertEqual(curl_trial["id"], client_trial["id"]) self.assertEqual(curl_trial["trainable_name"], client_trial["trainable_name"]) self.assertEqual(curl_trial["status"], client_trial["status"]) if __name__ == "__main__": import pytest import sys sys.exit(pytest.main(["-v", __file__]))
todddeluca/tfd
tfd/fasta.py
Python
mit
21,889
0.004203
#!/usr/bin/env python ''' What is a FASTA format file/string? This module follows the NCBI conventions: http://blast.ncbi.nlm.nih.gov/blastcgihelp.shtml ''' import cStringIO import math def idFromName(line): ''' line: a fasta nameline returns: an id parsed from the fasta nameline. The id is the first whitespace separated token after an optional namespace, etc. See the examples below. This covers a lot of cases that a sane person would put on a nameline. So needless to say it covers very few cases. Examples in the form nameline => return value: id => id id desc => id >id => id >id desc => id >ns|id => id >ns|id desc => id >ns|id| => id >ns|id|desc => id ns|id => id ns|id desc => id ns|id| => id ns|id|desc => id ns|id blah|desc => id Example namelines not covered: JGI-PSF GENOMES ftp://ftp.jgi-psf.org/pub/JGI_data/Nematostella_vectensis/v1.0/annotation/proteins.Nemve1FilteredModels1.fasta.gz >jgi|Nemve1|18|gw.48.1.1 >jgi|Nemve1|248885|estExt_fgenesh1_pg.C_76820001 ''' # This could probably be done with one regex, but I am too stupid and this way I can read it. # remove the leading '>' if there is one. if line.startswith('>'): line = line[1:] # keep only everything after the first pipe. will keep everything if there is no first pipe. pipe = line.find('|') if pipe > -1: line = line[line.find('|')+1:] # keep everything before the second pipe. will keep everything if there is no second pipe. pipe = line.find('|') if pipe > -1: line = line[:pipe] # return the first token as the id. return line.split()[0] def prettySeq(seq, n=60): ''' seq: one long bare (no nameline) sequence. e.g. MASNTVSAQGGSNRPVRDFSNIQDVAQFLLFDPIWNEQPGSIVPWKMNREQALAERYPELQTSEPSEDYSGPVESLELLPLEIKLDIMQYLSWEQISWCKHPWLWTRWYKDNVVRVSAITFED n: maximum length of sequence lines returns: seq split over multiple lines, all terminated by newlines. 
''' if len(seq) == 0: raise Exception('zero-length sequence', seq) seq = ''.join(seq.strip().split()) chunks = int(math.ceil(len(seq)/float(n))) pretty = '' for i in range(chunks): pretty += seq[i*n:(i+1)*n] + '\n' return pretty def numSeqsInFastaDb(path): num = 0 with open(path) as fh: for line in fh: if line.startswith('>'): num += 1 return num def readIds(fastaFile): ''' fastaFile: a file-like object or a path to a fasta file Yields each id in each nameline in each sequence in the fasta file. ''' for nameline in readNamelines(fastaFile): yield idFromName(nameline) def readNamelines(fastaFile): ''' fastaFile: a file-like object or a path to a fasta file Yields each nameline in each sequence in the fasta file. ''' for nameline, seq in readFasta(fastaFile): yield nameline def readFasta(fastaFile): ''' fastaFile: a file-like object or a path to a fasta file Yields a tuple of (nameline, sequence) for each sequence in the fasta file. Newlines are stripped from the nameline and sequence lines, and the sequence lines are concatenated into one long sequence string. Here is an examle (edited for length): ('>sp|P31946|1433B_HUMAN', 'MTMDKSELVQKAKLAEQAERYDDMAAAMKAVTEQGHELSNEERNLLSVAYKNVVGARRSSWRVISSIEQKT') ''' for lines in readFastaLines(fastaFile): nameline = lines[0].strip() seq = ''.join((l.strip() for l in lines[1:])) yield nameline, seq def readFastaLines(fastaFile): ''' fastaFile: a file-like object or a path to a fasta file yields: a seq of fasta sequence lines for each sequence in the fasta file. the first line is the nameline. the other lines are the sequence data lines. lines include newlines. ''' if isinstance(fastaFile, basestring): with open(fastaFile) as fh: for lines in relaxedFastaSeqIter(fh): yield lines else: for lines in relaxedFastaSeqIter(fastaFile): yield lines def splitSeq(seq): ''' seq: a well-formed fasta sequence string containing a single nameline, including '>' and sequence data lines. 
returns: tuple of nameline, including '>', without a newline, and concatenated sequence lines, without newlines e.g. ['>blahname', 'AFADFDSAFAFAFAFFAFAF'] ''' lines = seq.splitlines() name = lines[0].strip() chars = ''.join([l.strip() for l in lines[1:]]) return [name, chars] def relaxedFastaSeqIter(filehandle): ''' Parse the lines in filehandle, first removing any blank lines, and then yielding all well-formed fasta sequences and ignoring badly-formed seqs. A well-formed sequence has exactly one nameline followed by one or more sequence lines. Well-formed example: >sp|P27348|1433T_HUMAN MEKTELIQKAKLAEQAERYDDMATCMKAVTEQGAELSNEERNLLSVAYKNVVGGRRSAWR EGAEN Badly-formed example (no nameline): MEKTELIQKAKLAEQAERYDDMATCMKAVTEQGAELSNEERNLLSVAYKNVVGGRRSAWR EGAEN Badly-formed example (no sequence lines):
>sp|P27348|1433T_HUMAN ''' # lines guaranteed to have no blank lines by filterBlanks() # lines guaranteed to have have exactly one nameline as the first # element (except possibly the first l
ines yielded, which might not # have a nameline if the filehandle starts with a sequence line). for lines in splitFastaOnNamelines(filterBlanks(filehandle)): if lines[0][0] == '>' and len(lines) >= 2: yield lines def filterBlanks(lines): ''' Yield each line in lines that contains non-whitespace characters. Used to remove blank lines from FASTA files. ''' for line in lines: if line.strip(): yield line def splitFastaOnNamelines(filehandle): ''' Split the lines in filehandle on namelines. Yields a seq of lines, where the first line in the seq is a nameline (except if filehandle starts with a non-nameline) and the other lines are lines until the next nameline or the end of the file. Lines include newlines. The seq of lines will always contain at least one line. Only the first line will ever be a nameline. Example input (note that this is not well-formed fasta, since it starts with a sequence line, has a nameline with no sequence lines, and has blank lines within a sequence): VLSSIEQKSNEEGSEEKGPEVREYREKVETELQGVCDTVLGLLDSHLIKEAGDAESRVFY >sp|P31947|1433S_HUMAN >sp|P27348|1433T_HUMAN MEKTELIQKAKLAEQAERYDDMATCMKAVTEQGAELSNEERNLLSVAYKNVVGGRRSAWR EGAEN >sp|P63104|1433Z_HUMAN MDKNELVQKAKLAEQAERYDDMAACMKSVTEQGAELSNEERNLLSVAYKNVVGARRSSWR MKGDYYRYLAEVAAGDDKKGIVDQSQQAYQEAFEISKKEMQPTHPIRLGLALNFSVFYYE Example yielded output (note how every sequence except the first starts with a nameline): yield ['VLSSIEQKSNEEGSEEKGPEVREYREKVETELQGVCDTVLGLLDSHLIKEAGDAESRVFY\n'] yield ['>sp|P31947|1433S_HUMAN\n'] yield ['>sp|P27348|1433T_HUMAN\n', 'MEKTELIQKAKLAEQAERYDDMATCMKAVTEQGAELSNEERNLLSVAYKNVVGGRRSAWR\n', '\n', 'EGAEN\n', '\n'] yield ['>sp|P63104|1433Z_HUMAN\n', '\n', 'MDKNELVQKAKLAEQAERYDDMAACMKSVTEQGAELSNEERNLLSVAYKNVVGARRSSWR\n', 'MKGDYYRYLAEVAAGDDKKGIVDQSQQAYQEAFEISKKEMQPTHPIRLGLALNFSVFYYE\n'] Well-formed example input (note how the first line of the input is a nameline): >sp|P27348|1433T_HUMAN MEKTELIQKAKLAEQAERYDDMATCMKAVTEQGAELSNEERNLLSVAYKNVVGGRRSAWR EGAEN >sp|P63104|1433Z_HUMAN 
MDKNELVQKAKLAEQAERYDDMAACMKSVTEQGAELSNEERNLLSVAYKNVVGARRSSWR MKGDYYRYLAEVAAGDDKKGIVDQSQQAYQEAFEISKKEMQPTHPIRLGLALNFSVFYYE Well-formed example output (note how the first element of the first yielded list is a nameline): yield ['>sp|P27348|1433T_HUMAN\n', 'MEKTELIQKAKLAEQAERYDDMATCMKAVTEQGAELSNEERNLLSVAYKNVVGGRRSAWR\n', 'EGAEN\n'] yield ['>sp|P63104|1433Z_HUMAN\n', 'MDKNELV
fmierlo/django-default-settings
release/1.2/project/settings.py
Python
bsd-3-clause
3,293
0.002126
# Django settings for project project. DEBUG = True TEMPLATE_DEBUG = DEBUG ADMINS
= ( # ('Your Name', 'your_email@domain.com'), ) MANAGERS = ADMINS DATABASES = { 'default': { 'ENGINE': 'django.db.backends.', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'. 'NAME': '',
# Or path to database file if using sqlite3. 'USER': '', # Not used with sqlite3. 'PASSWORD': '', # Not used with sqlite3. 'HOST': '', # Set to empty string for localhost. Not used with sqlite3. 'PORT': '', # Set to empty string for default. Not used with sqlite3. } } # Local time zone for this installation. Choices can be found here: # http://en.wikipedia.org/wiki/List_of_tz_zones_by_name # although not all choices may be available on all operating systems. # On Unix systems, a value of None will cause Django to use the same # timezone as the operating system. # If running in a Windows environment this must be set to the same as your # system time zone. TIME_ZONE = 'America/Chicago' # Language code for this installation. All choices can be found here: # http://www.i18nguy.com/unicode/language-identifiers.html LANGUAGE_CODE = 'en-us' SITE_ID = 1 # If you set this to False, Django will make some optimizations so as not # to load the internationalization machinery. USE_I18N = True # If you set this to False, Django will not format dates, numbers and # calendars according to the current locale USE_L10N = True # Absolute path to the directory that holds media. # Example: "/home/media/media.lawrence.com/" MEDIA_ROOT = '' # URL that handles the media served from MEDIA_ROOT. Make sure to use a # trailing slash if there is a path component (optional in other cases). # Examples: "http://media.lawrence.com", "http://example.com/media/" MEDIA_URL = '' # URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a # trailing slash. # Examples: "http://foo.com/media/", "/media/". ADMIN_MEDIA_PREFIX = '/media/' # Make this unique, and don't share it with anybody. SECRET_KEY = '01234567890123456789012345678901234567890123456789' # List of callables that know how to import templates from various sources. 
TEMPLATE_LOADERS = ( 'django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader', # 'django.template.loaders.eggs.Loader', ) MIDDLEWARE_CLASSES = ( 'django.middleware.common.CommonMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', ) ROOT_URLCONF = 'project.urls' TEMPLATE_DIRS = ( # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates". # Always use forward slashes, even on Windows. # Don't forget to use absolute paths, not relative paths. ) INSTALLED_APPS = ( 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.sites', 'django.contrib.messages', # Uncomment the next line to enable the admin: # 'django.contrib.admin', )
wallaceicy06/Eligibility
eligibility.py
Python
mit
5,750
0.003304
import argparse import random import time import sys parser = argparse.ArgumentParser('eligibility') parser.add_argument('infile', \ help='the file containing the list of names to randomly sort') parser.add_argument('-s', '--spots', metavar='num', required=True, type=int, \ help='the number of spots available on campus') parser.add_argument('outfile', \ help='the file to output the results to') parser.add_argument('-d', '--delay', metavar='seconds', required=False, \ type=float, default=0.5, help='the delay between selections in ' 'decimal seconds (0.5 by default)') MCM_CREST = """ `/:. .:+. `///. McMURTRY COLLEGE .///. `` .///. .///- `` -///. .-:////+ooo+/:-. .///-``` -///. .:oyhhdyyddshydhydhhyo:. .///-```` `-/++sydyysddshhdsshddoyddddhys++/-```` `/hhhsyhsyhddyyyhhysshdddhsdhdhh/``` .shdssyohhhyo//+////+//+yhhhyyosyys- :yyyyy . + /+sssyo . yyyhy: :hhsyh .sdho:+sdddddm+ os . hhhhh: .hdsyyh `oddyoyymdmmdds ydd/``-:hyhh
dy. ohhdhhd `.:ddmdddddddd- + o-. hdssyy/ `hdhyyhh -`-ymmmddddms..s-- hdhhhdh. -hdyhydh /o:/mdmdmmdy: :h+ hyy
yhhh: -hdshydd /ymddhhhoydhohy/:+h dhyyyhh: `hdsyyddo /s+o-+hhhdmddddooy +ddysydh. sdhhhddh/ ` +ddd+sdddy/+/ yddhyyh+` .hdhyyyyys: .oyoydddo-+ddhs/. +ydddhyy- +hsyhhddho` :yhodoo+yssddds. sddyyyhh/ +yyddhhdddy.`.-:/::+ymdhs:`` +hddhyhyy/ :-``/shddddddddyo+/+oso+s++ooosdddhyhddy:```-: -oo::/+shdddddddddddddddddhdddddhyhdhyo///:+o: `sdhs-``/ydddhdddddddhhddddddhyhdhs:``-ohds.-. `+hdy:+o- `:ohhddddddddddddddyhhho. -o+:yho+.` `:hdy: -o. -/oyhdddddddhyso:. `o- :ydh:` `oyds- :hydddhoy: -omyo. -yh+ -yyhs:+yy: +hh- sys///ss` `+osso+` """ def welcome(spots): """ Prints the McMurtry crest to stdout. Returns when the user confirms the start of the program by typing any key. Arguments: spots - the number of spots that the program will allocate for housing Returns: none """ print MCM_CREST print 'Welcome to McMurtry College Eligibility Jack.' print 'This program will randomly allocate ' + str(spots) \ + ' spots for housing.' print 'Hit any key to begin...' raw_input('') def run_eligibility(names_file, spots, delay=0.5): """ Randomly sorts the provided names into two lists, one that is receiving housing and another that is a housing wait list. The number of spots for the former is determined by the variable passed to the function. Arguments: names_file - the path of the file containing a line separated list of names spots - the number of spots to allocate for housing delay (optional) - the delay between successive picks, default is 0.5 Returns: the list of students who were picked for on campus housing; the list of students (in order) who were picked to be on the wait list """ on_campus = [] waitlist = [] try: with open(names_file, 'r') as names_f: lines = names_f.readlines(); names = map(lambda l: l.rstrip('\n'), lines); if spots > len(names): print >> sys.stderr, 'Number of spots greater than names ' + \ 'list. No need for eligibility jack.' 
sys.exit(-1) print 'Receiving on campus housing:\n' num = 1 while names: name = random.choice(names) names.remove(name) time.sleep(delay) if num > spots: print str(num - spots) + ': ' + name waitlist.append(name) else: print str(num) + ': ' + name on_campus.append(name) if num == spots: print '\nHousing Waitlist:\n' num += 1 except IOError: print >> sys.stderr, 'There was an error opening the specified' + \ ' file \'' + names_file +'\' for read.' return on_campus, waitlist def write_results(out_file, on_campus, waitlist): """ Writes the specified lists of students to a file in the same format that run_eligibility prints to stdout. Arguments: out_file - the path of the file to write the results to on_campus - the list of students selected for on-campus housing waitlist - the list of students (in order) who were selected for the wait list Returns: none """ try: with open(out_file, 'w') as out_f: out_f.write('Receiving on campus housing:\n') for name_i in xrange(len(on_campus)): out_f.write(str(name_i + 1) + ': ' + on_campus[name_i] + '\n') out_f.write('\nHousing Waitlist:\n') for name_i in xrange(len(waitlist)): out_f.write(str(name_i + 1) + ': ' + waitlist[name_i] + '\n') except IOError: print >> sys.stderr, 'There was an error opening the specified' + \ ' file \'' + out_file +'\' for write.' # Main runner for the program. if __name__ == '__main__': args = parser.parse_args(); welcome(args.spots) oc, wl = run_eligibility(args.infile, args.spots, args.delay) write_results(args.outfile, oc, wl)
Senseg/robotframework
atest/robot/output/html_output_stats.py
Python
apache-2.0
1,194
0.005863
from __future__ import with_statement from robot.api import logger class WrongStat(AssertionError): ROBOT_CONTINUE_ON_FAILURE = True def get_total_stats(path): return get_all_stats(path)[0] def get_tag_stats(path): return get_all_stats(path)[1] def get_suite_stats(path): return get_all_stat
s(path)[2] def get_all_stats(path): logger.info('Getting stats from <a href="file://%s">%s</a>' % (path, path), html=True) stats_line = _get_stats_line(path) logger.debug('Stats line: %s' % stats_line) total, tags, suite = eval(stats_line) return total, tags, suite def _get_stats_line(path): prefix = 'window.output["stats"] = ' with open(path) as file: for line in file: if line.startswith(pr
efix): return line[len(prefix):-2] def verify_stat(stat, *attrs): expected = dict(_get_expected_stat(attrs)) if stat != expected: raise WrongStat('\n%-9s: %s\n%-9s: %s' % ('Got', stat, 'Expected', expected)) def _get_expected_stat(attrs): for key, value in (a.split(':', 1) for a in attrs): value = int(value) if value.isdigit() else str(value) yield str(key), value
DigitalCampus/django-nurhi-oppia
oppia/api/media.py
Python
gpl-3.0
1,856
0.001078
# oppia/api/media.py from django.conf import settings from django.contrib.auth import authenticate from django.
http import HttpResponseRedirect, Http404, HttpResponse, JsonResponse from django.utils.translation import ugettext_lazy as _ from django.views.decorators.csrf import csrf_exempt from django.contrib import messages from oppia.api.publish import get_messages_array from oppia.av.models import UploadedMedia from oppia.av import handler @csrf_exempt def upload_view(request): # get the messages to clear possible previous unprocessed me
ssages get_messages_array(request) if request.method != 'POST': return HttpResponse(status=405) required = ['username', 'password'] validation_errors = [] for field in required: if field not in request.POST: validation_errors.append("field '{0}' missing".format(field)) # authenticate user username = request.POST.get("username") password = request.POST.get("password") user = authenticate(username=username, password=password) if user is None or not user.is_active: messages.error(request, "Invalid username/password") response_data = { 'message': _('Authentication errors'), 'messages': get_messages_array(request) } return JsonResponse(response_data, status=401) if validation_errors: return JsonResponse({'errors': validation_errors}, status=400, ) result = handler.upload(request, user) if result['result'] == UploadedMedia.UPLOAD_STATUS_SUCCESS: media = result['media'] embed_code = media.get_embed_code(request.build_absolute_uri(media.file.url)) return JsonResponse({'embed_code': embed_code}, status=201) else: response = {'messages': result['errors']} return JsonResponse(response, status=400)
Vauxoo/server-tools
datetime_formatter/__manifest__.py
Python
agpl-3.0
666
0
# Copyright 2015, 2017 Jairo Llopis <jairo.llopis@tecnativa.com> # Copyright 2016 Tecnativa, S.L. - Vicent Cubells # Copyright 2018 Camptocamp SA # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). { "name": "Date & Time Formatter", "summary": "Helper functions to give correct format
to date[time] fields", "version": "12.0.1.0.0", "category": "Tools", "website": "https://github.com/OCA/server-tools", "author": "Grupo ESOC Ingeniería de Servicios, " "Tecnativa, " "Odoo Community Association (OCA)", "license": "AGPL-3", "installable": True, "depends": [ "base", ], }
knxd/pKNyX
pyknyx/core/dptXlator/dptXlatorDate.py
Python
gpl-3.0
4,000
0.002255
# -*- coding: utf-8 -*-

""" Python KNX framework

License
=======

 - B{PyKNyX} (U{https://github.com/knxd/pyknyx}) is Copyright:
  - © 2016-2017 Matthias Urlichs
  - PyKNyX is a fork of pKNyX
   - © 2013-2015 Frédéric Mantegazza

This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

or see:

 - U{http://www.gnu.org/licenses/gpl.html}

Module purpose
==============

Datapoint Types management.

Implements
==========

 - B{DPTXlatorDate}

Usage
=====

see L{DPTXlatorBoolean}

Note
====

KNX century encoding is as following:

 - if byte year >= 90, then real year is 20th century year
 - if byte year is < 90, then real year is 21th century year

Python time module does not encode century the same way:

 - if byte year >= 69, then real year is 20th century year
 - if byte year is < 69, then real year is 21th century year

The DPTXlatorDate class follows the python encoding.

@author: Frédéric Mantegazza
@author: B. Malinowsky
@copyright: (C) 2013-2015 Frédéric Mantegazza
@copyright: (C) 2006, 2011 B. Malinowsky
@license: GPL
"""

import struct

from pyknyx.services.logger import logging; logger = logging.getLogger(__name__)
from pyknyx.core.dptXlator.dptId import DPTID
from pyknyx.core.dptXlator.dpt import DPT
from pyknyx.core.dptXlator.dptXlatorBase import DPTXlatorBase, DPTXlatorValueError


class DPTXlatorDate(DPTXlatorBase):
    """ DPTXlator class for Date (r3U5r4U4r1U7) KNX Datapoint Type

    Wire format, 3 bytes: rrrDDDDD rrrrMMMM rYYYYYYY

     - D: Day [1:31]
     - M: Month [1:12]
     - Y: Year [0:99]
     - r: reserved (0)

    Values are exposed as ``(day, month, year)`` tuples with a four-digit
    year; the century split follows the Python convention described in the
    module docstring (69..99 -> 19xx, 0..68 -> 20xx), NOT the KNX spec's
    90-based split.
    """
    DPT_Generic = DPT("11.xxx", "Generic", (0, 16777215))

    DPT_Date = DPT("11.001", "Date", ((1, 1, 1969), (31, 12, 2068)))

    def __init__(self, dptId):
        # Frame size is fixed at 3 bytes for this DPT.
        super(DPTXlatorDate, self).__init__(dptId, 3)

    def checkData(self, data):
        """ Validate that raw data fits in the 3-byte wire format.

        @raise DPTXlatorValueError: data outside [0x000000, 0xffffff]
        """
        if not 0x000000 <= data <= 0xffffff:
            raise DPTXlatorValueError("data %s not in (0x000000, 0xffffff)" % hex(data))

    def checkValue(self, value):
        """ Validate a (day, month, year) tuple element-wise against the DPT limits.

        NOTE(review): this indexes self._dpt.limits[0][index], which assumes
        tuple-of-tuples limits (as in DPT_Date); DPT_Generic's scalar limits
        would not support that — confirm callers never use it with Generic.

        @raise DPTXlatorValueError: any component out of range
        """
        for index in range(3):
            if not self._dpt.limits[0][index] <= value[index] <= self._dpt.limits[1][index]:
                raise DPTXlatorValueError("value not in range %s" % repr(self._dpt.limits))

    def dataToValue(self, data):
        """ Decode raw 24-bit KNX data into a (day, month, year) tuple. """
        day = (data >> 16) & 0x1f
        month = (data >> 8) & 0x0f
        year = data & 0x7f
        # Python-style century split (see module Note): 69..99 -> 19xx, 0..68 -> 20xx.
        if year >= 69:
            year += 1900
        else:
            year += 2000
        value = (day, month, year)
        #logger.debug("DPTXlatorDate._toValue(): value=%d" % value)
        return value

    def valueToData(self, value):
        """ Encode a (day, month, year) tuple into raw 24-bit KNX data.

        Inverse of L{dataToValue} (same century convention).
        """
        day = value[0]
        month = value[1]
        year = value[2]
        if year >= 2000:
            year -= 2000
        else:
            year -= 1900
        data = day << 16 | month << 8 | year
        #logger.debug("DPTXlatorDate.valueToData(): data=%s" % hex(data))
        return data

    def dataToFrame(self, data):
        # Split the 24-bit value into three big-endian bytes (day, month, year).
        data = [(data >> shift) & 0xff for shift in range(16, -1, -8)]
        return bytearray(struct.pack(">3B", *data))

    def frameToData(self, frame):
        # Reassemble the 24-bit value from the 3-byte big-endian frame.
        data = struct.unpack(">3B", frame)
        data = data[0] << 16 | data[1] << 8 | data[2]
        return data

    @property
    def day(self):
        # Day component of the current value tuple.
        return self.value[0]

    @property
    def month(self):
        # Month component of the current value tuple.
        return self.value[1]

    @property
    def year(self):
        # Four-digit year component of the current value tuple.
        return self.value[2]
AutohomeOps/Assets_Report
api_server/api_server/urls.py
Python
apache-2.0
846
0
"""api_server URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.10/topics/http/urls/ Examples: Func
tion views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='ho
me') Including another URLconf 1. Import the include() function: from django.conf.urls import url, include 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf.urls import url, include from django.contrib import admin version = 'v1.0' urlpatterns = [ url(r'^admin/', admin.site.urls), url(r'api/%s/' % version, include('apis.urls')) ]
dims/glance
glance/version.py
Python
apache-2.0
686
0
# Copyright 2012 OpenStack Foundation
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

import pbr.version

# Single source of truth for the package version: pbr derives the version
# string from the installed package metadata (or git tags at build time).
version_info = pbr.version.VersionInfo('glance')
agarbuno/deepdish
doc/source/codefiles/saveable_example.py
Python
bsd-3-clause
542
0.001845
import deepdish as dd


class Foo(dd.util.SaveableRegistry):
    """Base saveable class holding a single value ``x``.

    Subclasses register themselves under a name via ``Foo.register`` and
    provide their own ``load_from_dict`` / ``save_to_dict`` pair.
    """

    def __init__(self, x):
        self.x = x

    @classmethod
    def load_from_dict(cls, d):
        # Fix: the first parameter of a classmethod is the class and is
        # conventionally named ``cls``, not ``self`` (the original misnamed it).
        """Reconstruct a ``Foo`` from a dict produced by ``save_to_dict``."""
        obj = Foo(d['x'])
        return obj

    def save_to_dict(self):
        """Return a dict representation suitable for ``load_from_dict``."""
        return {'x': self.x}


@Foo.register('bar')
class Bar(Foo):
    """Registered subclass adding a second value ``y``."""

    def __init__(self, x, y):
        self.x = x
        self.y = y

    @classmethod
    def load_from_dict(cls, d):
        """Reconstruct a ``Bar`` from a dict produced by ``save_to_dict``."""
        obj = Bar(d['x'], d['y'])
        return obj

    def save_to_dict(self):
        """Return a dict representation suitable for ``load_from_dict``."""
        return {'x': self.x, 'y': self.y}
ClemsonSoCUnix/django-sshkey
django_sshkey/models.py
Python
bsd-3-clause
5,547
0.008293
# Copyright (c) 2014-2016, Clemson University # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright notice, this # list of conditions and the following disclaimer. # # * Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # * Neither the name of Clemson University nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. 
from django.db import models
from django.contrib.auth.models import User
from django.core.exceptions import ValidationError
from django.db.models.signals import pre_save
from django.dispatch import receiver
try:
  # Prefer timezone-aware "now" when Django's timezone support is available.
  from django.utils.timezone import now
except ImportError:
  import datetime
  now = datetime.datetime.now
from django_sshkey.util import PublicKeyParseError, pubkey_parse
from django_sshkey import settings

class UserKey(models.Model):
  """An SSH public key owned by a user.

  The key text is normalized to OpenSSH format on clean(); the fingerprint
  is derived from the key and used to detect duplicates across all users.
  """
  user = models.ForeignKey(User, db_index=True)
  name = models.CharField(max_length=50, blank=True)
  key = models.TextField(max_length=2000)
  fingerprint = models.CharField(max_length=128, blank=True, db_index=True)
  created = models.DateTimeField(auto_now_add=True, null=True)
  last_modified = models.DateTimeField(null=True)
  last_used = models.DateTimeField(null=True)

  class Meta:
    db_table = 'sshkey_userkey'
    unique_together = [
      ('user', 'name'),
    ]

  def __unicode__(self):
    return unicode(self.user) + u': ' + self.name

  def clean_fields(self, exclude=None):
    # Strip whitespace before field validation so an all-whitespace key is
    # reported as "required" rather than passing the blank check.
    if not exclude or 'key' not in exclude:
      self.key = self.key.strip()
      if not self.key:
        raise ValidationError({'key': ["This field is required."]})

  def clean(self):
    """Normalize the key, derive the fingerprint, and default the name.

    Raises ValidationError if the key cannot be parsed, or if no name was
    given and the key carries no comment to use as one.
    """
    self.key = self.key.strip()
    if not self.key:
      return
    try:
      pubkey = pubkey_parse(self.key)
    except PublicKeyParseError as e:
      raise ValidationError(str(e))
    # Store the canonical OpenSSH representation, not the raw input.
    self.key = pubkey.format_openssh()
    self.fingerprint = pubkey.fingerprint()
    if not self.name:
      if not pubkey.comment:
        raise ValidationError('Name or key comment required')
      self.name = pubkey.comment

  def validate_unique(self, exclude=None):
    """Enforce per-user name uniqueness and global key uniqueness.

    Excludes self (by pk) when updating an existing row.
    """
    if self.pk is None:
      objects = type(self).objects
    else:
      objects = type(self).objects.exclude(pk=self.pk)
    if exclude is None or 'name' not in exclude:
      if objects.filter(user=self.user, name=self.name).count():
        message = 'You already have a key with that name'
        raise ValidationError({'name': [message]})
    if exclude is None or 'key' not in exclude:
      try:
        # Match on both fingerprint and key text; the ValidationError raised
        # below is NOT caught here (the except only targets DoesNotExist).
        other = objects.get(fingerprint=self.fingerprint, key=self.key)
        if self.user == other.user:
          message = 'You already have that key on file (%s)' % other.name
        else:
          message = 'Somebody else already has that key on file'
        raise ValidationError({'key': [message]})
      except type(self).DoesNotExist:
        pass

  def export(self, format='RFC4716'):
    """Return the key serialized as 'RFC4716' or 'PEM' (case-insensitive)."""
    pubkey = pubkey_parse(self.key)
    f = format.upper()
    if f == 'RFC4716':
      return pubkey.format_rfc4716()
    if f == 'PEM':
      return pubkey.format_pem()
    raise ValueError("Invalid format")

  def save(self, *args, **kwargs):
    # Pass update_last_modified=False (consumed here, not forwarded to
    # Model.save) to persist without bumping the last_modified stamp.
    if kwargs.pop('update_last_modified', True):
      self.last_modified = now()
    super(UserKey, self).save(*args, **kwargs)

  def touch(self):
    """Record a use of the key without changing last_modified."""
    self.last_used = now()
    self.save(update_last_modified=False)

@receiver(pre_save, sender=UserKey)
def send_email_add_key(sender, instance, **kwargs):
  """Notify the key's owner by email when a NEW key is about to be saved.

  No-op when notifications are disabled or the instance already has a pk
  (i.e. this is an update, not an addition).
  """
  if not settings.SSHKEY_EMAIL_ADD_KEY or instance.pk:
    return
  # Imports deferred so the mail/template machinery is only loaded when a
  # notification is actually sent.
  from django.template.loader import render_to_string
  from django.core.mail import EmailMultiAlternatives
  from django.core.urlresolvers import reverse
  context_dict = {
    'key': instance,
    'subject': settings.SSHKEY_EMAIL_ADD_KEY_SUBJECT,
  }
  # NOTE(review): 'request' appears to be attached to the instance by the
  # view before save — confirm against the view code.
  request = getattr(instance, 'request', None)
  if request:
    context_dict['request'] = request
    context_dict['userkey_list_uri'] = request.build_absolute_uri(
      reverse('django_sshkey.views.userkey_list'))
  text_content = render_to_string('sshkey/add_key.txt', context_dict)
  msg = EmailMultiAlternatives(
    settings.SSHKEY_EMAIL_ADD_KEY_SUBJECT,
    text_content,
    settings.SSHKEY_FROM_EMAIL,
    [instance.user.email],
  )
  if settings.SSHKEY_SEND_HTML_EMAIL:
    html_content = render_to_string('sshkey/add_key.html', context_dict)
    msg.attach_alternative(html_content, 'text/html')
  msg.send()
genialis/resolwe-bio-py
src/resdk/utils/table_cache.py
Python
apache-2.0
2,205
0
"""Cache util functions for ReSDKTables.""" import os import pickle import sys from shutil import rmtree from typing import Any from resdk.__about__ import __version__ def _default_cache_dir() -> str: """Return default cache directory specific for the current OS. Code originally from Orange3.misc.environ. """ if sys.platform == "darwin": base = os.path.expan
duser("~/Library/Caches") elif sys.platform == "win32": base = os.getenv("APPDATA", os.path.expanduser("~
/AppData/Local")) elif os.name == "posix": base = os.getenv("XDG_CACHE_HOME", os.path.expanduser("~/.cache")) else: base = os.path.expanduser("~/.cache") return base def cache_dir_resdk_base() -> str: """Return base ReSDK cache directory.""" return os.path.join(_default_cache_dir(), "ReSDK") def cache_dir_resdk() -> str: """Return ReSDK cache directory.""" v = __version__ if "dev" in v: # remove git commit hash v = v[: v.find("dev") + 3] base = os.path.join(cache_dir_resdk_base(), v) if sys.platform == "win32": # On Windows cache and data dir are the same. # Microsoft suggest using a Cache subdirectory return os.path.join(base, "Cache") else: return base def clear_cache_dir_resdk() -> None: """Delete all cache files from the default cache directory.""" cache_dir = cache_dir_resdk_base() if os.path.exists(cache_dir): rmtree(cache_dir) def load_pickle(pickle_file: str) -> Any: """Load object from the pickle file. :param pickle_file: file path :return: un-pickled object """ if os.path.exists(pickle_file): with open(pickle_file, "rb") as handle: return pickle.load(handle) def save_pickle(obj: Any, pickle_file: str, override=False) -> None: """Save given object into a pickle file. :param obj: object to bi pickled :param pickle_file: file path :param override: if True than override existing file :return: """ if not os.path.exists(pickle_file) or override: with open(pickle_file, "wb") as handle: pickle.dump(obj, handle, protocol=pickle.HIGHEST_PROTOCOL)
olavopeixoto/plugin.video.brplay
resources/lib/modules/kodi_util.py
Python
gpl-3.0
563
0.001776
import locale
import threading
from contextlib import contextmanager

# Serializes locale changes: locale.setlocale affects the whole process.
LOCALE_LOCK = threading.Lock()

# Brazilian short date/time display format, e.g. "Seg, 02 Mar - 14h30".
BR_DATESHORT_FORMAT = '%a, %d %b - %Hh%M'


@contextmanager
def setlocale(name):
    """Temporarily switch the process locale to *name*, restoring it on exit.

    If the requested locale is not installed, the body runs under the current
    locale instead (best effort) and the context value is None.
    """
    with LOCALE_LOCK:
        saved = locale.setlocale(locale.LC_ALL)
        try:
            yield locale.setlocale(locale.LC_ALL, name)
        except locale.Error:
            # Fix: the original bare `except:` also caught exceptions raised
            # by the `with` body (delivered at the yield) and then yielded a
            # second time, which breaks @contextmanager. Only the "locale not
            # available" failure from setlocale() should be swallowed.
            yield
        finally:
            locale.setlocale(locale.LC_ALL, saved)


def format_datetimeshort(date_time):
    """Format *date_time* with Brazilian month/weekday names (Python 2 str)."""
    with setlocale('pt_BR'):
        return date_time.strftime(BR_DATESHORT_FORMAT).decode('utf-8')
bskari/pi-rc
host_files.py
Python
gpl-2.0
5,869
0.002045
#!/bin/env python
"""Hosts files from the local directory using SSL."""
from __future__ import print_function

import signal
import socket
import ssl
import subprocess
import sys
import threading

# Global shutdown flag, set by the SIGINT handler and polled by both servers.
killed = False

# Python 2 / Python 3 compatibility shims for the HTTP server classes and
# the string-unescaping helper used by the /save/ handler.
# pylint: disable=C0411
if sys.version_info.major < 3:
    import SimpleHTTPServer
    import SocketServer
    import urllib
    Server = SocketServer.TCPServer
    SimpleHTTPRequestHandler = SimpleHTTPServer.SimpleHTTPRequestHandler
    urlopen = urllib.urlopen
    decode = lambda s: s.decode('string_escape')
else:
    from http.server import SimpleHTTPRequestHandler, HTTPServer  # pylint: disable=E0401
    Server = HTTPServer
    # pylint: disable=C0103
    import urllib.request
    urlopen = urllib.request.urlopen
    decode = lambda s: bytes(s, 'utf-8').decode('unicode-escape')


class InterruptibleServer(Server):
    """HTTP(S) server whose serve loop can be stopped via the `killed` flag."""

    def __init__(self, server_address, handler):
        if sys.version_info.major < 3:
            # Python 2's TCPServer is an old style class
            Server.__init__(self, server_address, handler)
        else:
            super().__init__(server_address, handler)

    def serve_until_shutdown(self):
        # Handle one request at a time so the `killed` flag is re-checked
        # between requests (handle_request honors the default socket timeout).
        global killed
        while not killed:
            self.handle_request()


class PostCommandsRequestHandler(SimpleHTTPRequestHandler):  # pylint: disable=R0903
    """Serves files over GET and handles commands send over POST."""

    def do_POST(self):  # pylint: disable=C0103
        """Handles POST requests.

        /command/ forwards the body to the local pi_pcm TCP service;
        /save/ writes the (unescaped) body to parameters.json;
        anything else 404s. Paths without a trailing slash are redirected.
        """
        if not self.path.endswith('/'):
            # Redirect browser - doing basically what Apache does
            self.send_response(301)
            self.send_header('Location', self.path + '/')
            self.end_headers()
        elif self.path == '/command/':
            # Forward this request on to the C server, because doing SSL in C
            # sounds hard
            content_length = int(self.headers.get('Content-Length'))
            post_data = self.rfile.read(content_length)
            print(post_data)
            try:
                # NOTE(review): 12345 looks like the pi_pcm control port —
                # confirm against the pi_pcm sources.
                sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                sock.connect(('localhost', 12345))
                sock.sendall(post_data)
            except Exception as exc:
                print('{}, sending 500'.format(exc))
                self.send_response(500)
                self.send_header('Content-type', 'text/plain; charset=utf-8')
                self.end_headers()
                # Firefox keeps expecting to get XML back. If we send back
                # plain text, it doesn't error out, but it generates a console
                # warning, so let's just play nice
                self.wfile.write('<p>Unable to contact pi_pcm; is it running?</p>')
                return
            finally:
                sock.close()
            self.send_response(200)
            self.end_headers()
        elif self.path == '/save/':
            content_length = int(self.headers.get('Content-Length'))
            post_data = decode(self.rfile.read(content_length))
            with open('parameters.json', 'w') as parameters_file:
                parameters_file.write(post_data)
            self.send_response(200)
            self.end_headers()
        else:
            self.send_response(404)
            self.end_headers()


def kill_servers(*_):
    """SIGINT handler: request both serve loops to exit."""
    global killed
    killed = True


def main():
    """Main."""
    signal.signal(signal.SIGINT, kill_servers)
    # The URL fetching stuff inherits this timeout
    socket.setdefaulttimeout(0.25)
    # Prevent "address already in use" errors
    Server.allow_reuse_address = True

    # Generate a self-signed certificate on first run so HTTPS can be served.
    base_cert_file_name = 'www.pi-rc.com'
    try:
        with open(base_cert_file_name + '.cert'):
            pass
    except IOError:
        print(
            '''Chrome requires HTTPS to access the webcam. This script can serve HTTPS
requests, but requires that a self-signed certificate be generated first. When
you access this page, you will get a warning - just click through it.

This script will now generate a self-signed certificate.'''
        )
        subprocess.call((
            'openssl', 'req', '-new', '-newkey', 'rsa:4096', '-days', '365', '-nodes', '-x509',
            '-subj', '/C=US/ST=Denial/L=Springfield/O=Dis/CN={}'.format(base_cert_file_name),
            '-keyout', '{}.key'.format(base_cert_file_name),
            '-out', '{}.cert'.format(base_cert_file_name)
        ))

    print('Starting servers')
    secure_port = 4443
    server_address = ('0.0.0.0', secure_port)
    secure_httpd = InterruptibleServer(server_address, PostCommandsRequestHandler)
    secure_httpd.socket = ssl.wrap_socket(
        secure_httpd.socket,
        server_side=True,
        certfile='{}.cert'.format(base_cert_file_name),
        keyfile='{}.key'.format(base_cert_file_name),
        ssl_version=ssl.PROTOCOL_TLSv1
    )
    insecure_port = 8080
    server_address = ('0.0.0.0', insecure_port)
    insecure_httpd = InterruptibleServer(server_address, PostCommandsRequestHandler)

    # Determine the LAN-facing IP by "connecting" a UDP socket to a public
    # address (no packet is sent for UDP connect) and reading the local end.
    try:
        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        skari_org = '149.154.158.78'
        # This won't actually make a connection
        sock.connect((skari_org, 1))
        ip = sock.getsockname()[0]
    except socket.gaierror:
        ip = 'localhost'
    finally:
        sock.close()

    print(
        'Running server on https://{ip}:{secure_port}/ and http://{ip}:{insecure_port}/'.format(
            ip=ip,
            secure_port=secure_port,
            insecure_port=insecure_port
        )
    )

    # HTTPS server runs on a background thread; HTTP runs on the main thread.
    secure_thread = threading.Thread(target=lambda: secure_httpd.serve_until_shutdown())
    secure_thread.start()
    insecure_httpd.serve_until_shutdown()


if __name__ == '__main__':
    main()
ehopsolidaires/ehop-solidaires.fr
ehop/ehopSolidaire_providers_register/forms.py
Python
agpl-3.0
10,906
0.006877
# -*- coding: utf-8 -*- # @copyright (C) 2014-2015 #Developpeurs 'BARDOU AUGUSTIN - BREZILLON ANTOINE - EUZEN DAVID - FRANCOIS SEBASTIEN - JOUNEAU NICOLAS - KIBEYA AISHA - LE CONG SEBASTIEN - # MAGREZ VALENTIN - NGASSAM NOUMI PAOLA JOVANY - OUHAMMOUCH SALMA - RIAND MORGAN - TREIMOLEIRO ALEX - TRULLA AURELIEN ' # @license https://www.gnu.org/licenses/gpl-3.0.html GPL version 3 from models import * from django.contrib.
auth.models import User as django_User from datetime import datetime from django import forms from django.contrib.gis.geos import Point class LoginForm(forms.ModelForm): class Meta: model = User widgets = { 'mail': forms.EmailInput(attrs={'aria-
invalid': 'true', 'pattern': 'email', 'required': 'required'}), } exclude = ['name', 'firstname', 'sex', 'city', 'zipCode', 'phone', 'idHomeAddress', 'idWorkAddress'] class EmailAuthBackend(object): def authenticate(self,username=None, password=None): try: user = django_User.objects.get(email=username) if user and check_password(password, user.password): return user except django_User.DoesNotExist: return None def authenticate2(self,username=None, password=None): try: user = Provider.objects.filter(idUser__mail__contains=username).first() if user and (check_password(password, user.password)): return user except User.DoesNotExist: return None def auth_email(self, username=None): try: user = Provider.objects.filter(idUser__mail__contains=username).first() if user: return user except User.DoesNotExist: return None def auth_email2(self, username=None): try: user = django_User.objects.get(email=username) if user: return user except User.DoesNotExist: return None class ContactForm(forms.Form): firstname = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'required': 'required'})) lastname = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'required': 'required'})) phone = forms.CharField(widget=forms.TextInput( attrs={'maxlength': '10', 'aria-invalid': 'true', 'pattern': 'phone', 'required': 'required'})) sender = forms.EmailField(widget=forms.EmailInput(attrs={'aria-invalid': 'false', 'pattern': 'email'}), required=False) subjectCHOICES = (('Demandeur','Je cherche un trajet'),('Offreur','Je souhaite proposer un trajet'), ('Infos','Informations diverses'),('Autre','Autre')) subject = forms.ChoiceField(choices=subjectCHOICES) goalOfApplicationCHOICES = [('', '')] + list(MenusSettings.objects.filter(type="goalOfApplication").values_list('string', 'string')) goalOfApplication = forms.ChoiceField(widget=forms.Select(attrs={'required':'required'}), choices=goalOfApplicationCHOICES, required=False) yearOfBirthCHOICES = (tuple((str(n), str(n)) for n in 
range(1900, datetime.now().year - 15))+(('',''),))[::-1] yearOfBirth = forms.ChoiceField(widget=forms.Select(attrs={'required':'required'}), choices=yearOfBirthCHOICES, required=False) message = forms.CharField(widget=forms.Textarea(attrs={'required': 'required'})) def __init__(self, *args, **kwargs): super(ContactForm, self).__init__(*args, **kwargs) self.fields['goalOfApplication'].choices = get_menus_settings('goalOfApplication') def get_menus_settings(type, required=True): if required: return [('', '')] + list(MenusSettings.objects.filter(type=type).values_list('string', 'string')) else: return list(MenusSettings.objects.filter(type=type).values_list('string', 'string')) class UserRegisterForm(forms.ModelForm): class Meta: model = User widgets = { 'name': forms.TextInput(attrs={'required': 'required'}), 'firstname': forms.TextInput(attrs={'required': 'required'}), 'sex': forms.RadioSelect(attrs={'required': 'required'}), 'city': forms.TextInput(attrs={'required': 'required'}), 'zipCode': forms.TextInput(attrs={'maxlength': '5', 'aria-invalid': 'true', 'pattern': 'zipCode', 'required': 'required'}), 'mail': forms.EmailInput(attrs={'aria-invalid': 'true', 'pattern': 'email', 'required': 'required'}), 'phone': forms.TextInput(attrs={'maxlength': '10', 'aria-invalid': 'true', 'pattern': 'phone', 'required': 'required'}), } exclude = ['idHomeAddress', 'idWorkAddress'] class ProviderRegisterForm(forms.ModelForm): class Meta: model = Provider howKnowledgeCHOICES = get_menus_settings('howKnowledge') widgets = { 'password': forms.PasswordInput(attrs={'id': 'password', 'required': 'required'}), 'company': forms.TextInput(attrs={'list':'datalistCompany', 'autocomplete':'off'}), 'howKnowledge': forms.Select(attrs={'required':'required'}, choices=howKnowledgeCHOICES) } exclude = ['idUser', 'is_active', 'last_login'] def __init__(self, *args, **kwargs): super(ProviderRegisterForm, self).__init__(*args, **kwargs) self.fields['howKnowledge'].choices = 
get_menus_settings('howKnowledge') class ProviderForm2(forms.ModelForm): class Meta: model = Provider howKnowledgeCHOICES = [('','')] + list(MenusSettings.objects.filter(type="howKnowledge").values_list('string', 'string')) widgets = { 'company': forms.TextInput(attrs={'list': 'datalistCompany', 'autocomplete': 'off'}), 'howKnowledge': forms.Select(attrs={'required': 'required'}, choices=howKnowledgeCHOICES) } exclude = ['idUser', 'is_active', 'last_login', 'password'] def __init__(self, *args, **kwargs): super(ProviderForm2, self).__init__(*args, **kwargs) self.fields['howKnowledge'].choices = get_menus_settings('howKnowledge') class AddressRegisterForm(forms.ModelForm): latlng = forms.CharField(widget=forms.HiddenInput(), required=False,) cityHide = forms.CharField(widget=forms.HiddenInput(), required=False,) zipCodeHide = forms.CharField(widget=forms.HiddenInput(), required=False,) class Meta: model = Address widgets = { 'street':forms.TextInput(attrs={'class': 'field', 'placeholder': 'Indiquez un lieu', 'autocomplete': 'on', 'required': 'required'}), } exclude = ['idAddress', 'point', 'city', 'zipCode'] def clean(self): cleaned_data = super(AddressRegisterForm, self).clean() coord = cleaned_data['latlng'].replace('(', '') city = cleaned_data['cityHide'] zipcode = cleaned_data['zipCodeHide'] if city == "": city = "undefined" if zipcode == "undefined" or zipcode == "": zipcode = 0 if coord == "" or coord == "undefined": raise forms.ValidationError("Bad address") coord = coord.replace(')', '') coordTab = coord.split(',') cleaned_data['point'] = 'POINT(%f %f)' % (float(coordTab[0]), float(coordTab[1])) cleaned_data['city'] = city cleaned_data['zipCode'] = zipcode return cleaned_data class AddressRegisterFormWork(forms.ModelForm): latlng = forms.CharField(widget=forms.HiddenInput(), required=False,) cityHide = forms.CharField(widget=forms.HiddenInput(), required=False,) zipCodeHide = forms.CharField(widget=forms.HiddenInput(), required=False,) class Meta: model = 
Address widgets = { 'street': forms.TextInput(attrs={'class': 'field', 'placeholder': 'Indiquez un lieu', 'autocomplete': 'on', 'required': 'required'}), } exclude = ['idAddress', 'point', 'city', 'zipCode'] def clean(self): cleaned_da
hazrpg/calibre
src/calibre/srv/utils.py
Python
gpl-3.0
15,349
0.003192
#!/usr/bin/env python2 # vim:fileencoding=utf-8 from __future__ import (unicode_literals, division, absolute_import, print_function) __license__ = 'GPL v3' __copyright__ = '2015, Kovid Goyal <kovid at kovidgoyal.net>' import errno, socket, select, os from Cookie import SimpleCookie from contextlib import closing from urlparse import parse_qs import repr as reprlib from email.utils import formatdate from operator import itemgetter from future_builtins import map from urllib import quote as urlquote from binascii import hexlify, unhexlify from calibre import prints from calibre.constants import iswindows from calibre.utils.config_base import tweaks from calibre.utils.localization import get_translator from calibre.utils.socket_inheritance import set_socket_inherit from calibre.utils.logging import ThreadSafeLog from calibre.utils.shared_file import share_open, raise_winerror HTTP1 = 'HTTP/1.0' HTTP11 = 'HTTP/1.1' DESIRED_SEND_BUFFER_SIZE = 16 * 1024 # windows 7 uses an 8KB sndbuf def http_date(timeval=None): return type('')(formatdate(timeval=timeval, usegmt=True)) class MultiDict(dict): # {{{ def __setitem__(self, key, val): vals = dict.get(self, key, []) vals.append(val) dict.__setitem__(self, key, vals) def __getitem__(self, key): return dict.__getitem__(self, key)[-1] @staticmethod def create_from_query_string(qs): ans = MultiDict() for k, v in parse_qs(qs, keep_blank_values=True).iteritems(): dict.__setitem__(ans, k.decode('utf-8'), [x.decode('utf-8') for x in v]) return ans def update_from_listdict(self, ld): for key, values in ld.iteritems(): for val in values: self[key] = val def items(self, duplicates=True): for k, v in dict.iteritems(self): if duplicates: for x in v: yield k, x else: yield k, v[-1] iteritems = items def values(self, duplicates=True): for v in dict.itervalues(self): if duplicates: for x in v: yield x else: yield v[-1] itervalues = values def set(self, key, val, replace_all=False): if replace_all: dict.__setitem__(self, key, [val]) else: 
self[key] = val def get(self, key, default=None, all=False): if all: try: return dict.__getitem__(self, key) except KeyError: return [] try: return self.__getitem__(key) except KeyError: return default def pop(self, key, default=None, all=False): ans = dict.pop(self, key, default) if ans is default: return [] if all else default return ans if all else ans[-1] def __repr__(self): return '{' + ', '.join('%s: %s' % (reprlib.repr(k), reprlib.repr(v)) for k, v in self.iteritems()) + '}' __str__ = __unicode__ = __repr__ def pretty(self, leading_whitespace=''): return leading_whitespace + ('\n' + leading_whitespace).join( '%s: %s' % (k, (repr(v) if isinstance(v, bytes) else v)) for k, v in sorted(self.items(), key=itemgetter(0))) # }}} def error_codes(*errnames): ''' Return error numbers for error names, ignoring non-existent names ''' ans = {getattr(errno, x, None) for x in errnames} ans.discard(None) return ans socket_errors_eintr = error_codes("EINTR", "WSAEINTR") socket_errors_socket_closed = error_codes( # errors indicating a disconnected connection "EPIPE", "EBADF", "WSAEBADF", "ENOTSOCK", "WSAENOTSOCK", "ENOTCONN", "WSAENOTCONN", "ESHUTDOWN", "WSAESHUTDOWN", "ETIMEDOUT", "WSAETIMEDOUT", "ECONNREFUSED", "WSAECONNREFUSED", "ECONNRESET", "WSAECONNRESET", "ECONNABORTED", "WSAECONNABORTED", "ENETRESET", "WSAENETRESET", "EHOSTDOWN", "EHOSTUNREACH", ) socket_errors_nonblocking = error_codes( 'EAGAIN', 'EWOULDBLOCK', 'WSAEWOULDBLOCK') def start_cork(sock): if hasattr(socket, 'TCP_CORK'): sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_CORK, 1) def stop_cork(sock): if hasattr(socket, 'TCP_CORK'): sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_CORK, 0) def create_sock_pair(port=0): '''C
reate socket pair. Works also on windows by using an ephemeral TCP port.''' if hasattr(socket, 'socketpair'): client_sock, srv_sock = socket.socketpair() set_socket_inherit(client_sock, False), set_socket_inherit(srv_sock, False) return client_sock, srv_sock #
Create a non-blocking temporary server socket temp_srv_sock = socket.socket() set_socket_inherit(temp_srv_sock, False) temp_srv_sock.setblocking(False) temp_srv_sock.bind(('127.0.0.1', port)) port = temp_srv_sock.getsockname()[1] temp_srv_sock.listen(1) with closing(temp_srv_sock): # Create non-blocking client socket client_sock = socket.socket() client_sock.setblocking(False) set_socket_inherit(client_sock, False) try: client_sock.connect(('127.0.0.1', port)) except socket.error as err: # EWOULDBLOCK is not an error, as the socket is non-blocking if err.errno not in socket_errors_nonblocking: raise # Use select to wait for connect() to succeed. timeout = 1 readable = select.select([temp_srv_sock], [], [], timeout)[0] if temp_srv_sock not in readable: raise Exception('Client socket not connected in {} second(s)'.format(timeout)) srv_sock = temp_srv_sock.accept()[0] set_socket_inherit(srv_sock, False) client_sock.setblocking(True) return client_sock, srv_sock def parse_http_list(header_val): """Parse lists as described by RFC 2068 Section 2. In particular, parse comma-separated lists where the elements of the list may include quoted-strings. A quoted-string could contain a comma. A non-quoted string could have quotes in the middle. Neither commas nor quotes count if they are escaped. Only double-quotes count, not single-quotes. 
""" if isinstance(header_val, bytes): slash, dquote, comma = b'\\",' empty = b'' else: slash, dquote, comma = '\\",' empty = '' part = empty escape = quote = False for cur in header_val: if escape: part += cur escape = False continue if quote: if cur == slash: escape = True continue elif cur == dquote: quote = False part += cur continue if cur == comma: yield part.strip() part = empty continue if cur == dquote: quote = True part += cur if part: yield part.strip() def parse_http_dict(header_val): 'Parse an HTTP comma separated header with items of the form a=1, b="xxx" into a dictionary' if not header_val: return {} ans = {} sep, dquote = b'="' if isinstance(header_val, bytes) else '="' for item in parse_http_list(header_val): k, v = item.partition(sep)[::2] if k: if v.startswith(dquote) and v.endswith(dquote): v = v[1:-1] ans[k] = v return ans def sort_q_values(header_val): 'Get sorted items from an HTTP header of type: a;q=0.5, b;q=0.7...' if not header_val: return [] def item(x): e, r = x.partition(';')[::2] p, v = r.partition('=')[::2] q = 1.0 if p == 'q' and v: try: q = max(0.0, min(1.0, float(v.strip()))) except Exception: pass return e.strip(), q return tuple(map(itemgetter(0), sorted(map(item, parse_http_list(header_val)), key=itemgetter(1), reverse=True))) def eintr_retry_call(func, *args, **kwargs): while True: try: return func(*args, **kwargs) except E
Jozhogg/iris
lib/iris/tests/test_analysis_calculus.py
Python
lgpl-3.0
26,634
0.003717
# (C) British Crown Copyright 2010 - 2014, Met Office # # This file is part of Iris. # # Iris is free software: you can redistribute it and/or modify it under # the terms of the GNU Lesser General Public License as published by the # Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Iris is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public License # along with Iris. If not, see <http://www.gnu.org/licenses/>. from __future__ import (absolute_import, division, print_function) # import iris tests first so that some things can be initialised before importing anything else import iris.tests as tests import unittest import numpy as np import iris import iris.analysi
s.calculus import iris.cube import iris.coord_systems import iris.coords import iris.tests.stock from iris.coords import DimCoord from iris.tests.test_interpolation import normalise_order class TestCubeDelta(tests.IrisTest): def test_invalid(self): cube = iris
.tests.stock.realistic_4d() with self.assertRaises(iris.exceptions.CoordinateMultiDimError): t = iris.analysis.calculus.cube_delta(cube, 'surface_altitude') with self.assertRaises(iris.exceptions.CoordinateMultiDimError): t = iris.analysis.calculus.cube_delta(cube, 'altitude') with self.assertRaises(ValueError): t = iris.analysis.calculus.cube_delta(cube, 'forecast_period') def test_delta_coord_lookup(self): cube = iris.cube.Cube(np.arange(10), standard_name='air_temperature') # Add a coordinate with a lot of metadata. coord = iris.coords.DimCoord(np.arange(10), long_name='projection_x_coordinate', var_name='foo', attributes={'source': 'testing'}, units='m', coord_system=iris.coord_systems.OSGB()) cube.add_dim_coord(coord, 0) delta = iris.analysis.calculus.cube_delta(cube, 'projection_x_coordinate') delta_coord = delta.coord('projection_x_coordinate') self.assertEqual(delta_coord, delta.coord(coord)) self.assertEqual(coord, cube.coord(delta_coord)) class TestDeltaAndMidpoint(tests.IrisTest): def _simple_filename(self, suffix): return tests.get_result_path(('analysis', 'delta_and_midpoint', 'simple%s.cml' % suffix)) def test_simple1_delta_midpoint(self): a = iris.coords.DimCoord((np.arange(4, dtype=np.float32) * 90) - 180, long_name='foo', units='degrees', circular=True) self.assertXMLElement(a, self._simple_filename('1')) delta = iris.analysis.calculus._construct_delta_coord(a) self.assertXMLElement(delta, self._simple_filename('1_delta')) midpoint = iris.analysis.calculus._construct_midpoint_coord(a) self.assertXMLElement(midpoint, self._simple_filename('1_midpoint')) def test_simple2_delta_midpoint(self): a = iris.coords.DimCoord((np.arange(4, dtype=np.float32) * -90) + 180, long_name='foo', units='degrees', circular=True) self.assertXMLElement(a, self._simple_filename('2')) delta = iris.analysis.calculus._construct_delta_coord(a) self.assertXMLElement(delta, self._simple_filename('2_delta')) midpoint = iris.analysis.calculus._construct_midpoint_coord(a) 
self.assertXMLElement(midpoint, self._simple_filename('2_midpoint')) def test_simple3_delta_midpoint(self): a = iris.coords.DimCoord((np.arange(4, dtype=np.float32) * 90) - 180, long_name='foo', units='degrees', circular=True) a.guess_bounds(0.5) self.assertXMLElement(a, self._simple_filename('3')) delta = iris.analysis.calculus._construct_delta_coord(a) self.assertXMLElement(delta, self._simple_filename('3_delta')) midpoint = iris.analysis.calculus._construct_midpoint_coord(a) self.assertXMLElement(midpoint, self._simple_filename('3_midpoint')) def test_simple4_delta_midpoint(self): a = iris.coords.AuxCoord(np.arange(4, dtype=np.float32) * 90 - 180, long_name='foo', units='degrees') a.guess_bounds() b = a.copy() self.assertXMLElement(b, self._simple_filename('4')) delta = iris.analysis.calculus._construct_delta_coord(b) self.assertXMLElement(delta, self._simple_filename('4_delta')) midpoint = iris.analysis.calculus._construct_midpoint_coord(b) self.assertXMLElement(midpoint, self._simple_filename('4_midpoint')) def test_simple5_not_degrees_delta_midpoint(self): # Not sure it makes sense to have a circular coordinate which does not have a modulus but test it anyway. 
a = iris.coords.DimCoord(np.arange(4, dtype=np.float32) * 90 - 180, long_name='foo', units='meter', circular=True) self.assertXMLElement(a, self._simple_filename('5')) delta = iris.analysis.calculus._construct_delta_coord(a) self.assertXMLElement(delta, self._simple_filename('5_delta')) midpoints = iris.analysis.calculus._construct_midpoint_coord(a) self.assertXMLElement(midpoints, self._simple_filename('5_midpoint')) def test_simple6_delta_midpoint(self): a = iris.coords.DimCoord(np.arange(5, dtype=np.float32), long_name='foo', units='count', circular=True) midpoints = iris.analysis.calculus._construct_midpoint_coord(a) self.assertXMLElement(midpoints, self._simple_filename('6')) def test_singular_delta(self): # Test single valued coordinate mid-points when circular lon = iris.coords.DimCoord(np.float32(-180.), 'latitude', units='degrees', circular=True) r_expl = iris.analysis.calculus._construct_delta_coord(lon) self.assertXMLElement(r_expl, ('analysis', 'delta_and_midpoint', 'delta_one_element_explicit.xml')) # Test single valued coordinate mid-points when not circular lon.circular = False with self.assertRaises(ValueError): iris.analysis.calculus._construct_delta_coord(lon) def test_singular_midpoint(self): # Test single valued coordinate mid-points when circular lon = iris.coords.DimCoord(np.float32(-180.), 'latitude', units='degrees', circular=True) r_expl = iris.analysis.calculus._construct_midpoint_coord(lon) self.assertXMLElement(r_expl, ('analysis', 'delta_and_midpoint', 'midpoint_one_element_explicit.xml')) # Test single valued coordinate mid-points when not circular lon.circular = False with self.assertRaises(ValueError): iris.analysis.calculus._construct_midpoint_coord(lon) class TestCoordTrig(tests.IrisTest): def setUp(self): points = np.arange(20, dtype=np.float32) * 2.3 bounds = np.concatenate([[points - 0.5 * 2.3], [points + 0.5 * 2.3]]).T self.lat = iris.coords.AuxCoord(points, 'latitude', units='degrees', bounds=bounds) self.rlat = 
iris.coords.AuxCoord(np.deg2rad(points), 'latitude', units='radians', bounds=np.deg2rad(bounds)) def test_sin(self): sin_of_coord = iris.analysis.calculus._coord_sin(self.lat) sin_of_coord_radians = iris.analysis.calculus._coord_sin(self.rlat) # Check the values are correct (within a tolerance) np.testing.assert_array_almost_equal(np.sin(self.rlat.points), sin_of_coord.points) np.testing.assert_array_almost_equal(np.sin(self.rlat.bounds), sin_of_coord.bounds) # Check that the results of the sin function are almost equal when operating on a coord with degrees and radians np.testing.assert_a
treejames/erpnext
erpnext/stock/doctype/item_attribute/item_attribute.py
Python
agpl-3.0
1,275
0.025882
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors and contributors
# For license information, please see license.txt

from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
from frappe import _

class ItemAttribute(Document):
	"""Master document for an Item Attribute and the values it may take."""

	def validate(self):
		self.validate_duplication()
		self.validate_attribute_values()

	def validate_duplication(self):
		"""Ensure every attribute value and abbreviation appears only once.

		Also normalises each abbreviation to upper case before checking.
		Raises a validation error (frappe.throw) on the first duplicate found.
		"""
		# Sets give O(1) membership tests; the original list scans were O(n^2).
		seen_values, seen_abbrs = set(), set()
		for d in self.item_attribute_values:
			d.abbr = d.abbr.upper()
			if d.attribute_value in seen_values:
				frappe.throw(_("{0} must appear only once").format(d.attribute_value))
			seen_values.add(d.attribute_value)
			if d.abbr in seen_abbrs:
				frappe.throw(_("{0} must appear only once").format(d.abbr))
			seen_abbrs.add(d.abbr)

	def validate_attribute_values(self):
		"""Block removal of attribute values still referenced by Item Variants."""
		attribute_values = {d.attribute_value for d in self.item_attribute_values}

		variant_attributes = frappe.db.sql(
			"select DISTINCT attribute_value from `tabVariant Attribute` where attribute=%s",
			self.name)

		if variant_attributes:
			for d in variant_attributes:
				# d is a result row; d[0] is the attribute_value column.
				if d[0] not in attribute_values:
					frappe.throw(_("Attribute Value {0} cannot be removed from {1} as Item Variants exist with this Attribute.").format(d[0], self.name))
kelle/astropy
astropy/stats/lombscargle/implementations/tests/test_mle.py
Python
bsd-3-clause
1,921
0.000521
import pytest

import numpy as np
from numpy.testing import assert_allclose

from .....extern.six.moves import range
from ..mle import design_matrix, periodic_fit


@pytest.fixture
def t():
    """Ten random observation times in [0, 10), seeded for reproducibility."""
    rand = np.random.RandomState(42)
    return 10 * rand.rand(10)


@pytest.mark.parametrize('freq', [1.0, 2])
@pytest.mark.parametrize('dy', [None, 2.0])
@pytest.mark.parametrize('bias', [True, False])
def test_design_matrix(t, freq, dy, bias):
    # Single-term matrix: optional bias column, then sin and cos columns,
    # each divided by the errors (dy=None behaves like unit errors).
    X = design_matrix(t, freq, dy, bias=bias)
    assert X.shape == (t.shape[0], 2 + bool(bias))

    scale = dy if dy is not None else 1.0
    phase = 2 * np.pi * freq * t
    if bias:
        assert_allclose(X[:, 0], 1. / scale)
    assert_allclose(X[:, -2], np.sin(phase) / scale)
    assert_allclose(X[:, -1], np.cos(phase) / scale)


@pytest.mark.parametrize('nterms', range(4))
def test_multiterm_design_matrix(t, nterms):
    # With nterms harmonics the matrix has one bias column plus a
    # sin/cos pair per harmonic, all scaled by 1/dy.
    dy = 2.0
    freq = 1.5
    X = design_matrix(t, freq, dy=dy, bias=True, nterms=nterms)
    assert X.shape == (t.shape[0], 1 + 2 * nterms)
    assert_allclose(X[:, 0], 1. / dy)

    omega = 2 * np.pi * freq * t
    for term in range(1, nterms + 1):
        assert_allclose(X[:, 2 * term - 1], np.sin(term * omega) / dy)
        assert_allclose(X[:, 2 * term], np.cos(term * omega) / dy)


@pytest.mark.parametrize('nterms', range(1, 4))
@pytest.mark.parametrize('freq', [1, 2])
@pytest.mark.parametrize('fit_mean', [True, False])
def test_exact_mle_fit(nterms, freq, fit_mean):
    # Data generated exactly from the model must be recovered exactly
    # by the maximum-likelihood periodic fit.
    rand = np.random.RandomState(42)
    t = 10 * rand.rand(30)
    theta = -1 + rand.rand(2 * nterms + 1)

    y = theta[0] * np.ones(t.shape) if fit_mean else np.zeros(t.shape)
    for i in range(1, nterms + 1):
        y += theta[2 * i - 1] * np.sin(2 * np.pi * i * freq * t)
        y += theta[2 * i] * np.cos(2 * np.pi * i * freq * t)

    y_fit = periodic_fit(t, y, dy=1, frequency=freq, t_fit=t,
                         nterms=nterms, center_data=False,
                         fit_mean=fit_mean)
    assert_allclose(y, y_fit)
simontakite/sysadmin
pythonscripts/programmingpython/Preview/peoplegui--frame.py
Python
gpl-2.0
2,035
0.00344
"""
See peoplegui--old.py: the alternative here uses nested row frames with
fixed-width labels with pack() to achieve the same aligned layout as grid(),
but it takes two extra lines of code as is (though adding window resize
support makes the two techniques roughly the same--see later in the book).
"""

from tkinter import *
from tkinter.messagebox import showerror
import shelve

shelvename = 'class-shelve'
fieldnames = ('name', 'age', 'job', 'pay')


def makeWidgets():
    """Build the main window: one labeled Entry row per field plus buttons.

    Populates the module-global ``entries`` mapping (label -> Entry widget)
    used by fetchRecord/updateRecord, and returns the top-level window.
    """
    global entries
    window = Tk()
    window.title('People Shelve')
    form = Frame(window)
    form.pack()
    entries = {}
    for label in ('key',) + fieldnames:
        # One row frame per field: fixed-width label left, entry right,
        # which keeps the entries aligned without using grid().
        row = Frame(form)
        lab = Label(row, text=label, width=6)
        ent = Entry(row)
        row.pack(side=TOP)
        lab.pack(side=LEFT)
        ent.pack(side=RIGHT)
        entries[label] = ent
    Button(window, text="Fetch", command=fetchRecord).pack(side=LEFT)
    Button(window, text="Update", command=updateRecord).pack(side=LEFT)
    Button(window, text="Quit", command=window.quit).pack(side=RIGHT)
    return window


def fetchRecord():
    """Look up the record for the typed key and display its fields."""
    key = entries['key'].get()
    try:
        record = db[key]                 # fetch by key, show in GUI
    except KeyError:                     # was a bare except: only a missing key is expected here
        showerror(title='Error', message='No such key!')
    else:
        for field in fieldnames:
            entries[field].delete(0, END)
            entries[field].insert(0, repr(getattr(record, field)))


def updateRecord():
    """Create or update the record under the typed key from the GUI fields.

    NOTE(review): field text is passed through eval(), so strings must be
    quoted -- this also executes arbitrary expressions typed by the user;
    acceptable for a teaching demo, unsafe for untrusted input.
    """
    key = entries['key'].get()
    if key in db:
        record = db[key]                 # update existing record
    else:
        from person import Person        # make/store new one for key
        record = Person(name='?', age='?')
    for field in fieldnames:
        setattr(record, field, eval(entries[field].get()))  # eval: strings must be quoted
    db[key] = record


db = shelve.open(shelvename)
window = makeWidgets()
window.mainloop()
db.close()  # back here after quit or window close
simvisage/oricreate
oricreate/mapping_tasks/__init__.py
Python
gpl-3.0
210
0
from .map_to_surface import \ MapToSurface from .mapping_task import \ MappingTask from .mask_task import \ MaskTask from .move_task import \
MoveTask from .rotate_copy import \ RotateCopy
memphis-iis/demo-track
demotrack/model.py
Python
apache-2.0
1,712
0.001168
import logging

from .data import DefinedTable

logger = logging.getLogger(__name__)


def ensure_tables():
    """When called, ensure that all the tables that we need are created
    in the database. The real work is supplied by the DefinedTable base
    class.
    """
    for tab in [Subject, ExpCondition]:
        logger.debug("Creating table %s", tab.get_table_name())
        tab.ensure_table()


class Subject(DefinedTable):
    """An experimental subject that we are tracking in an experimental
    condition
    """

    @classmethod
    def get_table_name(cls):
        # First parameter renamed self -> cls: these are classmethods.
        return "Subjects"

    @classmethod
    def get_key_name(cls):
        return "subject_id"

    def __init__(
        self,
        subject_id=None,
        first_name=None,
        last_name=None,
        email=None,
        exp_condition=None
    ):
        self.subject_id = subject_id
        self.first_name = first_name
        self.last_name = last_name
        self.email = email
        self.exp_condition = exp_condition

    def errors(self):
        """Yield one message per validation problem (yields nothing when valid)."""
        if not self.subject_id:
            yield "Missing subject ID"
        if not self.exp_condition:
            yield "Missing Experimental Condition"


class ExpCondition(DefinedTable):
    """A single experimental condition that any number of subjects may
    be a part of
    """

    @classmethod
    def get_table_name(cls):
        return "Conditions"

    @classmethod
    def get_key_name(cls):
        return "condition_id"

    def __init__(
        self,
        condition_id=None,
        condition_name=None,
        description=None
    ):
        self.condition_id = condition_id
        self.condition_name = condition_name
        self.description = description
mverwe/JetRecoValidation
PuThresholdTuning/python/akPu3CaloJetSequence15_cff.py
Python
cc0-1.0
1,328
0.03012
# CMSSW configuration fragment: builds the akPu3Calo jet sequence for
# pileup-jet collection type 15 by cloning the base PbPb MC sequence
# modules onto the "akPu3CaloJets15" input collection.
import FWCore.ParameterSet.Config as cms
from HeavyIonsAnalysis.JetAnalysis.jets.akPu3CaloJetSequence_PbPb_mc_cff import *

#PU jets: type 15
# Gen-jet match, parton match, and JEC producers, all retargeted at the
# type-15 jet collection.
akPu3Calomatch15 = akPu3Calomatch.clone(src = cms.InputTag("akPu3CaloJets15"))

akPu3Caloparton15 = akPu3Caloparton.clone(src = cms.InputTag("akPu3CaloJets15"))

akPu3Calocorr15 = akPu3Calocorr.clone(src = cms.InputTag("akPu3CaloJets15"))

# PAT jet producer wired to the three clones above.
akPu3CalopatJets15 = akPu3CalopatJets.clone(jetSource = cms.InputTag("akPu3CaloJets15"),
                                            jetCorrFactorsSource = cms.VInputTag(cms.InputTag("akPu3Calocorr15")),
                                            genJetMatch = cms.InputTag("akPu3Calomatch15"),
                                            genPartonMatch = cms.InputTag("akPu3Caloparton15"),
                                            )

# Analyzer over the PAT jets; doSubEvent enables sub-event MC bookkeeping.
akPu3CaloJetAnalyzer15 = akPu3CaloJetAnalyzer.clone(jetTag = cms.InputTag("akPu3CalopatJets15"),
                                                    doSubEvent = cms.untracked.bool(True)
                                                    )

# Module order matters: matches/corrections must run before the PAT
# producer, which must run before the analyzer.
akPu3CaloJetSequence15 = cms.Sequence(akPu3Calomatch15
                                      *
                                      akPu3Caloparton15
                                      *
                                      akPu3Calocorr15
                                      *
                                      akPu3CalopatJets15
                                      *
                                      akPu3CaloJetAnalyzer15
                                      )
anhstudios/swganh
data/scripts/templates/object/mobile/shared_dressed_liberation_patriot_rodian_female_01.py
Python
mit
471
0.046709
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES

from swgpy.object import *

def create(kernel):
	"""Build the shared creature template for this Rodian female NPC.

	Returns a Creature whose template/STF name point at the liberation
	patriot assets.  NOTE(review): attribute_template_id = 9 presumably
	selects a creature attribute set -- confirm against swgpy docs.
	"""
	result = Creature()

	result.template = "object/mobile/shared_dressed_liberation_patriot_rodian_female_01.iff"
	result.attribute_template_id = 9
	result.stfName("npc_name","rodian_base_female")

	#### BEGIN MODIFICATIONS ####
	#### END MODIFICATIONS ####

	return result