Dataset columns:

  repo_name   stringlengths   5 to 100
  path        stringlengths   4 to 231
  language    stringclasses   1 value
  license     stringclasses   15 values
  size        int64           6 to 947k
  score       float64         0 to 0.34
  prefix      stringlengths   0 to 8.16k
  middle      stringlengths   3 to 512
  suffix      stringlengths   0 to 8.17k
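The prefix / middle / suffix columns follow the usual fill-in-the-middle (FIM) layout: each row stores one Python file split into a prefix, a short masked middle span, and a suffix. Below is a minimal sketch of how such rows could be loaded and reassembled with the Hugging Face datasets library; the dataset identifier is a hypothetical placeholder, since this dump does not name the dataset.

    from datasets import load_dataset

    # Hypothetical identifier -- the dump above does not name the dataset.
    ds = load_dataset("example-org/python-fim-corpus", split="train")

    def reassemble(row):
        # A row's original file content is the concatenation of its three
        # spans; the `middle` span (3 to 512 chars per the stats above) is
        # the FIM prediction target.
        return row["prefix"] + row["middle"] + row["suffix"]

    row = ds[0]
    print(row["repo_name"], row["path"], row["license"], row["score"])
    print(reassemble(row)[:200])

The samples below are shown with prefix, middle, and suffix concatenated back into one file per row.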
repo: Venris/crazyflie-multilink | path: KM/kamera_testy/3d_position.py | language: Python | license: gpl-2.0 | size: 2,600 | score: 0.030385

import numpy as np

def wzorProst3d(p1, p2):
    wsp = np.array([p1[0], (p2[0]-p1[0]), p1[1], (p2[1]-p1[1]), p1[2], (p2[2]-p1[2])])
    return wsp

def plaszczyznaRownolegla(p1, p2, p3):
    p12 = np.array([p2[0]-p1[0], p2[1]-p1[1], p2[2]-p1[2]])
    p13 = np.array([p3[0]-p1[0], p3[1]-p1[1], p3[2]-p1[2]])
    wek = np.cross(p12, p13)
    A = wek[0]
    B = wek[1]
    C = wek[2]
    D = -A*p1[0] - B*p1[1] - C*p1[2]
    return A, B, C, D

def punktPrzeciecia(A, B, C, D, wsp):
    t = (-D - A*wsp[0] - B*wsp[2] - C*wsp[4]) / (A*wsp[1] + B*wsp[3] + C*wsp[5])
    x = wsp[1]*t + wsp[0]
    y = wsp[3]*t + wsp[2]
    z = wsp[5]*t + wsp[4]
    return x, y, z

def plaszczyznaProsotopadla(wsp, x, y, z):
    A = wsp[1]
    B = wsp[3]
    C = wsp[5]
    D = -A*x - B*y - C*z
    return A, B, C, D

def position_estimate(xp1, yp1, xp2, yp2):
    H = 2.0  # height of the top camera
    h = 0.4  # height of the side camera
    L = 2.5  # Y distance of the side camera from the point (0, 0, 0)
    corner_kam_gora = np.array([(4.0/3.0), -1.0, 0.0])
    width_gora = 8.0/3.0
    hight_gora = 2.0
    corner_kam_bok = np.array([-1.6, 1.0, 1.6])
    width_bok = 3.2
    hight_bok = 2.4

    # top camera pixels
    pkam_gora = np.array([0.0, 0.0, H])
    ppik_gora = np.array([(corner_kam_gora[0] - width_gora/1280.0*xp1),
                          (corner_kam_gora[1] + hight_gora/960.0*yp1),
                          corner_kam_gora[2]])
    wsp_gora = wzorProst3d(pkam_gora, ppik_gora)
    # auxiliary pixel used to determine the plane
    ppik1_gora = np.array([-ppik_gora[0]+0.0001, ppik_gora[1], ppik_gora[2]])

    # side camera pixels
    pkam_bok = np.array([0.0, -L, h])
    ppik_bok = np.array([(corner_kam_bok[0]+width_bok/640.0*xp2),
                         corner_kam_bok[1],
                         (corner_kam_bok[2]-hight_bok/480.0*yp2)])
    wsp_bok = wzorProst3d(pkam_bok, ppik_bok)

    # plane parallel to the top-camera ray, passing through the auxiliary pixel
    A, B, C, D = plaszczyznaRownolegla(pkam_gora, ppik_gora, ppik1_gora)
    # intersection of the parallel plane with the side-camera ray
    x1, y1, z1 = punktPrzeciecia(A, B, C, D, wsp_bok)
    # plane perpendicular to the top-camera ray, passing through (x1, y1, z1)
    A1, B1, C1, D1 = plaszczyznaProsotopadla(wsp_gora, x1, y1, z1)
    # intersection of the perpendicular plane with the top-camera ray
    x2, y2, z2 = punktPrzeciecia(A1, B1, C1, D1, wsp_gora)

    # approximate drone position
    x = (x1 + x2) / 2.0
    y = (y1 + y2) / 2.0
    z = (z1 + z2) / 2.0
    return x, y, z

######################################################################################

x, y, z = position_estimate(240, 820, 490, 280)
dron = [x, y, z]
print(dron)
repo: intel-analytics/BigDL | path: python/orca/src/bigdl/orca/cpu_info.py | language: Python | license: apache-2.0 | size: 2,911 | score: 0.000687

#
# Copyright 2016 The BigDL Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import subprocess
import re
from typing import Optional


def get_cgroup_cpuset():
    with open("/sys/fs/cgroup/cpuset/cpuset.cpus", "r") as f:
        content = f.readlines()
    cpu_set = []
    values = content[0].strip().split(",")
    for value in values:
        if "-" in value:
            # Parse a range like "2-4"
            start, end = value.split("-")
            cpu_set.extend([i for i in range(int(start), int(end) + 1)])
        else:
            cpu_set.append(int(value))
    return cpu_set


def get_cpu_info():
    cpuinfo = []
    args = ["lscpu", "--parse=CPU,Core,Socket"]
    lscpu_info = subprocess.check_output(args, universal_newlines=True).split("\n")

    # Get information about cpu, core, socket and node
    for line in lscpu_info:
        pattern = r"^([\d]+,[\d]+,[\d]+)"
        regex_out = re.search(pattern, line)
        if regex_out:
            cpuinfo.append(regex_out.group(1).strip().split(","))

    get_physical_core = {}
    get_socket = {}

    for line in cpuinfo:
        int_line = [int(x) for x in line]
        l_id, p_id, s_id = int_line
        get_physical_core[l_id] = p_id
        get_socket[l_id] = s_id

    return get_physical_core, get_socket


def schedule_workers(num_workers: int, cores_per_worker: Optional[int] = None):
    # If we are in a docker container whose --cpuset-cpus are set,
    # we can get available cpus in /sys/fs/cgroup/cpuset/cpuset.cpus.
    # If we are not in a container, this just returns all cpus.
    cpuset = get_cgroup_cpuset()
    cpuset = sorted(cpuset)
    l_core_to_p_core, l_core_to_socket = get_cpu_info()

    p2l = {}
    p_cores = set()
    for logical_core in cpuset:
        physical_core = l_core_to_p_core[logical_core]
        p_cores.add(physical_core)
        if physical_core not in p2l:
            p2l[physical_core] = logical_core
    p_cores = sorted(p_cores)

    if cores_per_worker is None:
        cores_per_worker = len(p_cores) // num_workers

    msg = "total number of cores requested must be smaller or" \
          " equal than the physical cores available"
    assert cores_per_worker * num_workers <= len(p_cores), msg

    schedule = []
    for i in range(num_workers):
        schedule.append([p2l[core] for core in
                         p_cores[i*cores_per_worker:(i+1)*cores_per_worker]])
    return schedule
repo: allenlavoie/tensorflow | path: tensorflow/contrib/distributions/python/kernel_tests/bijectors/cholesky_outer_product_test.py | language: Python | license: apache-2.0 | size: 5,365 | score: 0.006151

# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for Bijector."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import numpy as np

from tensorflow.contrib.distributions.python.ops import bijectors
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import array_ops
from tensorflow.python.platform import test


class CholeskyOuterProductBijectorTest(test.TestCase):
  """Tests the correctness of the Y = X @ X.T transformation."""

  def testBijectorMatrix(self):
    with self.test_session():
      bijector = bijectors.CholeskyOuterProduct(validate_args=True)
      self.assertEqual("cholesky_outer_product", bijector.name)
      x = [[[1., 0], [2, 1]], [[np.sqrt(2.), 0], [np.sqrt(8.), 1]]]
      y = np.matmul(x, np.transpose(x, axes=(0, 2, 1)))
      # Fairly easy to compute differentials since we have 2x2.
      dx_dy = [[[2. * 1, 0, 0],
                [2, 1, 0],
                [0, 2 * 2, 2 * 1]],
               [[2 * np.sqrt(2.), 0, 0],
                [np.sqrt(8.), np.sqrt(2.), 0],
                [0, 2 * np.sqrt(8.), 2 * 1]]]
      ildj = -np.sum(
          np.log(np.asarray(dx_dy).diagonal(
              offset=0, axis1=1, axis2=2)),
          axis=1)
      self.assertAllEqual((2, 2, 2), bijector.forward(x).get_shape())
      self.assertAllEqual((2, 2, 2), bijector.inverse(y).get_shape())
      self.assertAllClose(y, bijector.forward(x).eval())
      self.assertAllClose(x, bijector.inverse(y).eval())
      self.assertAllClose(
          ildj,
          bijector.inverse_log_det_jacobian(y).eval(), atol=0., rtol=1e-7)
      self.assertAllClose(
          -bijector.inverse_log_det_jacobian(y).eval(),
          bijector.forward_log_det_jacobian(x).eval(), atol=0., rtol=1e-7)

  def testNoBatchStatic(self):
    x = np.array([[1., 0], [2, 1]])  # np.linalg.cholesky(y)
    y = np.array([[1., 2], [2, 5]])  # np.matmul(x, x.T)
    with self.test_session() as sess:
      y_actual = bijectors.CholeskyOuterProduct().forward(x=x)
      x_actual = bijectors.CholeskyOuterProduct().inverse(y=y)
      [y_actual_, x_actual_] = sess.run([y_actual, x_actual])
    self.assertAllEqual([2, 2], y_actual.get_shape())
    self.assertAllEqual([2, 2], x_actual.get_shape())
    self.assertAllClose(y, y_actual_)
    self.assertAllClose(x, x_actual_)

  def testNoBatchDeferred(self):
    x = np.array([[1., 0], [2, 1]])  # np.linalg.cholesky(y)
    y = np.array([[1., 2], [2, 5]])  # np.matmul(x, x.T)
    with self.test_session() as sess:
      x_pl = array_ops.placeholder(dtypes.float32)
      y_pl = array_ops.placeholder(dtypes.float32)
      y_actual = bijectors.CholeskyOuterProduct().forward(x=x_pl)
      x_actual = bijectors.CholeskyOuterProduct().inverse(y=y_pl)
      [y_actual_, x_actual_] = sess.run([y_actual, x_actual],
                                        feed_dict={x_pl: x, y_pl: y})
    self.assertEqual(None, y_actual.get_shape())
    self.assertEqual(None, x_actual.get_shape())
    self.assertAllClose(y, y_actual_)
    self.assertAllClose(x, x_actual_)

  def testBatchStatic(self):
    x = np.array([[[1., 0], [2, 1]],
                  [[3., 0], [1, 2]]])  # np.linalg.cholesky(y)
    y = np.array([[[1., 2], [2, 5]],
                  [[9., 3], [3, 5]]])  # np.matmul(x, x.T)
    with self.test_session() as sess:
      y_actual = bijectors.CholeskyOuterProduct().forward(x=x)
      x_actual = bijectors.CholeskyOuterProduct().inverse(y=y)
      [y_actual_, x_actual_] = sess.run([y_actual, x_actual])
    self.assertEqual([2, 2, 2], y_actual.get_shape())
    self.assertEqual([2, 2, 2], x_actual.get_shape())
    self.assertAllClose(y, y_actual_)
    self.assertAllClose(x, x_actual_)

  def testBatchDeferred(self):
    x = np.array([[[1., 0], [2, 1]],
                  [[3., 0], [1, 2]]])  # np.linalg.cholesky(y)
    y = np.array([[[1., 2], [2, 5]],
                  [[9., 3], [3, 5]]])  # np.matmul(x, x.T)
    with self.test_session() as sess:
      x_pl = array_ops.placeholder(dtypes.float32)
      y_pl = array_ops.placeholder(dtypes.float32)
      y_actual = bijectors.CholeskyOuterProduct().forward(x=x_pl)
      x_actual = bijectors.CholeskyOuterProduct().inverse(y=y_pl)
      [y_actual_, x_actual_] = sess.run([y_actual, x_actual],
                                        feed_dict={x_pl: x, y_pl: y})
    self.assertEqual(None, y_actual.get_shape())
    self.assertEqual(None, x_actual.get_shape())
    self.assertAllClose(y, y_actual_)
    self.assertAllClose(x, x_actual_)


if __name__ == "__main__":
  test.main()
repo: palmtree5/Red-DiscordBot | path: tests/cogs/test_trivia.py | language: Python | license: gpl-3.0 | size: 799 | score: 0.001252

import textwrap

import yaml
from schema import SchemaError


def test_trivia_lists():
    from redbot.cogs.trivia import InvalidListError, get_core_lists, get_list

    list_names = get_core_lists()
    assert list_names
    problem_lists = []
    for l in list_names:
        try:
            get_list(l)
        except InvalidListError as exc:
            e = exc.__cause__
            if isinstance(e, SchemaError):
                problem_lists.append((l.stem, f"SCHEMA error:\n{e!s}"))
            else:
                problem_lists.append((l.stem, f"YAML error:\n{e!s}"))

    if problem_lists:
        msg = ""
        for name, error in problem_lists:
            msg += f"- {name}:\n{textwrap.indent(error, '  ')}"
        raise TypeError("The following lists contain errors:\n" + msg)
repo: ImageIntelligence/mimiron | path: mimiron/__init__.py | language: Python | license: mit | size: 334 | score: 0

# -*- coding: utf-8 -*-
from __future__ import unicode_literals

__all__ = [
    '__version_info__',
    '__version__',
    '__author__',
    '__author_email__',
]

__version_info__ = (0, 4, 3)
__version__ = '.'.join([unicode(i) for i in __version_info__])

__author__ = 'David Vuong'
__author_email__ = 'david@imageintelligence.com'
repo: khanhnnvn/poet | path: server.py | language: Python | license: mit | size: 10,080 | score: 0.001091

#!/usr/bin/python2.7

import os
import sys
import zlib
import base64
import socket
import os.path
import argparse
from datetime import datetime

import debug
import module
import config as CFG
from poetsocket import *

__version__ = '0.4.4'

POSH_PROMPT = 'posh > '
FAKEOK = """HTTP/1.1 200 OK\r
Date: Tue, 19 Mar 2013 22:12:25 GMT\r
Server: Apache\r
X-Powered-By: PHP/5.3.10-1ubuntu3.2\r
Content-Length: 364\r
Content-Type: text/plain\r
\r
body{background-color:#f0f0f2;margin:0;padding:0;font-family:"Open Sans","Helvetica Neue",Helvetica,Arial,sans-serif}div{width:600px;margin:5em auto;padding:50px;background-color:#fff;border-radius:1em}a:link,a:visited{color:#38488f;text-decoration:none}@media (max-width:700px){body{background-color:#fff}div{width:auto;margin:0 auto;border-radius:0;padding:1em}}"""


class PoetSocketServer(PoetSocket):
    def __init__(self, port):
        self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        self.s.bind(('', port))
        self.s.listen(1)

    def accept(self):
        return self.s.accept()


class PoetServer(object):
    """Core server functionality.

    Implements control shell, and necessary helper functions.

    Attributes:
        s: socket instance for initial client connection
        conn: socket instance for actual client communication
        cmds: list of supported control shell commands
    """

    def __init__(self, s):
        self.s = s
        self.conn = None
        self.builtins = ['exit', 'help']

        # exists so modules can stop server (used by selfdestruct)
        self.continue_ = True

    def start(self):
        """Poet server control shell."""
        debug.info('Entering control shell')
        self.conn = PoetSocket(self.s.accept()[0])
        print 'Welcome to posh, the Poet Shell!'
        print 'Running `help\' will give you a list of supported commands.'
        while True:
            try:
                found = False
                argv = raw_input(POSH_PROMPT).split()

                #
                # builtins
                #

                if argv == []:
                    continue
                if argv[0] == 'exit':
                    break
                elif argv[0] == 'help':
                    found = True
                    print 'Commands:\n  {}'.format('\n  '.join(sorted(self.builtins + module.server_commands.keys())))

                #
                # modules
                #

                # try to find command in registered modules
                for cmd, func in module.server_commands.iteritems():
                    if argv[0] == cmd:
                        found = True
                        try:
                            func(self, argv)
                        except Exception as e:
                            self.info(str(e.args))

                # see comment above for self.continue_ for why this is here
                if not self.continue_:
                    return

                if not found:
                    self.info('{}: command not found'.format(argv[0]))
            except KeyboardInterrupt:
                print
                continue
            except EOFError:
                print
                break

        self.conn.send('fin')
        debug.info('Exiting control shell')

    def info(self, msg):
        print 'posh : {}'.format(msg)

    def generic(self, req, write_flag=False, write_file=None):
        """Abstraction layer for exchanging with client and writing to file.

        Args:
            req: command to send to client
            write_flag: whether client response should be written
            write_file: optional filename to use for file
        """
        resp = self.conn.exchange(req)

        # TODO: this hardcoding is bad, should be some generic way to see
        # if response should be decompressed. maybe a list of all keywords
        # which cause a compressed response to come back
        if req == 'recon':
            resp = zlib.decompress(resp)

        print resp

        if write_flag:
            self.write(resp, req.split()[0], write_file)

    def write(self, response, prefix, write_file=None):
        """Write to server archive.

        Args:
            response: data to write
            prefix: directory to write file to (usually named after command
                executed)
            write_file: optional filename to use for file
        """
        ts = datetime.now().strftime('%Y%m%d%M%S')
        out_ts_dir = '{}/{}'.format(CFG.ARCHIVE_DIR, ts[:len('yyyymmdd')])
        out_prefix_dir = '{}/{}'.format(out_ts_dir, prefix)

        # create filename to write to
        if write_file:
            chunks = write_file.split('.')
            # separate the file extension from the file name, default to .txt
            ext = '.{}'.format('.'.join(chunks[1:])) if chunks[1:] else '.txt'
            outfile = '{}/{}-{}{}'.format(out_prefix_dir, chunks[0], ts, ext)
        else:
            outfile = '{}/{}-{}.txt'.format(out_prefix_dir, prefix, ts)

        # create directories if they don't exist
        if not os.path.isdir(CFG.ARCHIVE_DIR):
            os.mkdir(CFG.ARCHIVE_DIR)
        if not os.path.isdir(out_ts_dir):
            os.mkdir(out_ts_dir)
        if not os.path.isdir(out_prefix_dir):
            os.mkdir(out_prefix_dir)

        # if file already exists, append unique digit to the end
        if os.path.exists(outfile):
            count = 1
            orig_outfile = outfile
            outfile = orig_outfile + '.{}'.format(count)
            while os.path.exists(outfile):
                outfile = orig_outfile + '.{}'.format(count)
                count += 1

        with open(outfile, 'w') as f:
            f.write(response)

        print 'posh : {} written to {}'.format(prefix, outfile)

    def exec_preproc(self, inp):
        """Parse posh `exec' command line.

        Args:
            inp: raw `exec' command line

        Returns:
            Tuple suitable for expansion into as self.generic() parameters.
        """
        tmp = inp.split()
        write_file = None
        write_flag = tmp[1] == '-o'
        if write_flag:
            if '"' not in tmp[2]:
                write_file = tmp[2]
                del tmp[2]
            del tmp[1]
        tmp = ' '.join(tmp)
        return tmp, write_flag, write_file


def get_args():
    """ Parse arguments and return dictionary. """
    parser = argparse.ArgumentParser()
    parser.add_argument('-p', '--port')
    parser.add_argument('-v', '--version', action='store_true',
                        help='prints the Poet version number and exits')
    return parser.parse_args()


def print_header():
    """ Prints big ASCII logo and other info. """
    print """
                    _
    ____  ____  ___/ /_
   / __ \/ __ \/ _ \/ __/
  / /_/ / /_/ /  __/ /
 / .___/\____/\___/\__/   v{}
/_/
""".format(__version__)


def die(msg=None):
    if msg:
        debug.err(msg)
    debug.err('Poet server terminated')
    sys.exit(0)


def authenticate(ping):
    """Verify that the client is in fact connecting by checking the request
    path and the auth token contained in the cookie.

    Args:
        ping: http request sent from client (string)

    Returns:
        None: client authenticated successfully
        str: the reason authentication failed
    """
    if ping.startswith('GET /style.css HTTP/1.1'):
        if 'Cookie: c={};'.format(base64.b64encode(CFG.AUTH)) in ping:
            return None
        else:
            return 'AUTH TOKEN'
    else:
        return 'REQUEST'


def drop_privs():
    try:
        new_uid = int(os.getenv('SUDO_UID'))
        new_gid = int(os.getenv('SUDO_GID'))
    except TypeError:
        # they were running directly from a root user and didn't have
        # sudo env variables
        print """[!] WARNING: Couldn't drop privileges! To avoid this error,
run from a non-root user. You may also use sudo, from a non-root user.

Continue? (y/n)""",
        if raw_input().lower()[0] == 'y':
            return
        die()
repo: yuanagain/seniorthesis | path: venv/lib/python2.7/site-packages/scipy/sparse/dok.py | language: Python | license: mit | size: 17,654 | score: 0.000906

"""Dictionary Of Keys based matrix"""

from __future__ import division, print_function, absolute_import

__docformat__ = "restructuredtext en"

__all__ = ['dok_matrix', 'isspmatrix_dok']

import functools
import operator

import numpy as np

from scipy._lib.six import zip as izip, xrange, iteritems, itervalues

from .base import spmatrix, isspmatrix
from .sputils import (isdense, getdtype, isshape, isintlike, isscalarlike,
                      upcast, upcast_scalar, IndexMixin, get_index_dtype)

try:
    from operator import isSequenceType as _is_sequence
except ImportError:
    def _is_sequence(x):
        return (hasattr(x, '__len__') or hasattr(x, '__next__')
                or hasattr(x, 'next'))


class dok_matrix(spmatrix, IndexMixin, dict):
    """
    Dictionary Of Keys based sparse matrix.

    This is an efficient structure for constructing sparse
    matrices incrementally.

    This can be instantiated in several ways:
        dok_matrix(D)
            with a dense matrix, D

        dok_matrix(S)
            with a sparse matrix, S

        dok_matrix((M,N), [dtype])
            create the matrix with initial shape (M,N)
            dtype is optional, defaulting to dtype='d'

    Attributes
    ----------
    dtype : dtype
        Data type of the matrix
    shape : 2-tuple
        Shape of the matrix
    ndim : int
        Number of dimensions (this is always 2)
    nnz
        Number of nonzero elements

    Notes
    -----

    Sparse matrices can be used in arithmetic operations: they support
    addition, subtraction, multiplication, division, and matrix power.

    Allows for efficient O(1) access of individual elements.
    Duplicates are not allowed.
    Can be efficiently converted to a coo_matrix once constructed.

    Examples
    --------
    >>> import numpy as np
    >>> from scipy.sparse import dok_matrix
    >>> S = dok_matrix((5, 5), dtype=np.float32)
    >>> for i in range(5):
    ...     for j in range(5):
    ...         S[i, j] = i + j    # Update element

    """
    format = 'dok'

    def __init__(self, arg1, shape=None, dtype=None, copy=False):
        dict.__init__(self)
        spmatrix.__init__(self)

        self.dtype = getdtype(dtype, default=float)
        if isinstance(arg1, tuple) and isshape(arg1):  # (M,N)
            M, N = arg1
            self.shape = (M, N)
        elif isspmatrix(arg1):  # Sparse ctor
            if isspmatrix_dok(arg1) and copy:
                arg1 = arg1.copy()
            else:
                arg1 = arg1.todok()

            if dtype is not None:
                arg1 = arg1.astype(dtype)

            self.update(arg1)
            self.shape = arg1.shape
            self.dtype = arg1.dtype
        else:  # Dense ctor
            try:
                arg1 = np.asarray(arg1)
            except:
                raise TypeError('invalid input format')

            if len(arg1.shape) != 2:
                raise TypeError('expected rank <=2 dense array or matrix')

            from .coo import coo_matrix
            d = coo_matrix(arg1, dtype=dtype).todok()
            self.update(d)
            self.shape = arg1.shape
            self.dtype = d.dtype

    def getnnz(self, axis=None):
        if axis is not None:
            raise NotImplementedError("getnnz over an axis is not implemented "
                                      "for DOK format")
        return dict.__len__(self)

    def count_nonzero(self):
        return sum(x != 0 for x in itervalues(self))

    getnnz.__doc__ = spmatrix.getnnz.__doc__
    count_nonzero.__doc__ = spmatrix.count_nonzero.__doc__

    def __len__(self):
        return dict.__len__(self)

    def get(self, key, default=0.):
        """This overrides the dict.get method, providing type checking
        but otherwise equivalent functionality.
        """
        try:
            i, j = key
            assert isintlike(i) and isintlike(j)
        except (AssertionError, TypeError, ValueError):
            raise IndexError('index must be a pair of integers')
        if (i < 0 or i >= self.shape[0] or j < 0 or j >= self.shape[1]):
            raise IndexError('index out of bounds')
        return dict.get(self, key, default)

    def __getitem__(self, index):
        """If key=(i,j) is a pair of integers, return the corresponding
        element.  If either i or j is a slice or sequence, return a new sparse
        matrix with just these elements.
        """
        zero = self.dtype.type(0)
        i, j = self._unpack_index(index)

        i_intlike = isintlike(i)
        j_intlike = isintlike(j)

        if i_intlike and j_intlike:
            # Scalar index case
            i = int(i)
            j = int(j)
            if i < 0:
                i += self.shape[0]
            if i < 0 or i >= self.shape[0]:
                raise IndexError('index out of bounds')
            if j < 0:
                j += self.shape[1]
            if j < 0 or j >= self.shape[1]:
                raise IndexError('index out of bounds')
            return dict.get(self, (i,j), zero)
        elif ((i_intlike or isinstance(i, slice)) and
              (j_intlike or isinstance(j, slice))):
            # Fast path for slicing very sparse matrices
            i_slice = slice(i, i+1) if i_intlike else i
            j_slice = slice(j, j+1) if j_intlike else j
            i_indices = i_slice.indices(self.shape[0])
            j_indices = j_slice.indices(self.shape[1])
            i_seq = xrange(*i_indices)
            j_seq = xrange(*j_indices)
            newshape = (len(i_seq), len(j_seq))
            newsize = _prod(newshape)

            if len(self) < 2*newsize and newsize != 0:
                # Switch to the fast path only when advantageous
                # (count the iterations in the loops, adjust for complexity)
                #
                # We also don't handle newsize == 0 here (if
                # i/j_intlike, it can mean index i or j was out of
                # bounds)
                return self._getitem_ranges(i_indices, j_indices, newshape)

        i, j = self._index_to_arrays(i, j)

        if i.size == 0:
            return dok_matrix(i.shape, dtype=self.dtype)

        min_i = i.min()
        if min_i < -self.shape[0] or i.max() >= self.shape[0]:
            raise IndexError('index (%d) out of range -%d to %d)' %
                             (i.min(), self.shape[0], self.shape[0]-1))
        if min_i < 0:
            i = i.copy()
            i[i < 0] += self.shape[0]

        min_j = j.min()
        if min_j < -self.shape[1] or j.max() >= self.shape[1]:
            raise IndexError('index (%d) out of range -%d to %d)' %
                             (j.min(), self.shape[1], self.shape[1]-1))
        if min_j < 0:
            j = j.copy()
            j[j < 0] += self.shape[1]

        newdok = dok_matrix(i.shape, dtype=self.dtype)

        for a in xrange(i.shape[0]):
            for b in xrange(i.shape[1]):
                v = dict.get(self, (i[a,b], j[a,b]), zero)
                if v != 0:
                    dict.__setitem__(newdok, (a, b), v)

        return newdok

    def _getitem_ranges(self, i_indices, j_indices, shape):
        # performance golf: we don't want Numpy scalars here, they are slow
        i_start, i_stop, i_stride = map(int, i_indices)
        j_start, j_stop, j_stride = map(int, j_indices)

        newdok = dok_matrix(shape, dtype=self.dtype)

        for (ii, jj) in self.keys():
            # ditto for numpy scalars
            ii = int(ii)
            jj = int(jj)
            a, ra = divmod(ii - i_start, i_stride)
            if a < 0 or a >= shape[0] or ra != 0:
                continue
            b, rb = divmod(jj - j_start, j_stride)
            if b < 0 or b >= shape[1] or rb != 0:
                continue
            dict.__setitem__(newdok, (a, b),
                             dict.__getitem__(self, (ii, jj)))
        return newdok

    def __setitem__(self, index, x):
        if isinstance(index, tuple) and len(index) == 2:
            # Integer index fast path
            i, j = index
            if (isintlike(i) and isintlike(j) and 0 <= i < self.shape[0]
                    and 0 <= j <
repo: mozaik-association/mozaik | path: mozaik_account/tests/__init__.py | language: Python | license: agpl-3.0 | size: 153 | score: 0

# Copyright 2018 ACSONE SA/NV
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).

from . import test_accounting
from . import test_donation
repo: youtube/cobalt | path: starboard/build/run_bash.py | language: Python | license: bsd-3-clause | size: 1,290 | score: 0.003101

#!/usr/bin/env python3
#
# Copyright 2021 The Cobalt Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Wrapper script to run arbitrary bash files from GN.

This script should be used only when absolutely necessary and never in a
cross-platform way (that is, it should only be used for an action on a
particular platform, not a platform-independent target).
"""

import logging
import subprocess
import sys

if __name__ == '__main__':
  logging_format = '[%(levelname)s:%(filename)s:%(lineno)s] %(message)s'
  logging.basicConfig(
      level=logging.INFO, format=logging_format, datefmt='%H:%M:%S')

  logging.warning('Calling a bash process during GN build. '
                  'Avoid doing this whenever possible.')
  sys.exit(subprocess.call(sys.argv[1:]))
repo: gigglearrows/anniesbot | path: alembic/versions/4db5dc4bc98_added_a_table_for_timed_commands.py | language: Python | license: mit | size: 1,034 | score: 0.014507

"""Added a table for timed commands

Revision ID: 4db5dc4bc98
Revises: 514f4b9bc74
Create Date: 2015-12-23 00:00:59.156496

"""

# revision identifiers, used by Alembic.
revision = '4db5dc4bc98'
down_revision = '514f4b9bc74'
branch_labels = None
depends_on = None

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql


def upgrade():
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('tb_timer',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('name', sa.String(length=256), nullable=False),
                    sa.Column('action', mysql.TEXT(), nullable=False),
                    sa.Column('interval_online', sa.Integer(), nullable=False),
                    sa.Column('interval_offline', sa.Integer(), nullable=False),
                    sa.Column('enabled', sa.Boolean(), nullable=False),
                    sa.PrimaryKeyConstraint('id')
                    )
    ### end Alembic commands ###


def downgrade():
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('tb_timer')
    ### end Alembic commands ###
repo: changsimon/trove | path: trove/tests/api/mgmt/malformed_json.py | language: Python | license: apache-2.0 | size: 12,085 | score: 0

# Copyright 2013 OpenStack Foundation
# Copyright 2013 Rackspace Hosting
# Copyright 2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#

from collections import deque

from proboscis import test
from proboscis import asserts
from proboscis import after_class
from proboscis import before_class

from trove.tests.config import CONFIG
from trove.tests.api.instances import instance_info
from trove.tests.api.instances import VOLUME_SUPPORT
from trove.tests.util.users import Requirements
from trove.tests.util import assert_contains
from trove.tests.util import create_dbaas_client
from trove.common.utils import poll_until


@test(groups=["dbaas.api.mgmt.malformed_json"])
class MalformedJson(object):

    @before_class
    def setUp(self):
        self.reqs = Requirements(is_admin=False)
        self.user = CONFIG.users.find_user(self.reqs)
        self.dbaas = create_dbaas_client(self.user)
        volume = None
        if VOLUME_SUPPORT:
            volume = {"size": 1}
        self.instance = self.dbaas.instances.create(
            name="qe_instance",
            flavor_id=instance_info.dbaas_flavor_href,
            volume=volume,
            databases=[{"name": "firstdb", "character_set": "latin2",
                        "collate": "latin2_general_ci"}])

    @after_class
    def tearDown(self):
        self.dbaas.instances.delete(self.instance)

    @test
    def test_bad_instance_data(self):
        databases = "foo"
        users = "bar"
        try:
            self.dbaas.instances.create("bad_instance", 3, 3,
                                        databases=databases, users=users)
        except Exception as e:
            resp, body = self.dbaas.client.last_response
            httpCode = resp.status
            asserts.assert_equal(httpCode, 400,
                                 "Create instance failed with code %s,"
                                 " exception %s" % (httpCode, e))
            databases = "u'foo'"
            users = "u'bar'"
            assert_contains(
                e.message,
                ["Validation error:",
                 "instance['databases'] %s is not of type 'array'" % databases,
                 "instance['users'] %s is not of type 'array'" % users,
                 "instance['volume'] 3 is not of type 'object'"])

    @test
    def test_bad_database_data(self):
        _bad_db_data = "{foo}"
        try:
            self.dbaas.databases.create(self.instance.id, _bad_db_data)
        except Exception as e:
            resp, body = self.dbaas.client.last_response
            httpCode = resp.status
            asserts.assert_equal(httpCode, 400,
                                 "Create database failed with code %s, "
                                 "exception %s" % (httpCode, e))
            _bad_db_data = "u'{foo}'"
            asserts.assert_equal(e.message,
                                 "Validation error: "
                                 "databases %s is not of type 'array'" %
                                 _bad_db_data)

    @test
    def test_bad_user_data(self):

        def format_path(values):
            values = list(values)
            msg = "%s%s" % (values[0],
                            ''.join(['[%r]' % i for i in values[1:]]))
            return msg

        _user = []
        _user_name = "F343jasdf"
        _user.append({"name12": _user_name,
                      "password12": "password"})
        try:
            self.dbaas.users.create(self.instance.id, _user)
        except Exception as e:
            resp, body = self.dbaas.client.last_response
            httpCode = resp.status
            asserts.assert_equal(httpCode, 400,
                                 "Create user failed with code %s, "
                                 "exception %s" % (httpCode, e))
            err_1 = format_path(deque(('users', 0)))
            assert_contains(
                e.message,
                ["Validation error:",
                 "%(err_1)s 'name' is a required property" % {'err_1': err_1},
                 "%(err_1)s 'password' is a required property"
                 % {'err_1': err_1}])

    @test
    def test_bad_resize_instance_data(self):

        def _check_instance_status():
            inst = self.dbaas.instances.get(self.instance)
            if inst.status == "ACTIVE":
                return True
            else:
                return False

        poll_until(_check_instance_status)
        try:
            self.dbaas.instances.resize_instance(self.instance.id, "bad data")
        except Exception as e:
            resp, body = self.dbaas.client.last_response
            httpCode = resp.status
            asserts.assert_equal(httpCode, 400,
                                 "Resize instance failed with code %s, "
                                 "exception %s" % (httpCode, e))

    @test
    def test_bad_resize_vol_data(self):

        def _check_instance_status():
            inst = self.dbaas.instances.get(self.instance)
            if inst.status == "ACTIVE":
                return True
            else:
                return False

        poll_until(_check_instance_status)
        data = "bad data"
        try:
            self.dbaas.instances.resize_volume(self.instance.id, data)
        except Exception as e:
            resp, body = self.dbaas.client.last_response
            httpCode = resp.status
            asserts.assert_equal(httpCode, 400,
                                 "Resize instance failed with code %s, "
                                 "exception %s" % (httpCode, e))
            data = "u'bad data'"
            assert_contains(
                e.message,
                ["Validation error:",
                 "resize['volume']['size'] %s is not valid under "
                 "any of the given schemas" % data,
                 "%s is not of type 'integer'" % data,
                 "%s does not match '[0-9]+'" % data])

    @test
    def test_bad_change_user_password(self):
        password = ""
        users = [{"name": password}]

        def _check_instance_status():
            inst = self.dbaas.instances.get(self.instance)
            if inst.status == "ACTIVE":
                return True
            else:
                return False

        poll_until(_check_instance_status)
        try:
            self.dbaas.users.change_passwords(self.instance, users)
        except Exception as e:
            resp, body = self.dbaas.client.last_response
            httpCode = resp.status
            asserts.assert_equal(httpCode, 400,
                                 "Change usr/passwd failed with code %s, "
                                 "exception %s" % (httpCode, e))
            password = "u''"
            assert_contains(
                e.message,
                ["Validation error: users[0] 'password' "
                 "is a required property",
                 "users[0]['name'] %s is too short" % password,
                 "users[0]['name'] %s does not match "
                 "'^.*[0-9a-zA-Z]+.*$'" % password])

    @test
    def test_bad_grant_user_access(self):
        dbs = []

        def _check_instance_status():
            inst = self.dbaas.instances.get(self.instance)
            if inst.status == "ACTIVE":
                return True
            else:
                return False

        poll_until(_check_instance_status)
        try:
            self.dbaas.users.grant(self.instance, self.user, dbs)
        except Exception as e:
            resp, body = self.dbaas.client.last_response
            httpCode = resp.status
            assert
repo: ity/pants | path: tests/python/pants_test/engine/test_fs.py | language: Python | license: apache-2.0 | size: 7,436 | score: 0.007934

# coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).

from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
                        unicode_literals, with_statement)

import os
import unittest
from abc import abstractmethod
from contextlib import contextmanager

from pants.base.scm_project_tree import ScmProjectTree
from pants.engine.fs import (Dir, DirectoryListing, Dirs, FileContent, Files, Link, Path,
                             PathGlobs, ReadLink, Stat, Stats)
from pants.engine.nodes import FilesystemNode
from pants.util.meta import AbstractClass
from pants_test.engine.scheduler_test_base import SchedulerTestBase
from pants_test.testutils.git_util import MIN_REQUIRED_GIT_VERSION, git_version, initialize_repo


class FSTestBase(SchedulerTestBase, AbstractClass):

  _original_src = os.path.join(os.path.dirname(__file__), 'examples/fs_test')

  @abstractmethod
  @contextmanager
  def mk_project_tree(self, build_root_src):
    """Construct a ProjectTree for the given src path."""
    pass

  def specs(self, ftype, relative_to, *filespecs):
    return PathGlobs.create_from_specs(ftype, relative_to, filespecs)

  def assert_walk(self, ftype, filespecs, files):
    with self.mk_project_tree(self._original_src) as project_tree:
      scheduler, storage = self.mk_scheduler(project_tree=project_tree)
      result = self.execute(scheduler, storage, Stat, self.specs(ftype, '', *filespecs))[0]
      self.assertEquals(set(files), set([p.path for p in result]))

  def assert_content(self, filespecs, expected_content):
    with self.mk_project_tree(self._original_src) as project_tree:
      scheduler, storage = self.mk_scheduler(project_tree=project_tree)
      result = self.execute(scheduler, storage, FileContent, self.specs(Files, '', *filespecs))[0]
      def validate(e):
        self.assertEquals(type(e), FileContent)
        return True
      actual_content = {f.path: f.content for f in result if validate(f)}
      self.assertEquals(expected_content, actual_content)

  def assert_fsnodes(self, ftype, filespecs, subject_product_pairs):
    with self.mk_project_tree(self._original_src) as project_tree:
      scheduler, storage = self.mk_scheduler(project_tree=project_tree)
      request = self.execute_request(scheduler, storage, Stat, self.specs(ftype, '', *filespecs))

      # Validate that FilesystemNodes for exactly the given subjects are reachable under this
      # request.
      fs_nodes = [n for n, _ in scheduler.product_graph.walk(roots=request.roots)
                  if type(n) is FilesystemNode]
      self.assertEquals(set((n.subject, n.product) for n in fs_nodes), set(subject_product_pairs))

  def test_walk_literal(self):
    self.assert_walk(Files, ['4.txt'], ['4.txt'])
    self.assert_walk(Files, ['a/b/1.txt', 'a/b/2'], ['a/b/1.txt', 'a/b/2'])
    self.assert_walk(Files, ['c.ln/2'], ['a/b/2'])
    self.assert_walk(Files, ['d.ln/b/1.txt'], ['a/b/1.txt'])
    self.assert_walk(Files, ['a/3.txt'], ['a/3.txt'])
    self.assert_walk(Files, ['z.txt'], [])

  def test_walk_literal_directory(self):
    self.assert_walk(Dirs, ['c.ln'], ['a/b'])
    self.assert_walk(Dirs, ['a'], ['a'])
    self.assert_walk(Dirs, ['a/b'], ['a/b'])
    self.assert_walk(Dirs, ['z'], [])
    self.assert_walk(Dirs, ['4.txt', 'a/3.txt'], [])

  def test_walk_siblings(self):
    self.assert_walk(Files, ['*.txt'], ['4.txt'])
    self.assert_walk(Files, ['a/b/*.txt'], ['a/b/1.txt'])
    self.assert_walk(Files, ['c.ln/*.txt'], ['a/b/1.txt'])
    self.assert_walk(Files, ['a/b/*'], ['a/b/1.txt', 'a/b/2'])
    self.assert_walk(Files, ['*/0.txt'], [])

  def test_walk_recursive(self):
    self.assert_walk(Files, ['**/*.txt.ln'], ['4.txt'])
    self.assert_walk(Files, ['**/*.txt'], ['a/3.txt', 'a/b/1.txt'])
    self.assert_walk(Files, ['*.txt', '**/*.txt'], ['a/3.txt', 'a/b/1.txt', '4.txt'])
    self.assert_walk(Files, ['*', '**/*'], ['a/3.txt', 'a/b/1.txt', '4.txt', 'a/b/2'])
    self.assert_walk(Files, ['**/3.t*t'], ['a/3.txt'])
    self.assert_walk(Files, ['**/*.zzz'], [])

  def test_walk_recursive_directory(self):
    self.assert_walk(Dirs, ['*'], ['a', 'a/b'])
    self.assert_walk(Dirs, ['*/*'], ['a/b'])
    self.assert_walk(Dirs, ['**/*'], ['a/b'])
    self.assert_walk(Dirs, ['*/*/*'], [])

  def test_files_content_literal(self):
    self.assert_content(['4.txt'], {'4.txt': 'four\n'})
    self.assert_content(['a/4.txt.ln'], {'4.txt': 'four\n'})

  def test_files_content_directory(self):
    with self.assertRaises(Exception):
      self.assert_content(['a/b/'], {'a/b/': 'nope\n'})
    with self.assertRaises(Exception):
      self.assert_content(['a/b'], {'a/b': 'nope\n'})

  def test_nodes_file(self):
    self.assert_fsnodes(Files, ['4.txt'], [
        (Path('4.txt'), Stats),
      ])

  def test_nodes_symlink_file(self):
    self.assert_fsnodes(Files, ['c.ln/2'], [
        (Link('c.ln'), ReadLink),
        (Path('c.ln'), Stats),
        (Path('a/b'), Stats),
        (Path('a/b/2'), Stats),
      ])
    self.assert_fsnodes(Files, ['d.ln/b/1.txt'], [
        (Path('d.ln'), Stats),
        (Link('d.ln'), ReadLink),
        (Path('a'), Stats),
        (Path('a/b'), Stats),
        (Path('a/b/1.txt'), Stats),
      ])

  def test_nodes_symlink_globbed_dir(self):
    self.assert_fsnodes(Files, ['*/2'], [
        # Glob the root.
        (Dir(''), DirectoryListing),
        # Stat each entry.
        (Path('a'), Stats),
        (Path('c.ln'), Stats),
        (Path('d.ln'), Stats),
        (Path('4.txt'), Stats),
        # Read links to determine whether they're actually directories.
        (Link('c.ln'), ReadLink),
        (Link('d.ln'), ReadLink),
        # Stat the destination of one link (the other was already stat'd during the initial list).
        (Path('a/b'), Stats),
        # Look up the literal in each path.
        (Path('a/b/2'), Stats),
        (Path('a/2'), Stats),
      ])

  def test_nodes_symlink_globbed_file(self):
    self.assert_fsnodes(Files, ['d.ln/b/*.txt'], [
        # NB: Needs to stat every path on the way down to track whether
        # it is traversing a symlink.
        (Path('d.ln'), Stats),
        (Link('d.ln'), ReadLink),
        (Path('a'), Stats),
        (Path('a/b'), Stats),
        (Dir('a/b'), DirectoryListing),
        (Path('a/b/2'), Stats),
        (Path('a/b/1.txt'), Stats),
      ])


class PosixFSTest(unittest.TestCase, FSTestBase):

  @contextmanager
  def mk_project_tree(self, build_root_src):
    yield self.mk_fs_tree(build_root_src)


@unittest.skipIf(git_version() < MIN_REQUIRED_GIT_VERSION,
                 'The GitTest requires git >= {}.'.format(MIN_REQUIRED_GIT_VERSION))
class GitFSTest(unittest.TestCase, FSTestBase):

  @contextmanager
  def mk_project_tree(self, build_root_src):
    # Use mk_fs_tree only to feed the files for the git repo, not using its FileSystemProjectTree.
    worktree = self.mk_fs_tree(build_root_src).build_root
    with initialize_repo(worktree) as git_repo:
      yield ScmProjectTree(worktree, git_repo, 'HEAD')

  @unittest.skip('https://github.com/pantsbuild/pants/issues/3281')
  def test_walk_recursive(self):
    super(GitFSTest, self).test_walk_recursive()

  @unittest.skip('https://github.com/pantsbuild/pants/issues/3281')
  def test_files_content_literal(self):
    super(GitFSTest, self).test_files_content_literal()
repo: schleichdi2/OPENNFR-6.3-CORE | path: bitbake/lib/bb/ui/uievent.py | language: Python | license: gpl-2.0 | size: 4,475 | score: 0.00514

#
# Copyright (C) 2006 - 2007  Michael 'Mickey' Lauer
# Copyright (C) 2006 - 2007  Richard Purdie
#
# SPDX-License-Identifier: GPL-2.0-only
#

"""
Use this class to fork off a thread to receive event callbacks from the bitbake
server and queue them for the UI to process. This process must be used to avoid
client/server deadlocks.
"""

import socket, threading, pickle, collections
import logging
from xmlrpc.server import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler

# NOTE: bb and a module logger are used below but were not imported in the
# excerpt as captured; these two lines are added so the module is runnable.
import bb
logger = logging.getLogger(__name__)


class BBUIEventQueue:
    def __init__(self, BBServer, clientinfo=("localhost", 0)):

        self.eventQueue = []
        self.eventQueueLock = threading.Lock()
        self.eventQueueNotify = threading.Event()

        self.BBServer = BBServer
        self.clientinfo = clientinfo

        server = UIXMLRPCServer(self.clientinfo)
        self.host, self.port = server.socket.getsockname()

        server.register_function(self.system_quit, "event.quit")
        server.register_function(self.send_event, "event.sendpickle")
        server.socket.settimeout(1)

        self.EventHandle = None

        # the event handler registration may fail here due to cooker being in invalid state
        # this is a transient situation, and we should retry a couple of times before
        # giving up
        for count_tries in range(5):
            ret = self.BBServer.registerEventHandler(self.host, self.port)

            if isinstance(ret, collections.Iterable):
                self.EventHandle, error = ret
            else:
                self.EventHandle = ret
                error = ""

            if self.EventHandle != None:
                break

            errmsg = "Could not register UI event handler. Error: %s, host %s, "\
                     "port %d" % (error, self.host, self.port)
            bb.warn("%s, retry" % errmsg)

            import time
            time.sleep(1)
        else:
            raise Exception(errmsg)

        self.server = server

        self.t = threading.Thread()
        self.t.setDaemon(True)
        self.t.run = self.startCallbackHandler
        self.t.start()

    def getEvent(self):
        self.eventQueueLock.acquire()

        if len(self.eventQueue) == 0:
            self.eventQueueLock.release()
            return None

        item = self.eventQueue.pop(0)

        if len(self.eventQueue) == 0:
            self.eventQueueNotify.clear()

        self.eventQueueLock.release()
        return item

    def waitEvent(self, delay):
        self.eventQueueNotify.wait(delay)
        return self.getEvent()

    def queue_event(self, event):
        self.eventQueueLock.acquire()
        self.eventQueue.append(event)
        self.eventQueueNotify.set()
        self.eventQueueLock.release()

    def send_event(self, event):
        self.queue_event(pickle.loads(event))

    def startCallbackHandler(self):
        self.server.timeout = 1
        bb.utils.set_process_name("UIEventQueue")
        while not self.server.quit:
            try:
                self.server.handle_request()
            except Exception as e:
                import traceback
                logger.error("BBUIEventQueue.startCallbackHandler: Exception while trying to handle request: %s\n%s"
                             % (e, traceback.format_exc()))

        self.server.server_close()

    def system_quit(self):
        """
        Shut down the callback thread
        """
        try:
            self.BBServer.unregisterEventHandler(self.EventHandle)
        except:
            pass
        self.server.quit = True


class UIXMLRPCServer(SimpleXMLRPCServer):

    def __init__(self, interface):
        self.quit = False
        SimpleXMLRPCServer.__init__(self,
                                    interface,
                                    requestHandler=SimpleXMLRPCRequestHandler,
                                    logRequests=False, allow_none=True, use_builtin_types=True)

    def get_request(self):
        while not self.quit:
            try:
                sock, addr = self.socket.accept()
                sock.settimeout(1)
                return (sock, addr)
            except socket.timeout:
                pass
        return (None, None)

    def close_request(self, request):
        if request is None:
            return
        SimpleXMLRPCServer.close_request(self, request)

    def process_request(self, request, client_address):
        if request is None:
            return
        SimpleXMLRPCServer.process_request(self, request, client_address)
repo: dmlb2000/pacifica-archiveinterface | path: tests/posix_test.py | language: Python | license: lgpl-3.0 | size: 6,379 | score: 0.000314

#!/usr/bin/python
# -*- coding: utf-8 -*-
"""File used to unit test the pacifica archive interface."""
import unittest
import os
from stat import ST_MODE
from six import PY2
from pacifica.archiveinterface.archive_utils import bytes_type
from pacifica.archiveinterface.backends.posix.archive import PosixBackendArchive
import pacifica.archiveinterface.config as pa_config
from .common_setup_test import SetupTearDown


class TestPosixBackendArchive(unittest.TestCase, SetupTearDown):
    """Test the Posix backend archive."""

    def test_posix_backend_create(self):
        """Test creating a posix backend."""
        backend = PosixBackendArchive('/tmp')
        self.assertTrue(isinstance(backend, PosixBackendArchive))
        # easiest way to unit test is look at class variable
        # pylint: disable=protected-access
        self.assertEqual(backend._prefix, '/tmp')
        # pylint: enable=protected-access

    def test_posix_backend_open(self):
        """Test opening a file from posix backend."""
        filepath = '1234'
        mode = 'w'
        backend = PosixBackendArchive('/tmp')
        my_file = backend.open(filepath, mode)
        self.assertTrue(isinstance(my_file, PosixBackendArchive))
        # easiest way to unit test is look at class variable
        # pylint: disable=protected-access
        self.assertEqual(backend._file.__class__.__name__, 'ExtendedFile')
        # pylint: enable=protected-access
        my_file.close()

    def test_posix_backend_stage(self):
        """Test staging a file from posix backend."""
        filepath = '1234'
        mode = 'w'
        backend = PosixBackendArchive('/tmp')
        my_file = backend.open(filepath, mode)
        my_file.stage()
        # pylint: disable=protected-access
        self.assertTrue(my_file._file._staged)
        # pylint: enable=protected-access
        my_file.close()

    def test_posix_backend_open_twice(self):
        """Test opening a file from posix backend twice."""
        filepath = '1234'
        mode = 'w'
        backend = PosixBackendArchive('/tmp')
        my_file = backend.open(filepath, mode)
        my_file = backend.open(filepath, mode)
        self.assertTrue(isinstance(my_file, PosixBackendArchive))
        # easiest way to unit test is look at class variable
        # pylint: disable=protected-access
        self.assertEqual(backend._file.__class__.__name__, 'ExtendedFile')
        # pylint: enable=protected-access
        my_file.close()

    def test_posix_backend_open_id2f(self):
        """Test opening a file from posix backend using id2filename."""
        backend = PosixBackendArchive('/tmp')
        mode = 'w'
        my_file = backend.open('/a/b/d', mode)
        temp_cfg_file = pa_config.CONFIG_FILE
        pa_config.CONFIG_FILE = os.path.join(
            os.path.dirname(__file__), 'test_configs', 'posix-id2filename.cfg')
        backend = PosixBackendArchive('/tmp')
        my_file = backend.open(12345, mode)
        my_file.write('this is file 12345')
        my_file.close()
        # pylint: disable=protected-access
        my_file.patch(123456789, '/tmp{}'.format(my_file._id2filename(12345)))
        # pylint: enable=protected-access
        my_file = backend.open(123456789, 'r')
        text = my_file.read(-1)
        pa_config.CONFIG_FILE = temp_cfg_file
        self.assertTrue(isinstance(my_file, PosixBackendArchive))
        self.assertEqual(bytes_type('this is file 12345'), text)
        my_file.close()

    def test_posix_backend_close(self):
        """Test closing a file from posix backend."""
        filepath = '1234'
        mode = 'w'
        backend = PosixBackendArchive('/tmp/')
        my_file = backend.open(filepath, mode)
        # easiest way to unit test is look at class variable
        # pylint: disable=protected-access
        self.assertEqual(backend._file.__class__.__name__, 'ExtendedFile')
        my_file.close()
        self.assertEqual(backend._file, None)
        # pylint: enable=protected-access

    def test_posix_backend_write(self):
        """Test writing a file from posix backend."""
        filepath = '1234'
        mode = 'w'
        backend = PosixBackendArchive('/tmp/')
        my_file = backend.open(filepath, mode)
        error = my_file.write('i am a test string')
        if PY2:
            self.assertEqual(error, None)
        else:
            self.assertEqual(error, 18)
        my_file.close()

    def test_posix_file_mod_time(self):
        """Test the correct setting of a file mod time."""
        filepath = '1234'
        mode = 'w'
        backend = PosixBackendArchive('/tmp/')
        my_file = backend.open(filepath, mode)
        my_file.close()
        my_file.set_mod_time(1000000)
        my_file = backend.open(filepath, 'r')
        status = my_file.status()
        my_file.close()
        self.assertEqual(status.mtime, 1000000)

    def test_posix_file_permissions(self):
        """Test the correct setting of file permissions."""
        filepath = '12345'
        mode = 'w'
        backend = PosixBackendArchive('/tmp/')
        my_file = backend.open(filepath, mode)
        my_file.close()
        my_file.set_file_permissions()
        statinfo = oct(os.stat('/tmp/12345')[ST_MODE])[-3:]
        self.assertEqual(statinfo, '444')

    def test_posix_backend_read(self):
        """Test reading a file from posix backend."""
        self.test_posix_backend_write()
        filepath = '1234'
        mode = 'r'
        backend = PosixBackendArchive('/tmp/')
        my_file = backend.open(filepath, mode)
        buf = my_file.read(-1)
        self.assertEqual(buf, bytes_type('i am a test string'))
        my_file.close()

    def test_patch(self):
        """Test patching a file."""
        old_path = '/tmp/1234'
        backend = PosixBackendArchive('/tmp')
        my_file = backend.open('1234', 'w')
        my_file.close()
        backend.patch('5678', '/tmp/1234')
        # Error would be thrown on patch so nothing to assert
        self.assertEqual(old_path, '/tmp/1234')

    def test_seek(self):
        """Test seeking within a file."""
        backend = PosixBackendArchive('/tmp')
        my_file = backend.open('1234', 'w')
        my_file.write('something')
        my_file.close()
        my_file = backend.open('1234', 'r')
        my_file.seek(4)
        data = my_file.read(-1).decode('utf8')
        self.assertEqual(data, 'thing')
repo: artcz/euler | path: problems/02/2.py | language: Python | license: mit | size: 1,241 | score: 0.001612

# coding: utf-8
"""
Each new term in the Fibonacci sequence is generated by adding the previous
two terms. By starting with 1 and 2, the first 10 terms will be:

1, 2, 3, 5, 8, 13, 21, 34, 55, 89, ...

By considering the terms in the Fibonacci sequence whose values do not
exceed four million, find the sum of the even-valued terms.
"""

import itertools
import time


def oneliner():
    fib = lambda x: fib(x-1)+fib(x-2) if x > 2 else 1
    return sum(
        fib(z) for z in itertools.takewhile(
            lambda n: fib(n) <= 4*10**6, itertools.count()
        ) if fib(z) % 2 == 0
    )


def impl1():
    oldfib = 1
    fib = 1
    even = []
    while fib <= 4*10**6:
        if fib % 2 == 0:
            even.append(fib)
        fib, oldfib = fib + oldfib, fib
    return sum(even)


def impl2():
    prev, fib = 1, 1
    _sum = 0
    while fib <= 4*10**6:
        if fib % 2 == 0:
            _sum += fib
        fib, prev = fib + prev, fib
    return _sum


if __name__ == "__main__":
    def timeit(function):
        t1 = time.time()
        output = function()
        t2 = time.time()
        return output, t2-t1

    print timeit(impl1)
    print timeit(impl2)
    print timeit(oneliner)
repo: jorik041/dfvfs | path: dfvfs/file_io/qcow_file_io.py | language: Python | license: apache-2.0 | size: 1,285 | score: 0.003891

# -*- coding: utf-8 -*-
"""The QCOW image file-like object."""

import pyqcow

from dfvfs import dependencies
from dfvfs.file_io import file_object_io
from dfvfs.lib import errors
from dfvfs.resolver import resolver


dependencies.CheckModuleVersion(u'pyqcow')


class QcowFile(file_object_io.FileObjectIO):
  """Class that implements a file-like object using pyqcow."""

  def _OpenFileObject(self, path_spec):
    """Opens the file-like object defined by path specification.

    Args:
      path_spec: the path specification (instance of path.PathSpec).

    Returns:
      A file-like object.

    Raises:
      PathSpecError: if the path specification is incorrect.
    """
    if not path_spec.HasParent():
      raise errors.PathSpecError(
          u'Unsupported path specification without parent.')

    file_object = resolver.Resolver.OpenFileObject(
        path_spec.parent, resolver_context=self._resolver_context)
    qcow_file = pyqcow.file()
    qcow_file.open_file_object(file_object)
    return qcow_file

  def get_size(self):
    """Returns the size of the file-like object.

    Raises:
      IOError: if the file-like object has not been opened.
    """
    if not self._is_open:
      raise IOError(u'Not opened.')

    return self._file_object.get_media_size()
repo: jtpaasch/armyguys | path: venv/bin/rst2xetex.py | language: Python | license: mit | size: 811 | score: 0.001233

#!/home/jt/code/armyguys/venv/bin/python3.4

# $Id: rst2xetex.py 7038 2011-05-19 09:12:02Z milde $
# Author: Guenter Milde
# Copyright: This module has been placed in the public domain.

"""
A minimal front end to the Docutils Publisher, producing XeLaTeX source code.
"""

try:
    import locale
    locale.setlocale(locale.LC_ALL, '')
except:
    pass

from docutils.core import publish_cmdline

description = ('Generates XeLaTeX documents from standalone reStructuredText '
               'sources. '
               'Reads from <source> (default is stdin) and writes to '
               '<destination> (default is stdout). See '
               '<http://docutils.sourceforge.net/docs/user/latex.html> for '
               'the full reference.')

publish_cmdline(writer_name='xetex', description=description)
repo: Akrog/cinder | path: cinder/api/v1/snapshots.py | language: Python | license: apache-2.0 | size: 8,045 | score: 0

# Copyright 2011 Justin Santa Barbara
# All Rights Reserved.
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

"""The volumes snapshots api."""

from oslo_utils import strutils
import webob
from webob import exc

from cinder.api import common
from cinder.api.openstack import wsgi
from cinder.api import xmlutil
from cinder import exception
from cinder.i18n import _, _LI
from cinder.openstack.common import log as logging
from cinder import utils
from cinder import volume


LOG = logging.getLogger(__name__)


def _translate_snapshot_detail_view(context, snapshot):
    """Maps keys for snapshots details view."""
    d = _translate_snapshot_summary_view(context, snapshot)
    # NOTE(gagupta): No additional data / lookups at the moment
    return d


def _translate_snapshot_summary_view(context, snapshot):
    """Maps keys for snapshots summary view."""
    d = {}
    d['id'] = snapshot['id']
    d['created_at'] = snapshot['created_at']
    d['display_name'] = snapshot['display_name']
    d['display_description'] = snapshot['display_description']
    d['volume_id'] = snapshot['volume_id']
    d['status'] = snapshot['status']
    d['size'] = snapshot['volume_size']

    if snapshot.get('metadata') and isinstance(snapshot.get('metadata'),
                                               dict):
        d['metadata'] = snapshot['metadata']
    else:
        d['metadata'] = {}
    return d


def make_snapshot(elem):
    elem.set('id')
    elem.set('status')
    elem.set('size')
    elem.set('created_at')
    elem.set('display_name')
    elem.set('display_description')
    elem.set('volume_id')
    elem.append(common.MetadataTemplate())


class SnapshotTemplate(xmlutil.TemplateBuilder):
    def construct(self):
        root = xmlutil.TemplateElement('snapshot', selector='snapshot')
        make_snapshot(root)
        return xmlutil.MasterTemplate(root, 1)


class SnapshotsTemplate(xmlutil.TemplateBuilder):
    def construct(self):
        root = xmlutil.TemplateElement('snapshots')
        elem = xmlutil.SubTemplateElement(root, 'snapshot',
                                          selector='snapshots')
        make_snapshot(elem)
        return xmlutil.MasterTemplate(root, 1)


class SnapshotsController(wsgi.Controller):
    """The Snapshots API controller for the OpenStack API."""

    def __init__(self, ext_mgr=None):
        self.volume_api = volume.API()
        self.ext_mgr = ext_mgr
        super(SnapshotsController, self).__init__()

    @wsgi.serializers(xml=SnapshotTemplate)
    def show(self, req, id):
        """Return data about the given snapshot."""
        context = req.environ['cinder.context']

        try:
            snapshot = self.volume_api.get_snapshot(context, id)
            req.cache_db_snapshot(snapshot)
        except exception.NotFound:
            raise exc.HTTPNotFound()

        return {'snapshot': _translate_snapshot_detail_view(context,
                                                            snapshot)}

    def delete(self, req, id):
        """Delete a snapshot."""
        context = req.environ['cinder.context']

        LOG.info(_LI("Delete snapshot with id: %s"), id, context=context)

        try:
            snapshot = self.volume_api.get_snapshot(context, id)
            self.volume_api.delete_snapshot(context, snapshot)
        except exception.NotFound:
            raise exc.HTTPNotFound()
        return webob.Response(status_int=202)

    @wsgi.serializers(xml=SnapshotsTemplate)
    def index(self, req):
        """Returns a summary list of snapshots."""
        return self._items(req, entity_maker=_translate_snapshot_summary_view)

    @wsgi.serializers(xml=SnapshotsTemplate)
    def detail(self, req):
        """Returns a detailed list of snapshots."""
        return self._items(req, entity_maker=_translate_snapshot_detail_view)

    def _items(self, req, entity_maker):
        """Returns a list of snapshots, transformed through entity_maker."""
        context = req.environ['cinder.context']

        # pop out limit and offset, they are not search_opts
        search_opts = req.GET.copy()
        search_opts.pop('limit', None)
        search_opts.pop('offset', None)

        # filter out invalid option
        allowed_search_options = ('status', 'volume_id', 'display_name')
        utils.remove_invalid_filter_options(context, search_opts,
                                            allowed_search_options)

        snapshots = self.volume_api.get_all_snapshots(context,
                                                      search_opts=search_opts)
        limited_list = common.limited(snapshots, req)
        req.cache_db_snapshots(limited_list)
        res = [entity_maker(context, snapshot) for snapshot in limited_list]
        return {'snapshots': res}

    @wsgi.serializers(xml=SnapshotTemplate)
    def create(self, req, body):
        """Creates a new snapshot."""
        kwargs = {}
        context = req.environ['cinder.context']

        if not self.is_valid_body(body, 'snapshot'):
            raise exc.HTTPUnprocessableEntity()

        snapshot = body['snapshot']
        kwargs['metadata'] = snapshot.get('metadata', None)

        try:
            volume_id = snapshot['volume_id']
        except KeyError:
            msg = _("'volume_id' must be specified")
            raise exc.HTTPBadRequest(explanation=msg)

        try:
            volume = self.volume_api.get(context, volume_id)
        except exception.NotFound:
            raise exc.HTTPNotFound()

        force = snapshot.get('force', False)
        msg = _("Create snapshot from volume %s")
        LOG.info(msg, volume_id, context=context)

        if not utils.is_valid_boolstr(force):
            msg = _("Invalid value '%s' for force. ") % force
            raise exception.InvalidParameterValue(err=msg)

        if strutils.bool_from_string(force):
            new_snapshot = self.volume_api.create_snapshot_force(
                context,
                volume,
                snapshot.get('display_name'),
                snapshot.get('display_description'),
                **kwargs)
        else:
            new_snapshot = self.volume_api.create_snapshot(
                context,
                volume,
                snapshot.get('display_name'),
                snapshot.get('display_description'),
                **kwargs)
        req.cache_db_snapshot(new_snapshot)

        retval = _translate_snapshot_detail_view(context, new_snapshot)

        return {'snapshot': retval}

    @wsgi.serializers(xml=SnapshotTemplate)
    def update(self, req, id, body):
        """Update a snapshot."""
        context = req.environ['cinder.context']

        if not body:
            raise exc.HTTPUnprocessableEntity()

        if 'snapshot' not in body:
            raise exc.HTTPUnprocessableEntity()

        snapshot = body['snapshot']
        update_dict = {}

        valid_update_keys = (
            'display_name',
            'display_description',
        )

        for key in valid_update_keys:
            if key in snapshot:
                update_dict[key] = snapshot[key]

        try:
            snapshot = self.volume_api.get_snapshot(context, id)
            self.volume_api.update_snapshot(context, snapshot, update_dict)
        except exception.NotFound:
            raise exc.HTTPNotFound()

        snapshot.update(update_dict)
        req.cache_db_snapshot(snapshot)

        return {'snapshot': _translate_snapshot_detail_view(context,
                                                            snapshot)}


def create_resource(ext_mgr):
    return wsgi.Resource(SnapshotsController(ext_mgr))
races1986/SafeLanguage
CEM/tests/test_xmlreader.py
Python
epl-1.0
1,809
0.00387
import xml.sax import unittest import test_utils import xmlreader import os path = os.path.dirname(os.path.abspath(__file__) ) class XmlReaderTestCase(unittest.TestCase): def test_XmlDumpAllRevs(self): pages = [r for r in xmlreader.XmlDump(path + "/data/article-pear.xml", allrevisions=True).parse()] self.assertEquals(4, len(pages)) self.assertEquals(u"Automated conversion", pages[0].comment) self.assertEquals(u"Pear", pages[0].title) self.assertEquals(u"24278", pages[0].id) self.assertTrue(pages[0].text.startswith('Pears are [[tree]]s of')) self.assertEquals(u"Quercusrobur", pages[1].username) self.assertEquals(u"Pear", pages[0].title) def test_XmlDumpFirstRev(self): pages = [r for r in xmlreader.XmlDump(path + "/data/article-pear.xml").parse()] self.assertEquals(1, len(pages)) self.assertEquals(u"Automated conversion", pages[0].comment) self.assertEquals(u"Pear", pages[0].title) self.assertEquals(u"24278", pages[0].id) self.assertTrue(pages[0].text.startswith('Pears are [[tree]]s of')) self.assertTrue(not pages[0].isredirect) def test_XmlDumpRedirect(self): pages = [r for r in xmlreader.XmlDump(path + "/data/article-pyrus.xml").parse()] self.assertTrue(pages[0].isredirec
t) def test_MediaWikiXmlHandler(self): handler = xmlreader.MediaWikiXmlHandler() pages = [] def pageDone(page): pages.append(page) handler.setCallback(pageDone) xml.sax.parse(path + "/data/article-pear.xml", handler) self.assertEquals(u"Pear", pages[0].title) self.assertEquals(4, len(pages))
self.assertNotEquals("", pages[0].comment) if __name__ == '__main__': unittest.main()
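The last test above drives a push-style SAX handler through a callback. A minimal, self-contained sketch of that pattern using only the standard library; the element name is illustrative, not pywikibot's actual schema:

import xml.sax

class TitleCollector(xml.sax.ContentHandler):
    """Collects the text of every <title> element in a stream-parsed XML file."""
    def __init__(self):
        xml.sax.ContentHandler.__init__(self)
        self.titles = []
        self._buffer = None

    def startElement(self, name, attrs):
        if name == 'title':
            self._buffer = []        # start capturing character data

    def characters(self, content):
        if self._buffer is not None:
            self._buffer.append(content)

    def endElement(self, name):
        if name == 'title':
            self.titles.append(''.join(self._buffer))
            self._buffer = None

# handler = TitleCollector()
# xml.sax.parse('dump.xml', handler)
# print handler.titles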
HudsonWerks/OLED-SSD1306
ssd1306/fonts/stencil_24.py
Python
lgpl-3.0
98,068
0.156068
# coding=utf-8 # Module stencil_24 # generated from Stencil 18pt name = "Stencil 24" start_char = '!' end_char = chr(127) char_height = 24 space_width = 12 gap_width = 3 bitmaps = ( # @0 '!' (5 pixels wide) 0x00, # 0x00, # 0x00, # 0x00, # 0x70, # OOO 0xF8, # OOOOO 0xF8, # OOOOO 0xF8, # OOOOO 0xF8, # OOOOO 0x70, # OOO 0x70, # OOO 0x70, # OOO 0x30, # OO 0x20, # O 0x20, # O 0x70, # OOO 0xF8, # OOOOO 0xF8, # OOOOO 0xF8, # OOOOO 0x70, # OOO 0x00, # 0x00, # 0x00, # 0x00, # # @24 '"' (8 pixels wide) 0x00, # 0x00, # 0x00, # 0x00, # 0xEF, # OOO OOOO 0xE7, # OOO OOO 0xE7, # OOO OOO 0xE7, # OOO OOO 0xE6, # OOO OO 0x66, # OO OO 0x00, # 0x00, # 0x00, # 0x00, # 0x00, # 0x00, # 0x00, # 0x00, # 0x00, # 0x00, # 0x00, # 0x00, # 0x00, # 0x00, # # @48 '#' (14 pixels wide) 0x00, 0x00, # 0x00, 0x00, # 0x00, 0x00, # 0x00, 0x00, # 0x06, 0x30, # OO OO 0x06, 0x30, # OO OO 0x06, 0x30, # OO OO 0x0E, 0x70, # OOO OOO 0x3F, 0x7C, # OOOOOO OOOOO 0x7F, 0x7C, # OOOOOOO OOOOO 0x0C, 0x60, # OO OO 0x0C, 0x60, # OO OO 0x18, 0xC0, # OO OO 0x18, 0xC0, # OO OO 0xFE, 0xF8, # OOOOOOO OOOOO 0xFE, 0xF0, # OOOOOOO OOOO 0x39, 0x80, # OOO OO 0x31, 0x80, # OO OO 0x31, 0x80, # OO OO 0x31, 0x80, # OO OO 0x00, 0x00, # 0x00, 0x00, # 0x00, 0x00, # 0x00, 0x00, # # @96 '$' (11 pixels wide) 0x00, 0x00, # 0x00, 0x00, # 0x04, 0x00, # O 0x04, 0x00, # O 0x0F, 0x00, # OOOO 0x3D, 0x80, # OOOO OO 0x35, 0xC0, # OO O OOO 0x75, 0xC0, # OOO O OOO 0x79, 0xC0, # OOOO OOO 0x7E, 0x00, # OOOOOO 0x7F, 0x80, # OOOOOOOO 0x3F, 0xC0, # OOOOOOOO 0x3F, 0xC0, # OOOOOOOO 0x0F, 0xE0, # OOOOOOO 0x63, 0xE0, # OO OOOOO 0xF5, 0xE0, # OOOO O OOOO 0xF4, 0xE0, # OOOO O OOO 0xE4, 0xC0, # OOO O OO 0x75, 0xC0, # OOO O OOO 0x1F, 0x00, # OOOOO 0x04, 0x00, # O 0x04, 0x00, # O 0x00, 0x00, # 0x00, 0x00, # # @144 '%' (17 pixels wide) 0x00, 0x00, 0x00, # 0x00, 0x00, 0x00, # 0x00, 0x00, 0x00, # 0x00, 0x00, 0x00, # 0x36, 0x08, 0x00, # OO OO O 0x63, 0x18, 0x00, # OO OO OO 0xE3, 0x90, 0x00, # OOO OOO O 0xE3, 0xB0, 0x00, # OOO OOO OO 0xE3, 0xA0, 0x00, # OOO OOO O 0x63, 0x60, 0x00, # OO OO OO 0x36,
0x40, 0x00, # OO OO O 0x00, 0xC0, 0x00, # OO 0x00, 0x80, 0x00, # O 0x01, 0x9E, 0x00, # OO OOOO 0x01, 0x33, 0x00, #
O OO OO 0x03, 0x73, 0x80, # OO OOO OOO 0x02, 0x73, 0x80, # O OOO OOO 0x06, 0x73, 0x80, # OO OOO OOO 0x04, 0x33, 0x00, # O OO OO 0x0C, 0x1E, 0x00, # OO OOOO 0x00, 0x00, 0x00, # 0x00, 0x00, 0x00, # 0x00, 0x00, 0x00, # 0x00, 0x00, 0x00, # # @216 '&' (15 pixels wide) 0x00, 0x00, # 0x00, 0x00, # 0x00, 0x00, # 0x00, 0x00, # 0x07, 0xA0, # OOOO O 0x0F, 0x10, # OOOO O 0x1F, 0x10, # OOOOO O 0x1F, 0x10, # OOOOO O 0x1F, 0xA0, # OOOOOO O 0x1F, 0x80, # OOOOOO 0x0F, 0xCC, # OOOOOO OO 0x3F, 0xCC, # OOOOOOOO OO 0x77, 0xEC, # OOO OOOOOO OO 0x77, 0xE8, # OOO OOOOOO O 0xFB, 0xF0, # OOOOO OOOOOO 0xF9, 0xF8, # OOOOO OOOOOO 0xFD, 0xF8, # OOOOOO OOOOOO 0x7C, 0xFC, # OOOOO OOOOOO 0x7E, 0xFC, # OOOOOO OOOOOO 0x1F, 0x7E, # OOOOO OOOOOO 0x00, 0x00, # 0x00, 0x00, # 0x00, 0x00, # 0x00, 0x00, # # @264 ''' (3 pixels wide) 0x00, # 0x00, # 0x00, # 0x00, # 0xE0, # OOO 0xE0, # OOO 0xE0, # OOO 0xE0, # OOO 0xE0, # OOO 0x60, # OO 0x00, # 0x00, # 0x00, # 0x00, # 0x00, # 0x00, # 0x00, # 0x00, # 0x00, # 0x00, # 0x00, # 0x00, # 0x00, # 0x00, # # @288 '(' (6 pixels wide) 0x00, # 0x00, # 0x00, # 0x00, # 0x0C, # OO 0x1C, # OOO 0x1C, # OOO 0x38, # OOO 0x78, # OOOO 0x78, # OOOO 0x78, # OOOO 0xF8, # OOOOO 0xF8, # OOOOO 0xF8, # OOOOO 0xF8, # OOOOO 0xF8, # OOOOO 0xF8, # OOOOO 0x78, # OOOO 0x78, # OOOO 0x78, # OOOO 0x38, # OOO 0x1C, # OOO 0x1C, # OOO 0x0C, # OO # @312 ')' (6 pixels wide) 0x00, # 0x00, # 0x00, # 0x00, # 0xC0, # OO 0xE0, # OOO 0xE0, # OOO 0x70, # OOO 0x78, # OOOO 0x78, # OOOO 0x78, # OOOO 0x7C, # OOOOO 0x7C, # OOOOO 0x7C, # OOOOO 0x7C, # OOOOO 0x7C, # OOOOO 0x7C, # OOOOO 0x78, # OOOO 0x78, # OOOO 0x78, # OOOO 0x70, # OOO 0xE0, # OOO 0xE0, # OOO 0xC0, # OO # @336 '*' (9 pixels wide) 0x00, 0x00, # 0x00, 0x00, # 0x00, 0x00, # 0x00, 0x00, # 0x1E, 0x00, # OOOO 0x5F, 0x80, # O OOOOOO 0xEF, 0x80, # OOO OOOOO 0xFB, 0x80, # OOOOO OOO 0x0C, 0x00, # OO 0x37, 0x00, # OO OOO 0x77, 0x00, # OOO OOO 0x36, 0x00, # OO OO 0x00, 0x00, # 0x00, 0x00, # 0x00, 0x00, # 0x00, 0x00, # 0x00, 0x00, # 0x00, 0x00, # 0x00, 0x00, # 0x00, 0x00, # 0x00, 0x00, # 0x00, 0x00, # 0x00, 0x00, # 0x00, 0x00, # # @384 '+' (12 pixels wide) 0x00, 0x00, # 0x00, 0x00, # 0x00, 0x00, # 0x00, 0x00, # 0x00, 0x00, # 0x00, 0x00, # 0x07, 0x00, # OOO 0x07, 0x00, # OOO 0x07, 0x00, # OOO 0x07, 0x00, # OOO 0x07, 0x00, # OOO 0xFF, 0xF0, # OOOOOOOOOOOO 0xFF, 0xF0, # OOOOOOOOOOOO 0xFF, 0xF0, # OOOOOOOOOOOO 0x07, 0x00, # OOO 0x07, 0x00, # OOO 0x07, 0x00, # OOO 0x07, 0x00, # OOO 0x00, 0x00, # 0x00, 0x00, # 0x00, 0x00, # 0x00, 0x00, # 0x00, 0x00, # 0x00, 0x00, # # @432 ',' (4 pixels wide) 0x00, # 0x00, # 0x00, # 0x00, # 0x00, # 0x00, # 0x00, # 0x00, # 0x00, # 0x00, # 0x00, # 0x00, # 0x00, # 0x00, # 0x00, # 0x00, # 0x60, # OO 0xF0, # OOOO 0xF0, # OOOO 0xF0, # OOOO 0x70, # OOO 0x20, # O 0x60, # OO 0xC0, # OO # @456 '-' (6 pixels wide) 0x00, # 0x00, # 0x00, # 0x00, # 0x00, # 0x00, # 0x00, # 0x00, # 0x00, # 0
SivagnanamCiena/mock-s3
tests/push.py
Python
mit
652
0.003067
#!/usr/bin/env python import boto from boto.s3.key import Key OrdinaryCallingFormat = boto.config.get('s3', 'calling_format', 'bo
to.s3.connection.OrdinaryCallingFormat') s3 = boto.connect_s3(host='localhost', port=10001, calling_format=OrdinaryCallingFormat, is_secure=False) b = s3.get_bucket('mocking') k_cool = Key(b) k_cool.key = 'cool.html' k_cool.set_contents_from_string('this is some really cool html') k_green = Key(b) k_green.key = 'green.html' k_green.set_contents_fro
m_string('this is some really good music html') k_horse = Key(b) k_horse.key = 'seminoles.html' k_horse.set_contents_from_string('this is some really seminoles html')
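A hedged companion sketch: reading back the objects push.py just stored, against the same local endpoint. It assumes the mock server from this repo is already listening on localhost:10001:

import boto

conn = boto.connect_s3(host='localhost', port=10001,
                       calling_format='boto.s3.connection.OrdinaryCallingFormat',
                       is_secure=False)
bucket = conn.get_bucket('mocking')
for key in bucket.list():
    # round-trips the strings stored by push.py
    print key.name, key.get_contents_as_string()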
ncliam/serverpos
openerp/addons/hr_gamification/wizard/grant_badge.py
Python
agpl-3.0
2,525
0.002376
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2013 OpenERP SA (<http://www.openerp.com>) # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/> # ############################################################################## from openerp.osv import fields, osv from openerp.tools.translate import _ class hr_grant_badge_wizard(osv.TransientModel): _name = 'gamification.badge.user.wizard' _inherit = ['gamification.badge.user.wizard'] _columns = { 'employee_id': fields.many2one("hr.employee", string='Employee', required=True), 'user_id': fields.related("employee_id", "user_id", type="many2one", relation="res.users", store=True, string='User') } def action_grant_badge(self, cr, uid, ids, context=None): """Wizard action for sending a badge to a chosen employee""" if context is None: context = {} badge_user_obj = self.pool.get('gamification.badge.user') for wiz in self.browse(cr, uid, ids, context=context): if not wiz.user_id: raise osv.except_osv(_('Warning!'), _('You can send badges only to employees linked to a user.')) if uid == wiz.user_id.id:
raise osv.except_osv(_('Warning!'), _('You cannot send a badge to
yourself')) values = { 'user_id': wiz.user_id.id, 'sender_id': uid, 'badge_id': wiz.badge_id.id, 'employee_id': wiz.employee_id.id, 'comment': wiz.comment, } badge_user = badge_user_obj.create(cr, uid, values, context=context) result = badge_user_obj._send_badge(cr, uid, [badge_user], context=context) return result
TheWardoctor/Wardoctors-repo
script.module.fantastic/lib/resources/lib/sources/en/rlsbb.py
Python
apache-2.0
5,587
0.013782
# -*- coding: utf-8 -*- ''' fantastic Add-on This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. ''' import re,urllib,urlparse from resources.lib.modules import cleantitle from resources.lib.modules import client from resources.lib.modules import debrid from resources.lib.modules import source_utils class source: def __init__(self): self.priority = 1 self.language = ['en'] self.domains = ['rlsbb.online', 'rlsbb.co'] self.base_link = 'http://rlsbb.co/' self.search_link = '/search/%s/feed/rss2/' self.search_link2 = '/?s=%s&submit=Find' def movie(self, imdb, title, localtitle, aliases, year): try: url = {'imdb': imdb, 'title': title, 'year': year} url = urllib.urlencode(url) return url except: return def tvshow(self, imdb, tvdb, tvshowtitle, localtvshowtitle, aliases, year): try: url = {'imdb': imdb, 'tvdb': tvdb, 'tvshowtitle': tvshowtitle, 'year': year} url = urllib.urlencode(url) return url except: return def episode(self, url, imdb, tvdb, title, premiered, season, episode): try: if url == None: return url = urlparse.parse_qs(url) url = dict([(i, url[i][0]) if url[i] else (i, '') for i in url]) url['title'], url['premiered'], url['season'], url['episode'] = title, premiered, season, episode url = urllib.urlencode(url) return url except: return def sources(self, url, hostDict, hostprDict): try: sources = [] if url == None: return sources if debrid.status() is False: raise Exception() data = urlparse.parse_qs(url) data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data]) title = data['tvshowtitle'] if 'tvshowtitle' in data else data['title'] hdlr = 'S%02dE%02d' % (int(data['season']), int(data['episode'])) if 'tvshowtitle' in data else data['year'] query = '%s S%02dE%02d' % (data['tvshowtitle'], int(data['season']), int(data['episode'])) if 'tvshowtitle' in data else '%s %s' % (data['title'], data['year']) query = re.sub('(\\\|/| -|:|;|\*|\?|"|\'|<|>|\|)', ' ', query) url = self.search_link % urllib.quote_plus(query) url = urlparse.urljoin(self.base_link, url) r = client.request(url) posts = client.parseDOM(r, 'item') hostDict = hostprDict + hostDict items = [] for post in posts: try: t = client.parseDOM(post, 'title')[0] u = client.parseDOM(post, 'enclosure', ret='url', attrs={'type': 'video.+?'}) s = re.findall('((?:\d+\.\d+|\d+\,\d+|\d+) (?:GiB|MiB|GB|MB))', post) s = s[0] if s else '0' items += [(t, i, s) for i in u] except: pass for item in items: try: name = item[0] name = client.replaceHTMLCodes(name) t = re.sub('(\.|\(|\[|\s)(\d{4}|S\d*E\d*|S\d*|3D)(\.|\)|\]|\s|)(.+|)', '', name) if not cleantitle.get(t) == cleantitle.get(title): raise Exception() y = re.findall('[\.|\(|\[|\s](\d{4}|S\d*E\d*|S\d*)[\.|\)|\]|\s]', name)[-1].upper() if not y == hdlr: raise Exception() quality, info = source_utils.get_release_quality(name, item[1]) try: size = re.sub('i', '', item[2]) print size div = 1 if size.endswith('GB') else 1024 size = float(re.sub('[^0-9|
/.|/,]', '', size))/div size = '%.2f GB' % size info.append(size) ex
cept: pass info = ' | '.join(info) url = item[1] if any(x in url for x in ['.rar', '.zip', '.iso']): raise Exception() url = client.replaceHTMLCodes(url) url = url.encode('utf-8') valid, host = source_utils.is_host_valid(url, hostDict) if not valid: continue host = client.replaceHTMLCodes(host) host = host.encode('utf-8') sources.append({'source': host, 'quality': quality, 'language': 'en', 'url': url, 'info': info, 'direct': False, 'debridonly': True}) except: pass check = [i for i in sources if not i['quality'] == 'CAM'] if check: sources = check return sources except: return sources def resolve(self, url): return url
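The size handling inside sources() above is easy to misread, so here is the same arithmetic as a standalone sketch: strip the binary 'i' marker (GiB becomes GB), scale MB values down by 1024, and format as gigabytes:

import re

def normalize_size(raw):
    """'700 MiB' -> '0.68 GB'; '1.4 GB' -> '1.40 GB'."""
    size = re.sub('i', '', raw)                # 'MiB'/'GiB' -> 'MB'/'GB'
    div = 1 if size.endswith('GB') else 1024   # MB values get scaled down
    return '%.2f GB' % (float(re.sub('[^0-9|/.|/,]', '', size)) / div)

# print normalize_size('700 MiB')  # -> '0.68 GB'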
ellisonbg/ipyleaflet
ipyleaflet/basemaps.py
Python
mit
14,548
0.02997
class Bunch(dict): """A dict with attribute-access""" def __getattr__(self, key): try: return self.__getitem__(key) except KeyError: raise AttributeError(key) def __setattr__(self, key, value): self.__setitem__(key, value) def __dir__(self): return self.keys() basemaps = Bunch( OpenStreetMap = Bunch( Mapnik = dict( url = 'https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png', max_zoom = 19, attribution = 'Map data (c) <a href="https://openstreetmap.org">OpenStreetMap</a> contributors', name = 'OpenStreetMap.Mapnik' ), BlackAndWhite = dict( url = 'http://{s}.tiles.wmflabs.org/bw-mapnik/{z}/{x}/{y}.png', max_zoom = 18, attribution = '&copy; <a href="http://www.openstreetmap.org/copyright">OpenStreetMap</a>', name = 'OpenStreetMap.BlackAndWhite', ), DE = dict( url = 'http://{s}.tile.openstreetmap.de/tiles/osmde/{z}/{x}/{y}.png', max_zoom = 18, attribution = '&copy; <a href="http://www.openstreetmap.org/copyright">OpenStreetMap</a>', name = 'OpenStreetMap.DE' ), France = dict( url = 'http://{s}.tile.openstreetmap.fr/osmfr/{z}/{x}/{y}.png', max_zoom = 20, attribution = '&copy; Openstreetmap France | &copy; <a href="http://www.openstreetmap.org/copyright">OpenStreetMap</a>', name = 'OpenStreetMap.France' ), HOT = dict( url = 'http://{s}.tile.openstreetmap.fr/hot/{z}/{x}/{y}.png', max_zoom = 19, attribution = '&copy; <a href="http://www.openstreetmap.org/copyright">OpenStreetMap</a>, Tiles courtesy of <a href="http://hot.openstreetmap.org/" target="_blank">Humanitarian OpenStreetMap Team</a>', name = 'OpenStreetMap.HOT' ) ), OpenTopoMap = dict( url = 'https://{s}.tile.opentopomap.org/{z}/{x}/{y}.png', max_zoom = 17, attribution = 'Map data: &copy; <a href="http://www.openstreetmap.org/copyright">OpenStreetMap</a>, <a href="http://viewfinderpanoramas.org">SRTM</a> | Map style: &copy; <a href="https://opentopomap.org">OpenTopoMap</a> (<a href="https://creativecommons.org/licenses/by-sa/3.0/">CC-BY-SA</a>)', name = 'OpenTopMap' ), OpenMapSurfer = Bunch( Roads = dict( url = 'http://korona.geog.uni-heidelberg.de/tiles/roads/x={x}&y={y}&z={z}', max_zoom = 20, attribution = 'Imagery from <a href="http://giscience.uni-hd.de/">GIScience Research Group @ University of Heidelberg</a> &mdash; Map data &copy; <a href="http://www.openstreetmap.org/copyright">OpenStreetMap</a>', name = 'OpenMapSurfer.Roads' ), Grayscale = dict( url = 'http://korona.geog.uni-heidelberg.de/tiles/roadsg/x={x}&y={y}&z={z}', max_zoom = 19, attribution = 'Imagery from <a href="http://giscience.uni-hd.de/">GIScience Research Group @ University of Heidelberg</a> &mdash; Map data &copy; <a href="http://www.openstreetmap.org/copyright">OpenStreetMap</a>', name = 'OpenMapSurfer.Grayscale' ) ), Hydda = Bunch( Full = dict( url = 'http://{s}.tile.openstreetmap.se/hydda/full/{z}/{x}/{y}.png', max_zoom = 18, attribution = 'Tiles courtesy of <a href="http://openstreetmap.se/" target="_blank">OpenStreetMap Sweden</a> &mdash; Map data &copy; <a href="http://www.openstreetmap.org/copyright">OpenStreetMap</a>', name = 'Hydda.Full' ), Base = dict( url = 'http://{s}.tile.openstreetmap.se/hydda/base/{z}/{x}/{y}.png', max_zoom = 18, attribution = 'Tiles courtesy of <a href="http://openstreetmap.se/" target="_blank">OpenStreetMap Sweden</a> &mdash; Map data &copy; <a href="http://www.openstreetmap.org/copyright">OpenStreetMap</a>', name = 'Hydda.Base' ), ), Esri = Bunch( WorldStreetMap = dict( url = 'http://server.arcgisonline.com/ArcGIS/rest/services/World_Street_Map/MapServer/tile/{z}/{y}/{x}', max_zoom = 20, attribution = 'Tiles &copy; Esri &mdash; 
Source: Esri, DeLorme, NAVTEQ, USGS, Intermap, iPC, NRCAN, Esri Japan, METI, Esri China (Hong Kong), Esri (Thailand), TomTom, 2012', name = 'Esri.WoldStreetMap' ), DeLorme = dict( url = 'http://server.arcgisonline.com/ArcGIS/rest/services/Specialty/DeLorme_World_Base_Map/MapServer/tile/{z}/{y}/{x}', min_zoom = 1, max_zoom = 11, attribution = 'Tiles &copy; Esri &mdash; Copyright: &copy;2012 DeLorme', name = 'Esri.DeLorme' ), WorldTopoMap = dict( url = 'http://server.arcgisonline.com/ArcGIS/rest/services/World_Topo_Map/MapServer/tile/{z}/{y}/{x}', max_zoom = 20, attribution = 'Tiles &copy; Esri &mdash; Esri, DeLorme, NAVTEQ, TomTom, Intermap, iPC, USGS, FAO, NPS, NRCAN, GeoBase, Kadaster NL, Ordnance Survey, Esri Japan, METI, Esri China (Hong Kong), and the GIS User Community', name = 'Esri.WorldTopoMap' ), WorldImagery = dict( url = 'http://server.arcgisonline.com/ArcGIS/rest/services/World_Imagery/MapServer/tile/{z}/{y}/{x}', max_zoom = 20, attribution = 'Tiles &copy; Esri &mdash; Source: Esri, i-cubed, USDA, USGS, AEX, GeoEye, Getmapping, Aerogrid, IGN, IGP, UPR-EGP, and the GIS User Community', name = 'Esri.WorldImagery' ), NatGeoWorldMap = dict( url = 'http://server.arcgisonline.com/ArcGIS/rest/services/NatGeo_World_Map/MapServer/tile/{z}/{y}/{x}', max_zoom = 16, attribution = 'Tiles &copy; Esri &mdash; National Geographic, Esri, DeLorme, NAVTEQ, UNEP-WCMC, USGS, NASA, ESA, METI, NRCAN, GEBCO, NOAA, iPC', name = 'Esri.NatGeoWorldMap' ), ), HikeBike = Bunch( HikeBike = dict( url = 'http://{s}.tiles.wmflabs.org/hikebike/{z}/{x}/{y}.png', max_zoom = 19, attribution = '&copy; <a href="http://www.openstreetmap.org/copyright">OpenStreetMap</a>', name = 'HikeBike.HikeBike' ), ), MtbMap = dict( url = 'http://tile.mtbmap.cz/mtbmap_tiles/{z}/{x}/{y}.png', max_zoom = 20, attribution = '&copy; <a href="http://www.openstreetmap.org/copyright">OpenStreetMap</a> &amp; USGS', name = 'MtbMap' ), CartoDB = Bunch( Positron = dict( url = 'http://c.basemaps.cartocdn.com/light_all/{z}/{x}/{y}.png', max_zoom = 20, attribution = '&copy; <a href="http://www.openstreetmap.org
/copyright">OpenStreetMap</a> &copy; <a href="http://cartodb.com/attributions">CartoDB</a>', name = 'CartoDB.Positron' ), DarkMatter = dict(
url = 'http://c.basemaps.cartocdn.com/dark_all/{z}/{x}/{y}.png', max_zoom = 20, attribution = '&copy; <a href="http://www.openstreetmap.org/copyright">OpenStreetMap</a> &copy; <a href="http://cartodb.com/attributions">CartoDB</a>', name = 'CartoDB.DarkMatter' ) ), NASAGIBS = Bunch( ModisTerraTrueColorCR = dict( url = 'https://gibs.earthdata.nasa.gov/wmts/epsg3857/best/MODIS_Terra_CorrectedReflectance_TrueColor/default/%s/GoogleMapsCompatible_Level9/{z}/{y}/{x}.jpg', max_zoom = 9, attribution = 'Imagery provided by services from the Global Imagery Browse Services (GIBS), operated by the NASA/GSFC/Earth Science Data and Information System (<a href="https://earthdata.nasa.gov">ESDIS</a>) with funding provided by NASA/HQ.', name = 'NASAGIBS.ModisTerraTrueColorCR' ), ModisTerraBands367CR = dict( url = 'https://gibs.earthdata.nasa.gov/wmts/epsg3857/best/MODIS_Terra_CorrectedReflectance_Bands367/default/%s/GoogleMapsCompatible_Level9/{z}/{y}/{x}.jpg', max_zoom = 9, attribution
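A short usage sketch of the Bunch pattern that opens this file, assuming the class definition above is in scope: the same mapping is reachable by attribute or by key, which is what makes spellings like basemaps.Esri.WorldImagery work:

tiles = Bunch(Mapnik=dict(url='https://tile.example/{z}/{x}/{y}.png', max_zoom=19))
assert tiles.Mapnik is tiles['Mapnik']   # attribute access aliases item access
tiles.extra = dict(max_zoom=9)           # __setattr__ writes through to the dict
print(sorted(tiles))                     # ['Mapnik', 'extra'] - still a plain dict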
bengjerstad/windowslogonofflogger
logserver/__init__.py
Python
mit
450
0.048889
import hug try: from . import runserver ##to run windowslogonofflo
gger ##https://github.com/bengjerstad/windowslogonofflogger hug.API(__name__).extend(runserver, '') print('Running windowslogonofflogger Server') except: pass try: from . import logserver ##to run MulitUse Log Server ##https://github.com/bengjerstad/multiuselogserver hug.API(__name__).extend(logserver, '/logserver') print('Running MultiUselog Server') except: pass
FocusLab/willie
vendor/lightningjs/lib/python/lightningjs/http/__init__.py
Python
bsd-3-clause
2,265
0.001766
import os import urlparse from SocketServer import ThreadingMixIn from wsgiref.util import setup_testing_defaults from wsgiref.simple_server import make_server, WSGIServer from lightningjs.http.gzipper import GzipperMiddleware class ThreadedWsgiServer(ThreadingMixIn, WSGIServer): pass class RoutableApplication(object): def __init__(self, routable_object): self.__routable_object = routable_object def __call__(self, environ, start_response): # parse the request setup_testing_defaults(environ) path = environ['PATH_INFO'] querystring = environ['QUERY_STRING'] multiargs = urlparse.parse_qs(querystring) # get the route and the associated Python method, then execute # that method with the given querystring parameters as Python kwargs if path[1:]: path_method = 'get_%s' % path[1:] else: path_method = 'get_index' if hasattr(self.__routable_object, path_method): # call the routed method single_value_args = {} for key in multiargs: single_value_args[key] = multiargs[key][0
] status, content_type, content = getattr(self.__routable_object, path_method)(**single_value_args) else: # route doesn't exist content_type = 'text/html'
content = status = '404 NOT FOUND' # write out the HTTP response; status comes from the routed method or from the 404 branch above headers = [('Content-type', content_type)] start_response(status, headers) return [content] def serve_routable_object(routable_object, port): routable_server = RoutableApplication(routable_object=routable_object) httpd = make_server( host='', port=port, app=GzipperMiddleware(routable_server, compresslevel=8), server_class=ThreadedWsgiServer, ) httpd.serve_forever() def render_browser_template(path, **kwargs): template_path = os.path.join(os.path.dirname(__file__), 'templates', path) with open(template_path, 'r') as template_fd: content = template_fd.read() if kwargs: # do Python string templates if given content = content % kwargs return content
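To make the routing convention concrete, here is a hypothetical routable object: RoutableApplication maps '/' to get_index() and '/stats' to get_stats(), querystring parameters arrive as keyword arguments, and each handler returns a (status, content_type, body) triple:

class Pages(object):
    def get_index(self):
        return ('200 OK', 'text/html', '<h1>hello</h1>')

    def get_stats(self, name='world'):
        # /stats?name=foo calls get_stats(name='foo')
        return ('200 OK', 'text/plain', 'stats for %s' % name)

# serve_routable_object(routable_object=Pages(), port=8000)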
spulec/moto
tests/test_core/test_nested.py
Python
apache-2.0
736
0
import sure # noqa # pylint: disable=unused-import import unittest import boto3 from moto import mock_sqs, mock_ec2 from tests import EXAMPLE_AMI_ID class TestNestedDecoratorsBoto3(unittest
.TestCase): @mock_sqs def setup_sqs_queue(self): conn = boto3.resource("sqs", region_name="us-east-1") queue = conn.create_queue(QueueName="some-queue") queue.send_message(MessageBody="test message 1") queue.reload() queue.attributes["ApproximateNumberOfMessages"].should.equal("1")
@mock_ec2 def test_nested(self): self.setup_sqs_queue() conn = boto3.client("ec2", region_name="us-west-2") conn.run_instances(ImageId=EXAMPLE_AMI_ID, MinCount=1, MaxCount=1)
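A hedged sketch of the pattern this test exercises: each helper carries its own service mock, so an @mock_ec2 test can call an @mock_sqs helper and each backend is active for its own scope. The queue name and region are illustrative:

# import boto3
# from moto import mock_sqs
#
# @mock_sqs
# def make_queue(name="demo-queue"):
#     sqs = boto3.resource("sqs", region_name="us-east-1")
#     return sqs.create_queue(QueueName=name).url
#
# print(make_queue())  # runs against moto's in-memory SQS, no AWS credentials needed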
anhstudios/swganh
data/scripts/templates/object/mobile/shared_kaja_orzee.py
Python
mit
437
0.048055
#### NOTICE: THIS FILE IS AUTOGENERATED #### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY #### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES from swgpy.object import * def create
(kernel): result = Cr
eature() result.template = "object/mobile/shared_kaja_orzee.iff" result.attribute_template_id = 9 result.stfName("theme_park_name","kaja_orzee") #### BEGIN MODIFICATIONS #### #### END MODIFICATIONS #### return result
depop/django-oauth2-provider
provider/oauth2/admin.py
Python
mit
840
0
from django.apps import apps from d
jango.contrib import admin AccessToken = apps.get_model('oauth2', 'AccessToken') Client = apps.get_model('oauth2', 'Client') Grant = apps.get_model('oauth2', 'Grant') RefreshToken = apps.get_model('oauth2', 'RefreshToken') class AccessTokenAdmin(admin.ModelAdmin): list_display = ('user', 'client', 'token', 'expires', 'scope') raw_id_fields = ('user',) class GrantAdmin(admin.ModelAdmin): list_display = ('user', 'client', 'code', 'expires') raw_id_fields = (
'user',) class ClientAdmin(admin.ModelAdmin): list_display = ('url', 'user', 'redirect_uri', 'client_id', 'client_type') raw_id_fields = ('user',) admin.site.register(AccessToken, AccessTokenAdmin) admin.site.register(Grant, GrantAdmin) admin.site.register(Client, ClientAdmin) admin.site.register(RefreshToken)
akiokio/centralfitestoque
src/.pycharm_helpers/python_stubs/-1807332816/zipimport.py
Python
bsd-2-clause
4,893
0.008379
# encoding: utf-8 # module zipimport # from (built-in) # by generator 1.130 """ zipimport provides support for importing Python modules from Zip archives. This module exports three objects: - zipimporter: a class; its constructor takes a path to a Zip archive. - ZipImportError: exception raised by zipimporter objects. It's a subclass of ImportError, so it can be caught as ImportError, too. - _zip_directory_cache: a dict, mapping archive paths to zip directory info dicts, as used in zipimporter._files. It is usually not needed to use the zipimport module explicitly; it is used by the builtin import mechanism for sys.path items that are paths to Zip archives. """ # no imports # no functions # classes class zipimporter(object): """ zipimporter(archivepath) -> zipimporter object Create a new zipimporter instance. 'archivepath' must be a path to a zipfile, or to a specific path inside a zipfile. For example, it can be '/tmp/myimport.zip', or '/tmp/myimport.zip/mydirectory', if mydirectory is a valid directory inside the archive. ZipImportError is raised if 'archivepath' doesn't point to a valid Zip archive. The 'archive' attribute of zipimporter objects contains the name of the zipfile targeted. """ def find_module(self, fullname, path=None): # real signature unknown; restored from __doc__ """ find_module(fullname, path=None) -> self or None. Search for a module specified by 'fullname'. 'fullname' must be the fully qualified (dotted) module name. It returns the zipimporter instance itself if the module was found, or None if it wasn't. The optional 'path' argument is ignored -- it's there for compatibility with the importer protocol. """ return self def get_code(self, fullname): # real signature unknown; restored from __doc__ """ get_code(fullname) -> code object. Return the code object for the specified module. Raise ZipImportError if the module couldn't be found. """ pass def get_data(self, pathname): # real signature unknown; restored from __doc__ """ get_data(pathname) -> string with file data. Return the data associated with 'pathname'. Raise IOError if the file wasn't found. """ return "" def get_filename(self, fullname): # real signature unknown; restored from __doc__ """ get_filename(fullname) -> filename string. Return the filename for the specified module. """ pass def get_source(self, fullname): # real signature unknown; restored from __doc__ """ get_source(fullname) -> source string. Return the source code for the specified module. Raise ZipImportError if the module couldn't be found, return None if the archive does contain the module, but has no source for it. """ pass def is_package(self, fullname): # real signature unknown; restored from __doc__ """ is_package(fullname) -> bool. Return True if the module specified by fullname is a package. Raise ZipImportError if the module couldn't be found. """ pass def load_module(self, fullname): # real signature unknown; restored from __doc__ """ load_module(fullname) -> module. Load the module specified by 'fullname'. 'fullname' must be the fully qualified (dotted) module name. It returns the imported module, or raises ZipImportError if it wasn't found. """ pass def __getattribute__(self, name): # real signature unknown; restored from __doc__ """ x.__getattribute__('name') <==> x.name """ pass def __init__(self, archivepath): # real signature unknown; restored from __doc__ pass @staticmethod # known case of __new__ def __new__(S, *more): # real signature unknown; restored from __doc__ """ T.__new__(S, ...)
-> a new object with type S, a subtype of T """ pass def __repr__(self): # real signature unknown; restored from __doc__ """ x.__repr__() <==> repr(x) """ pass archive = property(lambda self: '') """:type: string""" prefix = property(lambda self: '') """:type: string""" _files = property(lambda self: {}) """:type: dict""" class ZipImportError(ImportError): # no doc def __init__(self, *args, **kwargs): # real signature
unknown pass __weakref__ = property(lambda self: object(), lambda self, v: None, lambda self: None) # default """list of weak references to the object (if defined)""" # variables with complex values _zip_directory_cache = {} # real value of type <type 'dic
t'> skipped
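The stub above only documents the interface, so here is a small usage sketch of the real standard-library module; the archive path is hypothetical:

# import zipimport
# importer = zipimport.zipimporter('/tmp/myimport.zip')
# if importer.find_module('mymodule') is not None:
#     mod = importer.load_module('mymodule')
#     print importer.is_package('mymodule'), importer.get_filename('mymodule')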
eltuxusb/eltuxusb
eltuxusb/el_input.py
Python
gpl-3.0
981
0.004077
# -*- coding: utf-8 -*- # This module contains classes to manage EL1USB device # This class reads the content of the EL-USB-1 thermometer import sys import datetime import time class el1_input: "Doc ..." def __init__(self): self.fake = 0 def request(self, text, base_value, min_value, max_value): print text, while base_value < min_value or base_value > max_value: base_value = input() if base_value <
min_value: print "value too low, should be between", min_value, "and", max_value if base_value > max_value: print "value too high, should be between", min_value, "and", max_value return base_value
def convert_name(self, name): new_buffer = [] for char in name: new_buffer.append(ord(char)) count = len(name) while count != 16: new_buffer.append(0) count += 1 return new_buffer
ralphiee22/kolibri
kolibri/logger/api.py
Python
mit
2,079
0.002886
from kolibri.auth.api import KolibriAuthPermissions, KolibriAuthPermissionsFilter from kolibri.content.api import OptionalPageNumberPagination from rest_framework import filters, viewsets from .models import ContentRatingLog, ContentSessionLog, ContentSummaryLog, UserSessionLog from .serializers import ContentRatingLogSerializer, ContentSessionLogSerializer, ContentSummaryLogSerializer, UserSessionLogSerializer class ContentSessionLogFilter(filters.FilterSet): class Meta: model = ContentSessionLog fields = ['user_id', 'content_id'] class ContentSessionLogViewSet(viewsets.ModelViewSet): permission_classes = (KolibriAuthPermissions,) filter_backends = (KolibriAuthPermissionsFilter, filters.DjangoFilterBackend) queryset = ContentSessionLog.objects.all() serializer_class = ContentSessionLogSerializer pagination_class = OptionalPageNumberPagination filter_class = ContentSessionLogFilter class ContentSummaryFilter(filters.FilterSet): class Meta: model = ContentSummaryLog fields = ['user_id', 'content_id'] class ContentSummaryLogViewSet(viewsets.ModelViewSet): permission_classes = (KolibriAuthPermissions,) filter_backends = (KolibriAuthPermissionsFilter, filters.DjangoFilterBackend) queryset = ContentSummaryLog.objects.all() serializer_class = ContentSummaryLogSerializer pagination_class = OptionalPageNumberPagination filter_class = ContentSummaryFilter class ContentRatingLogViewSet(viewsets.ModelViewSet): permission_classes = (KolibriAuthPer
missions,) filter_backends = (KolibriAuthPermissionsFilter,) queryset = ContentRatingLog.objects.all() serializer_class = ContentRatingLogSerializer pagination_class = OptionalPageNumberPagination class UserSessionLogViewSet(viewsets.ModelViewSet): permission_classes = (KolibriAut
hPermissions,) filter_backends = (KolibriAuthPermissionsFilter,) queryset = UserSessionLog.objects.all() serializer_class = UserSessionLogSerializer pagination_class = OptionalPageNumberPagination
youtube/cobalt
build/util/lib/common/PRESUBMIT.py
Python
bsd-3-clause
513
0.005848
# Copyright 2015 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. def _RunTests(input_api, output_api): return (input_api.canned_checks.RunUnitTestsInDir
ectory( input_api, output_api, '.', files_to_check=[r'.+_test.py$'])) def CheckChangeOnUpload(input_api,
output_api): return _RunTests(input_api, output_api) def CheckChangeOnCommit(input_api, output_api): return _RunTests(input_api, output_api)
m-labs/llvmlite
llvmlite/tests/__init__.py
Python
bsd-2-clause
1,433
0.000698
import sys import unittest import warnings from unittest import TestCase try: import faulthandler except ImportError: pass else: try: # May fail in IPython Notebook with UnsupportedOperation faulthandler.enable() except BaseException as e: msg = "Failed to enable faulthandler due to:\n{err}" warnings.warn(msg.format(err=e))
# Try to inject Numba's unittest customizations. from . import customize def discover_tests(startdir): """Discover test under a directory """ # Avoid importing unittest loader = unittest.TestLoader() suite = loader.discover(startdir) return suite def run_tests(suite=None, xmloutput=None, verbosity=1): """ args
---- - suite [TestSuite] A suite of all tests to run - xmloutput [str or None] Path of XML output directory (optional) - verbosity [int] Verbosity level of tests output Returns the TestResult object after running the test *suite*. """ if suite is None: suite = discover_tests("llvmlite.tests") if xmloutput is not None: import xmlrunner runner = xmlrunner.XMLTestRunner(output=xmloutput) else: runner = None prog = unittest.main(suite=suite, testRunner=runner, exit=False, verbosity=verbosity) return prog.result def main(): res = run_tests() sys.exit(0 if res.wasSuccessful() else 1)
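A hedged usage sketch for the helpers above; run_tests() builds a suite itself when none is passed, and xmloutput switches to the optional xmlrunner dependency:

# from llvmlite.tests import discover_tests, run_tests
# result = run_tests(discover_tests('llvmlite.tests'), verbosity=2)
# result = run_tests(xmloutput='test-reports')  # needs `pip install xmlrunner`
# print(result.wasSuccessful())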
Smaed/pyDbManager
lib/Lang.py
Python
gpl-2.0
228
0.004386
#!/usr/bin/env
python # -*- coding: utf-8 -*- MENU_FILE = "File" FILE_NEW = "New"
FILE_OPEN = "Open" FILE_EXIT = "Exit" TAB_DATA = "Data" TAB_SQL = "SQL" BUTTON_EXIT = "Exit"
djangomini/djangomini
test_project/wsgi.py
Python
mit
373
0
""" WSGI config for project. It exposes the WSGI callable as a module-level variable named ``application``. For more information on this file, see https://docs.djangopr
oject.com/en/1.9/howto/deployment/wsgi/ """ import os from django.core.wsgi import get_wsgi_application os.environ.setdefault(
"DJANGO_SETTINGS_MODULE", "settings") application = get_wsgi_application()
khchine5/lino
lino/utils/cycler.py
Python
bsd-2-clause
1,929
0.000518
# -*- coding: UTF-8 -*- # Copyright 2013-2014 by Luc Saffre. # License: BSD, see LICENSE for more details. """ Turns a list of items into an endless loop. Useful when generating demo fixtures. >>> from lino.utils import Cycler >>> def myfunc(): ... yield "a" ... yield "b" ... yield "c" >>> c = Cycler(myfunc()) >>> s = "" >>> for i in range(10): ... s += c.pop() >>> print (s) abcabcabca An empty Cycler or a Cycler on an empty list will endlessly pop None values: >>> c = Cycler() >>> print (c.pop(), c.pop(), c.pop()) None None None >>> c = Cycler([]) >>> print (c.pop(), c.pop(), c.pop()) None None None >>> c = Cycler(None) >>> print (c.pop(), c.pop(), c.pop()) None None None """ from __future__ import unicode_literals from __future__ import print_function from builtins import object class Cycler(object): def __init__(self, *args): """ If there is exactly one argument, then this must be an iterable and will be used as the list of items to cycle on.
If there is more than one positional argument, then these arguments themselves will be the list of items. """ if len(args) == 0: self.items = [] elif len(args) ==
1: if args[0] is None: self.items = [] else: self.items = list(args[0]) else: self.items = args self.current = 0 def pop(self): if len(self.items) == 0: return None item = self.items[self.current] self.current += 1 if self.current >= len(self.items): self.current = 0 if isinstance(item, Cycler): return item.pop() return item def __len__(self): return len(self.items) def reset(self): self.current = 0 def _test(): import doctest doctest.testmod() if __name__ == "__main__": _test()
shelt/Fries
modules/user.py
Python
apache-2.0
3,458
0.006362
# Local imports from connect import m from crypto import * #NOTE: SQL '?' tuples must use '_t' as var name #NOTE: the tuples that fetch(one|all)() returns should be called 'res' ####################### # USER CLASS # ####################### # IDs and names are stored as fields because they # are used to query the database. Everything else # is queried itself. class User: def __init__(self, id): self.id = id def get_name(self): _t = (self.id,) m.execute("SELECT name FROM Users WHERE id = ?", _t) res = m.fetchone() return res[0] def mod_lkarma(self, inc=True): if inc: delta = "+1" else: delta = "-1" _t = (self.id,self.id) m.execute("UPDATE Users SET lkarma = ( ( SELECT lkarma FROM Users WHERE id = ? ) "+delta+" ) WHERE id = ?", _t) def mod_ckarma(self, inc=True): if inc: delta = "+1" else: delta = "-1" _t = (self.id,self.id) m.execute("UPDATE Users SET ckarma = ( ( SELECT ckarma FROM Users WHERE id = ? ) "+delta+" ) WHERE id = ?", _t) def get_lkarma(self): _t = (self.id,) m.execute("SELECT lkarma FROM Users WHERE id = ?", _t) res = m.fetchone() return res[0] def get_ckarma(self): _t = (self.id,) m.execute("SELECT ckarma FROM Users WHERE id = ?", _t) res = m.fetchone() return res[0] ####################### # USERACCOUNT ACTIONS # ####################### # Functions related to user auth and # other account-related activities. def new_user(username, password, email): if username_exists(username): return False id = get_new_id() salt = get_salt() hash = get_hash(password, salt) _t = (id, username, email, hash, salt, 0, 0) m.execute("INSERT INTO Users (id, name, mail, hash, salt, lkarma, ckarma) VALUES(?,?,?,?,?,?,?)", _t)
return True def verify_user(username, password): _t = (username,) m.execute("SELECT * FROM Users WHERE name IS ?", _t) res = m.fetchone() assert res[1] == username
if get_hash(password, res[4]) == res[3]: return True else: return False def username_exists(username): _t = (username,) m.execute("SELECT COUNT(1) FROM Users WHERE name IS ?", _t) if m.fetchone()[0] == 1: return True ###################### # USER GETTERS # ###################### # These functions get Users. # Getting user-specific data is done objectively. # BULK GETTERS def get_users(): users = [] m.execute("SELECT id FROM Users") for res in m.fetchall(): user = User(res[0]) users.append(user) return users def get_users_by_mail(mail): users = [] _t = (mail,) m.execute("SELECT id FROM Users WHERE mail IS ?", _t) for res in m.fetchall(): user = User(res[0]) users.append(user) return users # SINGLE GETTERS def get_user(id): return User(id) def get_user_by_name(name): _t = (name,) m.execute("SELECT id FROM Users WHERE name IS ?", _t) res = m.fetchone() return User(res[0]) ####################### # MISC # ####################### def get_new_id(): m.execute("SELECT id FROM Users WHERE id = ( SELECT MAX(id) FROM Users )") res = m.fetchone() if not res: # No users return 0 else: return int(res[0])+1
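A hedged round-trip sketch for the account helpers above. It assumes the imported crypto helpers (get_salt, get_hash) and the module-level cursor m behave as used in this file, and that the Users table already exists:

# if new_user('alice', 's3cret', 'alice@example.com'):
#     assert verify_user('alice', 's3cret') is True
#     assert verify_user('alice', 'wrong') is False
#     user = get_user_by_name('alice')
#     user.mod_lkarma(inc=True)   # link karma +1
#     print user.get_name(), user.get_lkarma()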
ulmusic/python-evolver
test/DS_sweep.py
Python
gpl-2.0
1,956
0.00818
import subprocess import multiprocessing import select import fcntl, os import re import matplotlib.pyplot as plt import numpy as np import time import evolver import logging evolver.logger.setLevel(logging.DEBUG) evolver.logger.addHandler(logging.StreamHandler()) def find_values(fileloc, phrase): wf = open(fileloc, 'r') words = wf.read().split() i = 0 param = list() for x in words: if x == phrase: param.append(words[i + 2]) break i += 1 return param def define_values(bo): wf = open('./dropSinusoidal.fe', 'r') m = wf.read() m = re.sub('Bo = \w*.\w*', 'Bo = %f' % bo, m) wf = open('./dropSinusoidal.fe', 'w') wf.write(m) def main(): """The main routine.""" with evolver.Evolver() as E: n = 1000 init_bo = 0 params = np.empty([3, n]) for i in range(n): print '-----------------\nTime Around ' + str(i) bo = init_
bo + 0.05*i define_values(bo) E.open_file('dr
opSinusoidal.fe') for j in range(1): #E.refine(1) vals = E.evolve(1) E.run_command('car_app') E.run_command('car') E.run_command('') E.run_command('dump') E.run_command('') E.close_file() params[0, i] = bo params[1, i] = find_values('dropSinusoidal.fe.dmp', 'contact_angle_right')[0] params[2, i] = find_values('dropSinusoidal.fe.dmp', 'contact_angle_right_app')[0] print 'To Plot' plt.plot(params[0, :], params[1, :], label="Contact Angle") plt.plot(params[0, :], params[2, :], label="Apparent Contact Angle") plt.ylabel('Contact Angle') plt.xlabel('Bond Number (Bo)') plt.legend() plt.show() if __name__ == "__main__": main()
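define_values() above rewrites one parameter inside the Evolver input file with a regex. Here is the same substitution on an in-memory string, so the pattern can be sanity-checked without the .fe file:

import re

text = 'PARAMETER Bo = 0.00 // bond number'
for bo in (0.05, 0.10):
    text = re.sub('Bo = \w*.\w*', 'Bo = %f' % bo, text)
    print text
# PARAMETER Bo = 0.050000 // bond number
# PARAMETER Bo = 0.100000 // bond number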
MrTheodor/espressopp
src/interaction/TabulatedSubEnsAngular.py
Python
gpl-3.0
5,918
0.010645
# Copyright (C) 2018 # Max Planck Institute for Polymer Research # # This file is part of ESPResSo++. # # ESPResSo++ is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # ESPResSo++ is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. r""" *************************************** espressopp.interaction.TabulatedSubEnsAngular *************************************** .. function:: espressopp.interaction.TabulatedSubEnsAngular(dim, itype, filenames) :param dim: Number of potentials to be used for this interaction :param itype: The interpolation type: 1 - linear, 2 - akima spline, 3 - cubic spline :param filenames: The tabulated potential filenames. :type itype: int :type filename: str .. function:: espressopp.interaction.FixedTripleListTabulate
dSubEnsAngular(system, ftl, potential) :param system: The Espresso++ system object. :param ftl: The FixedTripleList. :param potential: The potential. :type system: espressopp.System :type ftl: espressopp.FixedTripleList :type potential: espr
essopp.interaction.Potential .. function:: espressopp.interaction.FixedTripleListTabulatedSubEnsAngular.setPotential(potential) :param potential: The potential object. :type potential: espressopp.interaction.Potential .. function:: espressopp.interaction.FixedTripleListTypesTabulatedSubEnsAngular(system, ftl) :param system: The Espresso++ system object. :type system: espressopp.System :param ftl: The FixedTriple list. :type ftl: espressopp.FixedTripleList .. function:: espressopp.interaction.FixedTripleListTypesTabulatedSubEnsAngular.setPotential(type1, type2, type3, potential) Defines angular potential for interaction between particles of types type1-type2-type3. :param type1: Type of particle 1. :type type1: int :param type2: Type of particle 2. :type type2: int :param type3: Type of particle 3. :type type3: int :param potential: The potential to set up. :type potential: espressopp.interaction.AngularPotential """ from espressopp import pmi from espressopp.esutil import * from espressopp.interaction.AngularPotential import * from espressopp.interaction.Interaction import * from _espressopp import interaction_TabulatedSubEnsAngular, \ interaction_FixedTripleListTabulatedSubEnsAngular, \ interaction_FixedTripleListTypesTabulatedSubEnsAngular class TabulatedSubEnsAngularLocal(AngularPotentialLocal, interaction_TabulatedSubEnsAngular): def __init__(self): if not (pmi._PMIComm and pmi._PMIComm.isActive()) or pmi._MPIcomm.rank in pmi._PMIComm.getMPIcpugroup(): cxxinit(self, interaction_TabulatedSubEnsAngular) class FixedTripleListTabulatedSubEnsAngularLocal(InteractionLocal, interaction_FixedTripleListTabulatedSubEnsAngular): def __init__(self, system, ftl, potential): if not (pmi._PMIComm and pmi._PMIComm.isActive()) or pmi._MPIcomm.rank in pmi._PMIComm.getMPIcpugroup(): cxxinit(self, interaction_FixedTripleListTabulatedSubEnsAngular, system, ftl, potential) def setPotential(self, potential): if not (pmi._PMIComm and pmi._PMIComm.isActive()) or pmi._MPIcomm.rank in pmi._PMIComm.getMPIcpugroup(): self.cxxclass.setPotential(self, potential) class FixedTripleListTypesTabulatedSubEnsAngularLocal(InteractionLocal, interaction_FixedTripleListTypesTabulatedSubEnsAngular): def __init__(self, system, ftl): if pmi.workerIsActive(): cxxinit(self, interaction_FixedTripleListTypesTabulatedSubEnsAngular, system, ftl) def setPotential(self, type1, type2, type3, potential): if pmi.workerIsActive(): self.cxxclass.setPotential(self, type1, type2, type3, potential) def getPotential(self, type1, type2, type3): if pmi.workerIsActive(): return self.cxxclass.getPotential(self, type1, type2, type3) def setFixedTripleList(self, ftl): if pmi.workerIsActive(): self.cxxclass.setFixedTripleList(self, ftl) def getFixedTripleList(self): if pmi.workerIsActive(): return self.cxxclass.getFixedTripleList(self) if pmi.isController: class TabulatedSubEnsAngular(AngularPotential): 'The TabulatedSubEnsAngular potential.' 
pmiproxydefs = dict( cls = 'espressopp.interaction.TabulatedSubEnsAngularLocal', pmicall = ['weight_get', 'weight_set', 'alpha_get', 'alpha_set', 'targetProb_get', 'targetProb_set', 'colVarSd_get', 'colVarSd_set', 'dimension_get', 'filenames_get', 'filename_get', 'filename_set', 'addInteraction', 'colVarRefs_get', 'colVarRef_get'] ) class FixedTripleListTabulatedSubEnsAngular(Interaction): __metaclass__ = pmi.Proxy pmiproxydefs = dict( cls = 'espressopp.interaction.FixedTripleListTabulatedSubEnsAngularLocal', pmicall = ['setPotential', 'getFixedTripleList'] ) class FixedTripleListTypesTabulatedSubEnsAngular(Interaction): __metaclass__ = pmi.Proxy pmiproxydefs = dict( cls = 'espressopp.interaction.FixedTripleListTypesTabulatedSubEnsAngularLocal', pmicall = ['setPotential','getPotential', 'setFixedTripleList', 'getFixedTripleList'] )
yunity/foodsaving-backend
karrot/activities/migrations/0022_add_activity_types.py
Python
agpl-3.0
1,901
0.00263
# Generated by Django 3.0.9 on 2020-08-16 20:47 from django.db import migrations, models import django.db.models.deletion import django.utils.timezone class Migration(migrations.Migration): dependencies = [ ('groups', '0042_auto_20200507_1258'), ('activities', '0021_remove_activity_feedback_as_sum'), ] operations = [ migrations.CreateModel( name='ActivityType', fields=[ ('id', models.AutoField(primary_key=True, serialize=False)), ('created_at', models.DateTimeField(default=django.utils.timezone.now)), ('group', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='activity_types', to='groups.Group')), ('name', models.CharField(max_length=80)), ('colour', models.CharField(max_length=6)), ('icon', models.CharField(max_length=32)), ('feedback_icon', models.CharField(max_length=32)), ('has_feedback', models.BooleanField(default=True)),
('has_feedback_weight', models.BooleanField(default=True)), ], options={ 'abstract': False, }, ), migrations.AddField( model_name='activity', name='activity_type', f
ield=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='activities', to='activities.ActivityType'), ), migrations.AddField( model_name='activityseries', name='activity_type', field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='activity_series', to='activities.ActivityType'), ), migrations.AlterUniqueTogether( name='activitytype', unique_together={('group', 'name')}, ), ]
BetterCollective/thumbor
thumbor/utils.py
Python
mit
2,733
0
#!/usr/bin/python # -*- coding: utf-8 -*- # thumbor imaging service # https://github.com/thumbor/thumbor/wiki # Licensed under the MIT license: # http://www.opensource.org/licenses/mit-license # Copyright (c) 2011 globo.com timehome@corp.globo.com import os import logging from functools import wraps
CONTENT_TYPE = { '.jpg': 'image/jpeg', '.jpeg': 'image/jpeg', '.gif': 'image/gif', '.png': 'image/png', '.webp': 'image/webp', '.mp4': 'video/mp4', '.webm': 'video/webm', '.svg': 'image/svg+xml', } EXTENSION = { 'image/jpeg': '.jpg', 'image/gif': '.gif', 'image/png': '.png', 'image/webp': '.webp', 'video/mp4': '.mp4', 'video/webm': '.webm', 'image/svg+xml': '.svg', }
logger = logging.getLogger('thumbor') class on_exception(object): def __init__(self, callback, exception_class=Exception): self.callback = callback self.exception_class = exception_class def __call__(self, fn): def wrapper(*args, **kwargs): self_instance = args[0] if len(args) > 0 else None try: return fn(*args, **kwargs) except self.exception_class as exc_value: if self.callback: # Execute the callback and let it handle the exception if self_instance: return self.callback( self_instance, fn.__name__, self.exception_class, exc_value ) else: return self.callback( fn.__name__, self.exception_class, exc_value ) else: raise return wrapper class deprecated(object): def __init__(self, msg=None): self.msg = ": {0}".format(msg) if msg else "." def __call__(self, func): @wraps(func) def new_func(*args, **kwargs): logger.warn( "Deprecated function {0}{1}".format(func.__name__, self.msg) ) return func(*args, **kwargs) return new_func def which(program): def is_exe(fpath): return os.path.isfile(fpath) and os.access(fpath, os.X_OK) fpath, fname = os.path.split(program) if fpath: if is_exe(program): return program else: for path in os.environ["PATH"].split(os.pathsep): path = path.strip('"') exe_file = os.path.join(path, program) if is_exe(exe_file): return exe_file return None
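A usage sketch for the on_exception decorator above: when the wrapped function is a method, the callback receives (instance, method_name, exception_class, exception_value) and its return value replaces the method's result. The storage class and callback below are illustrative:

def _on_load_error(instance, method_name, exc_class, exc_value):
    logger.error('%s failed with %s: %s', method_name, exc_class.__name__, exc_value)
    return None  # swallow the error and hand back a fallback value

class FileStorage(object):
    @on_exception(_on_load_error, IOError)
    def load(self, path):
        with open(path, 'rb') as f:
            return f.read()

# FileStorage().load('/no/such/file') logs the IOError and returns None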
robotics-at-maryland/qubo
src/teleop/src/keyboard_controller.py
Python
mit
2,473
0.024262
import pygame import rospy import time from std_msgs.msg import Float64 from std_msgs.msg import Float64MultiArray #pygame setup pygame.init() pygame.display.set_mode([100,100]) delay = 100 interval = 50 pygame.key.set_repeat(delay, interval) #really this should be passed in or something but for now if you want to change the name just do it here robot_namespace = "qubo/" effort = 50 num_thrusters = 8 rospy.init_node('keyboard_node', anonymous=False) #rospy spins all these up in their own thread, no need to call spin() roll_pub = rospy.Publisher(robot_namespace + "roll_cmd" , Float64, queue_size = 10 ) pitch_pub = rospy.Publisher(robot_namespace + "pitch_cmd" , Float64, queue_size = 10 ) yaw_pub = rospy.Publisher(robot_namespace + "yaw_cmd" , Float64, queue_size = 10 ) depth_pub = rospy.Publisher(robot_namespace + "depth_cmd" , Float64, queue_size = 10 ) surge_pub = rospy.Publisher(ro
bot_namespace + "surge_cmd" , Float64, queue_size = 10 ) sway_pub = rospy.Publisher(robot_namespace + "sway_cmd" , Float64, queue_size = 10 ) thruster_pub = rospy.Publisher(robot_namespace + "thruster_cmds"
, Float64MultiArray, queue_size = 10) thruster_msg = Float64MultiArray() pygame.key.set_repeat(10,10) while(True): for event in pygame.event.get(): if event.type == pygame.KEYDOWN: print event.key keys_pressed = pygame.key.get_pressed() sway = surge = yaw = depth = 0 thruster_msg.data = [0]*num_thrusters if keys_pressed[pygame.K_a]: sway_pub.publish(effort) elif keys_pressed[pygame.K_d]: sway_pub.publish(-effort) if keys_pressed[pygame.K_w]: surge_pub.publish(effort) print "asdasd" elif keys_pressed[pygame.K_s]: surge_pub.publish(-effort) if keys_pressed[pygame.K_q]: yaw_pub.publish(effort) elif keys_pressed[pygame.K_e]: yaw_pub.publish(-effort) if keys_pressed[pygame.K_r]: depth_pub.publish(effort) elif keys_pressed[pygame.K_f]: depth_pub.publish(-effort) if keys_pressed[pygame.K_MINUS]: sign = -1 else: sign = 1 #this only works because pygame.k_X is a number and k_0 - k_8 are contiguous for i in range(0, 8): if keys_pressed[pygame.K_0 + i]: thruster_msg.data[i] = (effort*sign) print thruster_msg.data thruster_pub.publish(thruster_msg) time.sleep(.05)
arth-co/shoop
shoop/simple_cms/__init__.py
Python
agpl-3.0
823
0
# This file is part of Shoop. # # Copyright (c) 2012-2015, Shoop Ltd. All rights reserved. # # This source code is licensed
under the AGPLv3 license found in the # LICENSE file in the root directory of this source tree. from __future__ import unicode_literals from django.utils.translation import ugettext_lazy as _ import sh
oop.apps class AppConfig(shoop.apps.AppConfig): name = __name__ verbose_name = _("Simple CMS") label = "shoop_simple_cms" provides = { "front_urls_post": [__name__ + ".urls:urlpatterns"], "admin_module": [ "shoop.simple_cms.admin_module:SimpleCMSAdminModule" ], "front_template_helper_namespace": [ "shoop.simple_cms.template_helpers:SimpleCMSTemplateHelpers" ] } default_app_config = __name__ + ".AppConfig"
ericholscher/django-haystack
tests/whoosh_tests/tests/whoosh_backend.py
Python
bsd-3-clause
46,194
0.002165
from datetime import timedelta from decimal import Decimal import os import shutil from whoosh.fields import TEXT, KEYWORD, NUMERIC, DATETIME, BOOLEAN from whoosh.qparser import QueryParser from django.conf import settings from django.utils.datetime_safe import datetime, date from django.test import TestCase from haystack import connections, connection_router, reset_search_queries from haystack import indexes from haystack.inputs import AutoQuery from haystack.models import SearchResult from haystack.query import SearchQuerySet, SQ from haystack.utils.loading import UnifiedIndex from core.models import MockModel, AnotherMockModel, AFourthMockModel from core.tests.mocks import MockSearchResult class WhooshMockSearchIndex(indexes.SearchIndex, indexes.Indexable): text = indexes.CharField(document=True, use_template=True) name = indexes.CharField(model_attr='author') pub_date = indexes.DateField(model_attr='pub_date') def get_model(self): return MockModel class WhooshAnotherMockSearchIndex(indexes.SearchIndex, indexes.Indexable): text = indexes.CharField(document=True) name = indexes.CharField(model_attr='author') pub_date = indexes.DateField(model_attr='pub_date') def get_model(self): return AnotherMockModel def prepare_text(self, obj): return obj.author class AllTypesWhooshMockSearchIndex(indexes.SearchIndex, indexes.Indexable): text = indexes.CharField(document=True, use_template=True) name = indexes.CharField(model_attr='author', indexed=False) pub_date = indexes.DateField(model_attr='pub_date') sites = indexes.MultiValueField() seen_count = indexes.IntegerField(indexed=False) is_active = indexes.BooleanField(default=True) def get_model(self): return MockModel class WhooshMaintainTypeMockSearchIndex(indexes.SearchIndex, indexes.Indexable): text = indexes.CharField(document=True) month = indexes.CharField(indexed=False) pub_date = indexes.DateField(model_attr='pub_date') def get_model(self): return MockModel def prepare_text(self, obj): return "Indexed!\n%s" % obj.pk def prepare_month(self, obj): return "%02d" % obj.pub_date.month class WhooshBoostMockSearchIndex(indexes.SearchIndex, indexes.Indexable): text = indexes.CharField( document=True, use_template=True, template_name='search/indexes/core/mockmodel_template.t
xt' ) author = indexes.CharField(model_attr='author', weight=2.0) editor = indexes.CharField(model_attr='editor') pub_date = indexes.DateField(model_attr='pub_date') def get_model(self): return AFourthMockModel def prepare(self, obj): data = super(WhooshBoostMockSearchIndex, self).prepare(obj) if obj.pk % 2 == 0:
data['boost'] = 2.0 return data class WhooshAutocompleteMockModelSearchIndex(indexes.SearchIndex, indexes.Indexable): text = indexes.CharField(model_attr='foo', document=True) name = indexes.CharField(model_attr='author') pub_date = indexes.DateField(model_attr='pub_date') text_auto = indexes.EdgeNgramField(model_attr='foo') name_auto = indexes.EdgeNgramField(model_attr='author') def get_model(self): return MockModel class WhooshSearchBackendTestCase(TestCase): fixtures = ['bulk_data.json'] def setUp(self): super(WhooshSearchBackendTestCase, self).setUp() # Stow. temp_path = os.path.join('tmp', 'test_whoosh_query') self.old_whoosh_path = settings.HAYSTACK_CONNECTIONS['default']['PATH'] settings.HAYSTACK_CONNECTIONS['default']['PATH'] = temp_path self.old_ui = connections['default'].get_unified_index() self.ui = UnifiedIndex() self.wmmi = WhooshMockSearchIndex() self.wmtmmi = WhooshMaintainTypeMockSearchIndex() self.ui.build(indexes=[self.wmmi]) self.sb = connections['default'].get_backend() connections['default']._index = self.ui self.sb.setup() self.raw_whoosh = self.sb.index self.parser = QueryParser(self.sb.content_field_name, schema=self.sb.schema) self.sb.delete_index() self.sample_objs = MockModel.objects.all() def tearDown(self): if os.path.exists(settings.HAYSTACK_CONNECTIONS['default']['PATH']): shutil.rmtree(settings.HAYSTACK_CONNECTIONS['default']['PATH']) settings.HAYSTACK_CONNECTIONS['default']['PATH'] = self.old_whoosh_path connections['default']._index = self.old_ui super(WhooshSearchBackendTestCase, self).tearDown() def whoosh_search(self, query): self.raw_whoosh = self.raw_whoosh.refresh() searcher = self.raw_whoosh.searcher() return searcher.search(self.parser.parse(query), limit=1000) def test_non_silent(self): bad_sb = connections['default'].backend('bad', PATH='/tmp/bad_whoosh', SILENTLY_FAIL=False) bad_sb.use_file_storage = False bad_sb.storage = 'omg.wtf.bbq' try: bad_sb.update(self.wmmi, self.sample_objs) self.fail() except: pass try: bad_sb.remove('core.mockmodel.1') self.fail() except: pass try: bad_sb.clear() self.fail() except: pass try: bad_sb.search('foo') self.fail() except: pass def test_update(self): self.sb.update(self.wmmi, self.sample_objs) # Check what Whoosh thinks is there. self.assertEqual(len(self.whoosh_search(u'*')), 23) self.assertEqual([doc.fields()['id'] for doc in self.whoosh_search(u'*')], [u'core.mockmodel.%s' % i for i in xrange(1, 24)]) def test_remove(self): self.sb.update(self.wmmi, self.sample_objs) self.assertEqual(self.sb.index.doc_count(), 23) self.sb.remove(self.sample_objs[0]) self.assertEqual(self.sb.index.doc_count(), 22) def test_clear(self): self.sb.update(self.wmmi, self.sample_objs) self.assertEqual(self.sb.index.doc_count(), 23) self.sb.clear() self.assertEqual(self.sb.index.doc_count(), 0) self.sb.update(self.wmmi, self.sample_objs) self.assertEqual(self.sb.index.doc_count(), 23) self.sb.clear([AnotherMockModel]) self.assertEqual(self.sb.index.doc_count(), 23) self.sb.clear([MockModel]) self.assertEqual(self.sb.index.doc_count(), 0) self.sb.index.refresh() self.sb.update(self.wmmi, self.sample_objs) self.assertEqual(self.sb.index.doc_count(), 23) self.sb.clear([AnotherMockModel, MockModel]) self.assertEqual(self.raw_whoosh.doc_count(), 0) def test_search(self): self.sb.update(self.wmmi, self.sample_objs) self.assertEqual(len(self.whoosh_search(u'*')), 23) # No query string should always yield zero results. 
self.assertEqual(self.sb.search(u''), {'hits': 0, 'results': []}) # A one letter query string gets nabbed by a stopwords filter. Should # always yield zero results. self.assertEqual(self.sb.search(u'a'), {'hits': 0, 'results': []}) # Possible AttributeError? # self.assertEqual(self.sb.search(u'a b'), {'hits': 0, 'results': [], 'spelling_suggestion': '', 'facets': {}}) self.assertEqual(self.sb.search(u'*')['hits'], 23) self.assertEqual([result.pk for result in self.sb.search(u'*')['results']], [u'%s' % i for i in xrange(1, 24)]) self.assertEqual(self.sb.search(u'', highlight=True), {'hits': 0, 'results': []}) self.assertEqual(self.sb.search(u'index*', highlight=True)['hits'], 23) # DRL_FIXME: Uncomment once highlighting works. # self.assertEqual([result.highlighted['text'][0] for result in self.sb.search('Index*', highlight=True)['results']], ['<em>Indexed</em>!\n3', '<em>Indexed</em>!\n2', '<em>Indexed</em>!\n1']) self.assertEqual(self.sb.search(u'Indx')['hits'], 0) self.assertEqual(self.sb.search(u'Indx')['spelling_suggestion'], u'index')
LIMXTEC/BitCore
test/functional/combine_logs.py
Python
mit
4,611
0.004121
#!/usr/bin/env python3 """Combine logs from multiple bitcore nodes as well as the test_framework log. This streams the combined log output to stdout. Use combine_logs.py > outputfile to write to an outputfile.""" import argparse from collections import defaultdict, namedtuple import heapq import itertools import os import re import sys # Matches on the date format at the start of the log event TIMESTAMP_PATTERN = re.compile(r"^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}\.\d{6}") LogEvent = namedtuple('LogEvent', ['timestamp', 'source', 'event']) def main(): """Main function. Parses args, reads the log files and renders them as text or html.""" parser = argparse.ArgumentParser(usage='%(prog)s [options] <test temporary directory>', description=__doc__) parser.add_argument('-c', '--color', dest='color', action='store_true', help='outputs the combined log with events colored by source (requires posix terminal colors. Use less -r for viewing)') parser.add_argument('--html', dest='html', action='store_true', help='outputs the combined log as html. Requires jinja2. pip install jinja2') args, unknown_args = parser.parse_known_args() if args.color and os.name != 'posix': print("Color output requires posix terminal colors.") sys.exit(1) if args.html and args.color: print("Only one out of --color or --html should be specified") sys.exit(1) # There should only be one unknown argument - the path of the temporary test directory if len(unknown_args) != 1: print("Unexpected arguments" + str(unknown_args)) sys.exit(1) log_events = read_logs(unknown_args[0]) print_logs(log_events, color=args.color, html=args.html) def read_logs(tmp_dir): """Reads log files. Delegates to generator function get_log_events() to provide individual log events for each of the input log files.""" files = [("test", "%s/test_framework.log" % tmp_dir)] for i in itertools.count(): logfile = "{}/node{}/regtest/debug.log".format(tmp_dir, i) if not os.path.isfile(logfile): break files.append(("node%d" % i, logfile)) return heapq.merge(*[get_log_events(source, f) for source, f in files]) def get_log_events(source, logfile): """Generator function that returns individual log events. Log events may be split over multiple lines. We use the timestamp regex match as the marker for a new log event.""" try: with open(logfile, 'r') as infile: event = '' timestamp = '' for line in infile: # skip blank lines if line == '\n': continue # if this line has a timestamp, it's the start of a new log event. time_match = TIMESTAMP_PATTERN.match(line) if time_match: if event: yield LogEvent(timestamp=timestamp, source=source, event=event.rstrip()) event = line timestamp = time_match.group() # if it doesn't have a timestamp, it's a continuation line of the previous log. else: event += "\n" + line # Flush the final event yield LogEvent(timestamp=timestamp, source=source, event=event.rstrip()) except FileNotFoundError: print("File %s could not be
opened. Continuing without it." % logfile, file=sys.stderr) def print_logs(log_events, color=False, html=False): """Renders the iterator of log events into text or html.""" if not html: colors = defaultdict(lambda: '') if color: colors["test"] = "\033[0;36m" # CYAN colors["node0"] = "\033[0;34m" # BLUE colors["node1"] = "\033[0;32m" # GR
EEN colors["node2"] = "\033[0;31m" # RED colors["node3"] = "\033[0;33m" # YELLOW colors["reset"] = "\033[0m" # Reset font color for event in log_events: print("{0} {1: <5} {2} {3}".format(colors[event.source.rstrip()], event.source, event.event, colors["reset"])) else: try: import jinja2 except ImportError: print("jinja2 not found. Try `pip install jinja2`") sys.exit(1) print(jinja2.Environment(loader=jinja2.FileSystemLoader('./')) .get_template('combined_log_template.html') .render(title="Combined Logs from testcase", log_events=[event._asdict() for event in log_events])) if __name__ == '__main__': main()
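The heart of the script above is the lazy merge in read_logs(): each log file is already sorted by timestamp, so heapq.merge over one generator per file yields a single chronological stream without loading any file into memory, and namedtuples compare field by field so the timestamp decides the order. A minimal self-contained sketch of that technique (the sources and timestamps below are illustrative, not real node logs):

# Minimal sketch of the heapq.merge technique used by read_logs() above:
# lazily merge several already-sorted event streams into one stream.
import heapq
from collections import namedtuple

LogEvent = namedtuple('LogEvent', ['timestamp', 'source', 'event'])

def events(source, entries):
    # Each input stream must already be sorted by timestamp.
    for timestamp, text in entries:
        yield LogEvent(timestamp=timestamp, source=source, event=text)

node0 = events('node0', [('00:00:01', 'start'), ('00:00:03', 'connect')])
node1 = events('node1', [('00:00:02', 'start'), ('00:00:04', 'sync')])

# namedtuples compare element by element, so the timestamp field decides.
for event in heapq.merge(node0, node1):
    print(event.timestamp, event.source, event.event)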
gres147679/IngSoftwareRectaFinal
Tarea5/ServiSoft/ServiSoft/WebAccess/signalActions.py
Python
gpl-2.0
948
0.033755
import models def borradoPlan(sender, **kwargs): borrada = kwargs['instance'] if borrada.tipo == "pr": aBorrar = models.PlanPrepago.objects.filter(codplan=borrada) else: aBorrar = models.PlanPostpago.objects.filter(codplan=borrada) aBorrar.delete() def insertadoServicio(sender, **kwargs): insertado = kwargs['instance'] print insertado if insertado.id is not None: nuevoPaq = models.Paquete(codpaq=insertado.c
odserv,nombrepaq=insertado.nombreserv + ' Paquete',precio=insertado.costo) nuevoPaq.save() nuevo
Contiene = models.Contiene(codpaq=nuevoPaq,codserv=insertado,cantidad=1) nuevoContiene.save() def borradoServicio(sender, **kwargs): borrado = kwargs['instance'] if borrado.id is not None: contieneBorrar = models.Contiene.objects.all().filter(codpaq=borrado.codserv) contieneBorrar.delete() paqBorrar = models.Paquete.objects.all().filter(codpaq=borrado.codserv) paqBorrar.delete()
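The three receivers above follow Django's (sender, **kwargs) signal signature, but their registration is not shown in this file. A hedged wiring sketch, assuming the sender models are named Plan and Servicio in the same app's models.py (hypothetical names inferred from the handler bodies; the specific signal choice is also an assumption):

# Hedged sketch: registering the receivers above with Django model signals.
# `Plan` and `Servicio` are assumed model names inferred from the handlers.
from django.db.models.signals import post_save, pre_delete

import models
import signalActions

pre_delete.connect(signalActions.borradoPlan, sender=models.Plan)
post_save.connect(signalActions.insertadoServicio, sender=models.Servicio)
pre_delete.connect(signalActions.borradoServicio, sender=models.Servicio)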
mzdaniel/oh-mainline
vendor/packages/amqplib/demo/amqp_clock.py
Python
agpl-3.0
2,344
0.005973
#!/usr/bin/env python """ AMQP Clock Fires off simple messages at one-minute intervals to a topic exchange named 'clock', with the topic of the message being the local time as 'year.month.date.dow.hour.minute', for example: '2007.11.26.1.12.33', where the dow (day of week) is 0 for Sunday, 1 for Monday, and so on (similar to Unix crontab). A consumer could then bind a queue to the routing key '#.0' for example to get a message at the beginning of each hour. 2007-11-26 Barry Pederson <bp@barryp.org> """ from datetime import datetime from optparse import OptionParser from time import sleep import amqplib.client_0_8 as amqp Message = amqp.Message EXCHANGE_NAME = 'clock' TOPIC_PATTERN = '%Y.%m.%d.%w.%H.%M' # Python datetime.strftime() pattern def main():
parser = OptionParser() parser.add_option('--host', dest='host', help='AMQP server to connect to (default: %default)', default='localhost') parser.add_option('-u', '--userid', dest='userid', help='AMQP userid to authenticate as (default: %default)', default='guest') parser.add_option('-p', '--password', dest='password', help='AMQP
password to authenticate with (default: %default)', default='guest') parser.add_option('--ssl', dest='ssl', action='store_true', help='Enable SSL with AMQP server (default: not enabled)', default=False) options, args = parser.parse_args() conn = amqp.Connection(options.host, options.userid, options.password) ch = conn.channel() ch.access_request('/data', write=True, active=True) ch.exchange_declare(EXCHANGE_NAME, type='topic') # Make sure our first message is close to the beginning # of a minute now = datetime.now() if now.second > 0: sleep(60 - now.second) while True: now = datetime.now() msg = Message(timestamp=now) msg_topic = now.strftime(TOPIC_PATTERN) ch.basic_publish(msg, EXCHANGE_NAME, routing_key=msg_topic) # Don't know how long the basic_publish took, so # grab the time again. now = datetime.now() sleep(60 - now.second) ch.close() conn.close() if __name__ == '__main__': main()
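The module docstring above suggests binding a queue to the routing key '#.0' to get one message at the start of each hour. A companion consumer, sketched against the same amqplib 0.8 API the publisher uses (the queue handling, callback, and no_ack choice are illustrative assumptions):

# Hedged consumer sketch for the clock publisher above: bind to minute "0"
# of the topic pattern to receive one message per hour (amqplib 0.8 API).
import amqplib.client_0_8 as amqp

conn = amqp.Connection('localhost', 'guest', 'guest')
ch = conn.channel()
ch.access_request('/data', read=True, active=True)
ch.exchange_declare('clock', type='topic')

qname, _, _ = ch.queue_declare()          # server-named temporary queue
ch.queue_bind(qname, 'clock', routing_key='#.0')

def on_tick(msg):
    print('hourly tick, topic: %s' % msg.delivery_info['routing_key'])

ch.basic_consume(qname, callback=on_tick, no_ack=True)
while ch.callbacks:
    ch.wait()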
arju88nair/projectCulminate
venv/lib/python3.5/site-packages/nltk/app/__init__.py
Python
apache-2.0
1,733
0.000577
# Natural Language Toolkit: Applications package # # Copyright (C) 2001-2017 NLTK Project # Author: Edward Loper <edloper@gmail.com> # Steven Bird <stevenbird1@gmail.com> # URL: <http://nltk.org/> # For license information, see LICENSE.TXT """ Interactive NLTK Applications: chartparser: Chart Parser chunkparser: Regular-Expression Chunk Parser collocations: Find collocations in text concordance: Part-of-speech concordancer nemo: Finding (and Replacing) Nemo regular expression tool rdparser: Recursive Descent Parser srparser: Shift-Reduce Parser wordnet: WordNet Browser """ # Import Tkinter-based modules if Tkinter is installed try: from six.moves import tkinter except ImportError: import warnings warnings.warn("nltk.app package not loaded " "(please install Tkinter library).") else: from nltk.app.chartparser_app import app as chartparser from nltk.app.chunkparser_app import app as chunkparser from nltk.app.collocations_app import app as collocations from nltk.app.concordance_app import app as concordance from nltk.app.nemo_app import app as nemo from
nltk.app.rdparser_app import app as rdparser from nltk.app.srparser_app import app as srparser from nltk.app.wordnet_app import app as wordnet try: from matplotlib import pylab except ImportError: import warnings warnings.warn("nltk.app.wordfreq not loaded " "(requires the matplotlib library).") else: from nltk.ap
p.wordfreq_app import app as wordfreq # skip doctests from this package def setup_module(module): from nose import SkipTest raise SkipTest("nltk.app examples are not doctests")
jordanemedlock/psychtruths
temboo/core/Library/Amazon/S3/CopyObject.py
Python
apache-2.0
9,236
0.005414
# -*- coding: utf-8 -*- ############################################################################### # # CopyObject # Makes a copy of an existing object in S3 Storage. # # Python versions 2.6, 2.7, 3.x # # Copyright 2014, Temboo Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, # either express or implied. See the License for the specific # language governing permissions and limitations under the License. # # ############################################################################### from temboo.core.choreography import Choreography from temboo.core.choreography import InputSet from temboo.core.choreography import ResultSet from temboo.core.choreography import ChoreographyExecution import json class CopyObject(Choreography): def __init__(self, temboo_session): """ Create a new instance of the CopyObject Choreo. A TembooSession object, containing a valid set of Temboo credentials, must be supplied. """ super(CopyObject, self).__init__(temboo_session, '/Library/Amazon/S3/CopyObject') def new_input_set(self): return CopyObjectInputSet() def _make_result_set(self, result, path): return CopyObjectResultSet(result, path) def _make_execution(self, session, exec_id, path): return CopyObjectChoreographyExecution(session, exec_id, path) class CopyObjectInputSet(InputSet): """ An InputSet with methods appropriate for specifying the inputs to the CopyObject Choreo. The InputSet object is used to specify input parameters when executing this Choreo. """ def set_AWSAccessKeyId(self, value): """ Set the value of the AWSAccessKeyId input for this Choreo. ((required, string) The Access Key ID provided by Amazon Web Services.) """ super(CopyObjectInputSet, self)._set_input('AWSAccessKeyId', value) def set_AWSSecretKeyId(self, value): """ Set the value of the AWSSecretKeyId input for this Choreo. ((required, string) The Secret Key ID provided by Amazon Web Services.) """ super(CopyObjectInputSet, self)._set_input('AWSSecretKeyId', value) def set_BucketName(self, value): """ Set the value of the BucketName input for this Choreo. ((required, string) The name of the bucket that will be the file destination.) """ super(CopyObjectInputSet, self)._set_input('BucketName', value) def set_CannedACL(self, value): """ Set the value of the CannedACL input for this Choreo. ((optional, string) By default all objects are private (only owner has full access control). Valid values: private, public-read, public-read-write, authenticated-read, bucket-owner-read, bucket-owner-full-control.) """ super(CopyObjectInputSet, self)._set_input('CannedACL', value) def set_ContentType(self, value): """ Set the value of the ContentType input for this Choreo. ((optional, string) ContentType. Default is application/octet-stream.) """ super(CopyObjectInputSet, self)._set_input('ContentType', value) def set_FileToCopy(self, value): """ Set the value of the FileToCopy input for this Choreo. ((required, string) The name of the file to copy.) """ super(CopyObjectInputSet, self)._set_input('FileToCopy', value) def set_IfMatch(self, value): """ Set the value of the IfMatch input for this Choreo. 
((optional, string) Copies the object if its entity tag (ETag) matches the specified tag; otherwise returns a 412 HTTP status code error (failed precondition).) """ super(CopyObjectInputSet, self)._set_input('IfMatch', value) def set_IfModifiedSince(self, value): """ Set the value of the IfModifiedSince input for this Choreo. ((optional, date) Copies if it has been modified since the specified time; otherwise returns a 412 HTTP status code error (failed precondition). Must be valid HTTP date. Can be used with IfMatch only.) """ super(CopyObjectInputSet, self)._set_input('IfModifiedSince', value) def set_IfNoneMatch(self, value): """ Set the value of the IfNoneMatch input for this Choreo. ((optional, string) Copies the object if its entity tag (ETag) is different from the specified tag; otherwise returns a 412 HTTP status code error (failed precondition).) """ super(CopyObjectInputSet, self)._set_input('IfNoneMatch', value) def set_IfUnmodifiedSince(self, value): """ Set the value of the IfUnmodifiedSince input for this Choreo. ((optional, date) Copies if it hasn't been modified since the specified time; otherwise returns a 412 HTTP status code error (failed precondition). Must be valid HTTP date. Can be used with IfMatch or IfNoneMatch only.) """ super(CopyObjectInputSet, self)._set_input('IfUnmodifiedSince', value) def set_NewFileName(self, value): """ Set the value of the NewFileName input for this Choreo. ((required, string) The file name for the new copy.) """ super(CopyObjectInputSet, self)._set_input('NewFileName', value) def set_ResponseFormat(self, value): """ Set the value of the ResponseFormat input for this Choreo. ((optional, string) The format that the response should be in. Valid values are "xml" (the default) and "json".) """ super(CopyObjectInputSet, self)._set_input('ResponseFormat', value) def set_SSECAlgorithm(self, value): """ Set the value of the SSECAlgorithm input for this Choreo. ((optional, string) Specifies the server-side encryption with customer-provided encryption keys (SSE-C) algorithm to use when Amazon S3 creates the target object. Valid value: AES256.) """ super(CopyObjectInputSet, self)._set_input('SSECAlgorithm', value) def set_SSECKey(self, value): """ Set the value of the SSECKey input for this Choreo. ((optional, string) The customer-provided AES-256 256-bit (32-byte) encryption key for Amazon S3 to use to encrypt or decrypt your copied data object.) """ super(CopyObjectInputSet, self)._set_input('SSECKey', value) def set_SSECSourceAlgorithm(self, value): """ Set the value of the SSECSourceAlgorithm input for this Choreo. ((optional, string) Specifies the server-side encryption with customer-provided encryption keys (SSE-C) algorithm to use to decrypt the Amazon S3 source object being copied. Valid value: AES256.) """ super(CopyObjectInputSet, self)._set_input('SSECSourceAlgorithm', value) def set_SSECSourceKey(self, value): """ Set the value of the SSECSourceKey input for this Choreo. ((optional, string) The customer-provided AES-256 256-bit (32-byte) encryption key for Amazon S3 to use to decrypt the copy source object.) """ super(CopyObjectInputSet, self)._set_input('SSECSourceKey', value) d
ef set_ServerSideEncryption(self, value): """ Set the value of the ServerSideEncryption input for this Choreo. ((optional, string) Specifies the server-side encryption algorithm to use when Amazon S3 creates the target object. Valid value: AES256.) """ super(CopyObjectInputSet, self)._set_input('ServerSid
eEncryption', value) def set_StorageClass(self, value): """ Set the value of the StorageClass input for this Choreo. ((optional, string) Enables RRS customers to store their noncritical, reproducible data at lower levels of redundancy than Amazon S3's standard storage. Valid Values: STANDARD (default), REDUCED_REDUNDANCY.) """ super(CopyObjectInputSet, self)._set_input('StorageClass', value) def set_WebsiteRedirectLocation(self, value): """
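The record above is truncated mid-docstring, but generated Choreo classes like this one all follow the same execution pattern in the Temboo Python SDK. A hedged usage sketch, under the assumption that the SDK's usual session / new_input_set / execute_with_results flow applies (account, app key, and AWS values are placeholders):

# Hedged usage sketch for the CopyObject Choreo above; the session
# arguments and AWS credentials are placeholders, not working values.
from temboo.core.session import TembooSession
from temboo.Library.Amazon.S3.CopyObject import CopyObject

session = TembooSession('ACCOUNT_NAME', 'APP_KEY_NAME', 'APP_KEY_VALUE')
choreo = CopyObject(session)

inputs = choreo.new_input_set()
inputs.set_AWSAccessKeyId('AKIA...')
inputs.set_AWSSecretKeyId('...')
inputs.set_BucketName('my-bucket')
inputs.set_FileToCopy('report.csv')
inputs.set_NewFileName('report-copy.csv')

results = choreo.execute_with_results(inputs)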
r0mai/metashell
3rd/templight/llvm/projects/compiler-rt/test/ubsan_minimal/lit.common.cfg.py
Python
gpl-3.0
1,525
0.013115
# -*- Python -*- import os def get_required_attr(config, attr_name): attr_value = getattr(config, attr_name, None) if attr_value == None: lit_config.fatal(
"No attribute %r in test configuration! You may need to run " "tests from your build directory or add this attribute " "to lit.site.cfg.py " % attr_name) return attr_value # Setup source root. config.test_source_root = os.path.di
rname(__file__) config.name = 'UBSan-Minimal-' + config.target_arch def build_invocation(compile_flags): return " " + " ".join([config.clang] + compile_flags) + " " target_cflags = [get_required_attr(config, "target_cflags")] clang_ubsan_cflags = ["-fsanitize-minimal-runtime"] + target_cflags clang_ubsan_cxxflags = config.cxx_mode_flags + clang_ubsan_cflags # Define %clang and %clangxx substitutions to use in test RUN lines. config.substitutions.append( ("%clang ", build_invocation(clang_ubsan_cflags)) ) config.substitutions.append( ("%clangxx ", build_invocation(clang_ubsan_cxxflags)) ) # Default test suffixes. config.suffixes = ['.c', '.cc', '.cpp'] # Check that the host supports UndefinedBehaviorSanitizerMinimal tests if config.host_os not in ['Linux', 'FreeBSD', 'NetBSD', 'Darwin', 'OpenBSD']: # TODO: Windows config.unsupported = True # Don't target x86_64h if the test machine can't execute x86_64h binaries. if '-arch x86_64h' in target_cflags and 'x86_64h' not in config.available_features: config.unsupported = True config.available_features.add('arch=' + config.target_arch)
gulopine/steel
steel/fields/__init__.py
Python
bsd-3-clause
214
0
from steel.fields.base import * from steel.fields.numbers import * from steel.fields.strings import * from steel.fields.compression import * from
steel.fields.co
mpound import * from steel.fields.integrity import *
jrha/artemis
tools/artemis-plot.py
Python
gpl-3.0
5,014
0.008975
#!/usr/bin/env python # # Copyright Science and Technology Facilities Council, 2009-2012. # # This file is part of ARTEMIS. # # ARTEMIS is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # ARTEMIS is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with ARTEMIS. If not, see <http://www.gnu.org/licenses/>. # import urllib2, datetime import numpy as np import matplotlib.pyplot as plt from matplotlib.mlab import griddata from matplotlib import colors from sys import exit as sys_exit #Fall back to simplejson for versions of python < 2.5 (simplejson requires seperate install) try: import json except ImportError: try: import simplejson as json except ImportError: sys_exit("ERROR: Unable to find a usable json module, is simplejson installed?") TEMP_MIN = 15 TEMP_MAX = 38 DPI = 106 cdict = { 'red' : ((0.0, 0.1, 0.1), (0.25, 0.0, 0.0), (0.5, 1.0, 1.0), (0.75, 1.0, 1.0), (1.0, 1.0, 1.0)), 'green' : ((0.0, 0.1, 0.1), (0.25, 0.0, 0.0), (0.5, 0.0, 0.0), (0.75, 1.0, 1.0), (1.0, 1.0, 1.0)), 'blue' : ((0.0, 0.1, 0.1), (0.25, 0.5, 0.5), (0.5, 0.0, 0.0), (0.75, 0
.0, 0.0), (1.0, 1.0, 1.0))
, } my_cmap = colors.LinearSegmentedColormap('my_colormap',cdict,256) #pcolor(rand(10,10),cmap=plt.cm.jet) def process(d, f, mode): x = [] y = [] z = [] for i in d: if "TEMPERATURE" in i[0]: r = float(i[3]) c = float(i[4]) v = float(i[1]) x.append(r) y.append(c) z.append(v) plot(x, y, z, "R89 HPD Room", f, mode) def plot(x, y, z, title, filename, mode): w = max(x) - min(x) h = max(y) - min(y) xi = np.linspace(min(x), max(x), w * 4) yi = np.linspace(min(y), max(y), h * 4) zi = griddata(x,y,z,xi,yi) x = np.array(x) y = np.array(y) z = np.array(z) plt.scatter(x,y,marker='o',c='b',s=5,zorder=10) #CS = plt.contour(xi,yi,zi,15,linewidths=0.5,colors='k') #CS = plt.contourf(xi,yi,zi,15,cmap=plt.cm.jet) #CS = plt.contourf(xi,yi,zi,15,cmap=plt.cm.jet) plt.pcolor(xi,yi,zi,cmap=plt.cm.jet) plt.colorbar() plt.clim(TEMP_MIN, TEMP_MAX) ax = plt.axes() ax.set_aspect('equal') plt.xlim(min(x), max(x)) plt.ylim(max(y), min(y)) if mode == "range": f = "hm/hm_%s.png" % filename else: f = filename if mode == "gui": plt.show() plt.suptitle(title) plt.colorbar() else: for a in ax.get_xticklabels(): a.set_visible(False) for a in ax.get_yticklabels(): a.set_visible(False) plt.savefig(f, dpi=DPI) print("Wrote " + f) plt.clf() if __name__ == "__main__": from optparse import OptionParser VERSION = "1.0" parser = OptionParser(version=VERSION) parser.usage = " %prog URL [options]" parser.description = "A utility to plot heatmaps from artemis probe data." parser.add_option("--mode", metavar="STR", dest="mode", default="single", help="Run mode (single, gui or range)") parser.add_option("--filename", metavar="STR", dest="filename", default="heatmap.png", help="Output filename (ignored in gui and range modes)") (options, args) = parser.parse_args() if len(args) == 1: url = args[0] p = urllib2.urlopen(url) p = json.load(p) if options.mode == "gui" or options.mode == "single": p = p["probes"] process(p, options.filename, options.mode) elif options.mode == "range": (time_start, period, time_end, p) = p time_start = int(time_start) period = int(period) time_end = int(time_end) for t in p.items(): (t,rv) = t x = [] y = [] z = [] for r in rv: (r,c,v) = r if v <> None: x.append(float(r)) y.append(float(c)) z.append(float(v)) if (len(x) == len(y)) and (len(x) == len(z)) and (len(x) > 0): plot( x, y, z, "R89 HPD Room at %s" % datetime.datetime.fromtimestamp(time_start + period * int(t)).strftime("%Y-%m-%d %H:%M:%S"), "%05d" % int(t), options.mode ) else: import sys sys.exit("ERROR: Unknown run mode") else: import sys sys.exit("ERROR: URL not specified")
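matplotlib.mlab.griddata, which the plotting script above depends on, was deprecated and later removed from Matplotlib. The closest modern equivalent is scipy.interpolate.griddata, which takes the scattered points, their values, and a target grid in one call; a hedged sketch of the interpolation step (the probe coordinates and temperatures are made up):

# Hedged replacement for the mlab.griddata call above, using
# scipy.interpolate.griddata; the probe data here is made up.
import numpy as np
from scipy.interpolate import griddata

x = np.array([0.0, 1.0, 0.0, 1.0, 0.5])
y = np.array([0.0, 0.0, 1.0, 1.0, 0.5])
z = np.array([20.0, 22.0, 24.0, 26.0, 23.0])   # probe temperatures

xi = np.linspace(x.min(), x.max(), 8)
yi = np.linspace(y.min(), y.max(), 8)
Xi, Yi = np.meshgrid(xi, yi)

# Linear interpolation inside the convex hull of the probes; NaN outside.
zi = griddata((x, y), z, (Xi, Yi), method='linear')
print(zi.shape)   # (8, 8)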
facepalm/bliss-station-game
src/modular_module.py
Python
gpl-3.0
7,899
0.033675
from generic_module import BasicModule from equipment.general import SolarPanel, DOCK_EQUIPMENT, WaterTank, CBM, Window, Battery, Comms from equipment.lifesupport import UniversalToilet, WaterPurifier, OxygenElectrolyzer, RegenerableCO2Filter from equipment.computer import DockingComputer, MissionComputer from equipment.workshop import WorkbenchRack import math import numpy as np from filtering import ClutterFilter import util import globalvars as gv class ModuleComponent(object): def __init__(self,pos = 0): self.size = np.array([ 1 , 4.27 , 4.27 ]) self.sprite = None self.module = None self.nodes = [] self.equipment = [] self.edges = [] self.nodes.append(['hall'+str(pos),[0,0,0]]) self.entry_node = 'hall'+str(pos) self.exit_node = 'hall'+str(pos) if not hasattr(self,'name'): self.name = 'GenericComponent' def refresh_image(self, imgfile, x_off = 0): if self.sprite: self.sprite.delete() import graphics_pyglet self.sprite = graphics_pyglet.LayeredSprite(name=self.name,start_order = -30) img = util.load_image(imgfile ) self.sprite.add_layer(self.name,img) self.sprite._offset = [gv.config['ZOOM'] * 2 * x_off, 0] #,anchor_x= int( #if self.sprite is None: return # def draw(self,window): #off_x = self.sprite.x #self.sprite.update_sprite() self.sprite.draw() def __getstate__(self): d = dict(self.__dict__) del d['sprite'] return d class DockingCap(ModuleComponent): def __init__(self,pos=0): self.name = 'OpenDock'+str(pos) ModuleComponent.__init__(self,pos) self.equipment.append( [ 'CBM'+str(pos), np.array([ -1 , 0 , 0 ]), np.array([ math.pi , 0]), 'CBM', CBM() ] ) #self.edges.append( [ ''.join(['hall',str(pos)]) , ''.join(['CBM',str(pos)]) ] ) def refresh_image(self, x_off = 0): super(DockingCap, self).refresh_image('images/dockcap_comp.png',x_off) class DockingCapClosed(DockingCap): def __init__(self,pos=0): DockingCap.__init__(self,pos) self.equipment[0][1] = np.array([ 1 , 0 , 0]) self.equipment[0][2] = np.array([ 0 , 0]) def refresh_image(self, x_off = 0): super(DockingCap, self).refresh_image('images/dockcap_comp_flip.png',x_off) class DockingHub(ModuleComponent): def __init__(self,pos=0): self.name = 'OpenDock'+str(pos) ModuleComponent.__init__(self,pos) self.size = np.array([ 2 , 4.27 , 4.27 ]) self.equipment.append( [ 'CBM-L'+str(pos), np.array([ 0 , 1 , 0 ]), np.array([ math.pi/2 , 0]), 'CBM', CBM() ] ) self.equipment.append( [ 'CBM-R'+str(pos), np.array([ 0 , -1 , 0 ]), np.array([ -math.pi/2 , 0]), 'CBM', CBM() ] ) #self.edges.append( [ ''.join(['hall',str(pos)]) , ''.join(['CBM',str(pos)]) ] ) def refresh_image(self, x_off = 0): super(DockingHub, self).refresh_image('images/double_comp.png',x_off) class WorkshopRing(ModuleComponent): def __init__(self,pos=0): self.name = 'Workshop ring'+str(pos) ModuleComponent.__init__(self,pos) _sampdict = { 'nadir' : [0, -0.5, 0, -math.pi]} for _d in _sampdict.keys(): self.equipment.append([ ''.join( [ _d , str( pos ) ] ), np.array([ 0 , _sampdict[_d][0] , _sampdict[_d][1] ]) , np.array([ _sampdict[_d][2] , _sampdict[_d][3] ]), 'WORKSHOP', WorkbenchRack() ]) #self.edges.append( [ ''.join( [ 'hall' , str( pos ) ] ) , ''.join( [ _d , str( pos ) ] ) ] ) def refresh_image(self, x_off = 0): super(WorkshopRing, self).refresh_image('images/rack_comp.png',x_off) class RackRing(ModuleComponent): def __init__(self,pos=0): self.name = 'Rack ring'+str(pos)
ModuleComponent.__init__(self,pos) _sampdict = {'port' : [ -0.5, 0, math.pi, 0 ], 'starboard' : [ 0.5 , 0, -math.pi, 0 ], 'nadir' : [0, -0.5, 0, -math.pi]} for _d in _sampdict.keys(): self.equipment.append([ ''.join( [ _d , str( pos ) ] ), np.array([ 0 , _sampdict[_d][0] , _sampdict[_d][1] ]) , np.array([ _sampdict[_d][2] , _sampdict[_d][3] ]), 'RACK', None ]) #self.edges.append( [ ''.join( [ 'hall' , str( pos ) ] ) , ''.join( [ _d , str( p
os ) ] ) ] ) def refresh_image(self, x_off = 0): super(RackRing, self).refresh_image('images/rack_comp.png',x_off) def spawn_component(letter,pos=0): if letter in '{': return DockingCap(pos) elif letter in '}': return DockingCapClosed(pos) elif letter in 'r': return RackRing(pos) elif letter in 'O': return DockingHub(pos) elif letter in 'w': return WorkshopRing(pos) class ModularModule(BasicModule): def __init__(self,name = "Module", build_str = "{Orrrwrrr}" ): self.component_string = build_str self.components=[] self.name=name for ec,c in enumerate(self.component_string): newc = spawn_component(c,ec) if newc is not None: newc.module = self self.components.append(newc) self.refresh_size() BasicModule.__init__(self) x_off = -self.size[0]/2 path_node = None for c in self.components: for n in c.nodes: self.nodes[self.node(n[0])] = np.array([2,1,1])*(np.array([x_off,0,0]) + c.size*(n[1]+np.array([1,0,0]))/np.array([2,1,1]) )/self.size for e in c.equipment: loc = np.array([2,1,1])*(np.array([x_off,0,0]) + c.size*(e[1]+np.array([1,0,0]))/np.array([2,1,1]) )/self.size self.add_equipment(e[0], e[4].install(self) if e[4] else None, loc, eq_orientation=e[2], eq_type=e[3] ) for e in c.edges: self.add_edge(e[0],e[1]) if path_node: self.add_edge( self.node(path_node), self.node(c.entry_node) ) path_node = c.exit_node x_off += c.size[0] self.refresh_image() def refresh_size(self): x,y,z = 0,0,0 for c in self.components: x += c.size[0] y = max(y,c.size[1]) z = max(z,c.size[2]) self.size = np.array([ x , y , z ]) def refresh_image(self): if not gv.config['GRAPHICS']: return if gv.config['GRAPHICS'] == 'pyglet': import graphics_pyglet if self.sprite: self.sprite.delete() self.sprite = None#graphics_pyglet.LayeredSprite(name=self.name,start_order = -30) x_off = -self.size[0]/2 for c in self.components: #c.sprite = self.sprite x_off += c.size[0] / 2.0 c.refresh_image(x_off) x_off += c.size[0] / 2.0 def check_collision(self,x,y): for c in self.components: if c.sprite and c.sprite.contains(x,y): return True return False def draw(self,window): zoom=gv.config['ZOOM'] for c in self.components: l=self.location c.sprite.update_sprite(zoom*l[0], zoom*l[1],-180*(self.orientation[0])/math.pi) c.draw(window) #self.img.blit(zoom*self.location[0]+window.width // 2, zoom*self.location[1]+window.height // 2, 0) #if self.sprite and hasattr(self.sprite, 'update_sprite'): # l=self.location # self.sprite.update_sprite(zoom*l[0], zoom*l[1],-180*(self.orientation[0])/math.pi) BasicModule.draw(self,window)
botswana-harvard/edc-sms
edc_sms/settings.py
Python
gpl-2.0
4,211
0.00095
""" Django settings for edc_sms project. Generated by 'django-admin startproject' using Django 3.0.6. For more information on this file, see https://docs.djangoproject.com/en/3.0/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/3.0/ref/settings/ """ import os import configparser from django.core.management.color import color_style APP_NAME = 'edc_sms' SITE_ID = 40 style = color_style() # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) ETC_DIR = '/etc/' CONFIG_FILE = f'{APP_NAME}.ini' CONFIG_PATH = os.path.join(ETC_DIR, APP_NAME, CONFIG_FILE) config = configparser.ConfigParser() config.read(CONFIG_PATH) BASE_API_URL = config['edc_sms']['base_api_url'] # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = 'pyl0n2@jo6os@3ft62-rcn%53^2n8#1(9ff%m%v!g24yzhy5x*' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True ALLOWED_HOSTS = [] # Application definition INSTALLED_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', '
django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'django.contrib.sites', 'django_crypto_fields.apps.AppConfig', 'django_extensions', 'simple_history', 'django_apscheduler', 'edc_model_admin.apps.AppConfig', 'edc_base.apps.AppConfig', 'edc_devic
e.apps.AppConfig', 'edc_identifier.apps.AppConfig', 'edc_sms.apps.AppConfig', 'django_q' ] MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', 'edc_dashboard.middleware.DashboardMiddleware', ] ROOT_URLCONF = 'edc_sms.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'edc_sms.wsgi.application' # Database # https://docs.djangoproject.com/en/3.0/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } } # Django q configurations Q_CLUSTER = { 'name': 'edc_sms', 'retry': 60, 'orm': 'default', } # Password validation # https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] # dashboards DASHBOARD_URL_NAMES = { 'contact_listboard_url': 'contact_listboard_url', } LAB_DASHBOARD_URL_NAMES = {} DASHBOARD_BASE_TEMPLATES = { 'listboard_base_template': 'edc_sms/base.html', 'contact_listboard_template': 'edc_sms/listboard.html', } # Internationalization # https://docs.djangoproject.com/en/3.0/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'Africa/Gaborone' USE_I18N = True USE_L10N = False USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/3.0/howto/static-files/ STATIC_URL = '/static/'
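The settings module above reads base_api_url from /etc/edc_sms/edc_sms.ini at import time, so that file must exist with an [edc_sms] section before Django starts. A minimal sketch of producing a matching file with configparser (the URL value is a placeholder):

# Hedged sketch: generate the ini file the settings module above expects
# at /etc/edc_sms/edc_sms.ini (the URL value is a placeholder).
import configparser

config = configparser.ConfigParser()
config['edc_sms'] = {'base_api_url': 'http://localhost:8000/api/'}

with open('edc_sms.ini', 'w') as f:   # copy into /etc/edc_sms/ on deploy
    config.write(f)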
synappio/swagger-pyramid
swagger/models.py
Python
apache-2.0
42
0.02381
def gene
rate_ming_models(models): pass
albertosalmeronunefa/tuconsejocomunal
addons/l10n_ve_dpt/models/l10n_ve_dpt.py
Python
gpl-3.0
2,432
0.004934
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public Licen
se for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## # Generated by
the Odoo plugin for Dia ! from odoo import api, fields, models class CountryState(models.Model): """ Add Municipalities reference in State """ _name = 'res.country.state' _inherit = 'res.country.state' _description="Country states" municipality_id = fields.One2many('res.country.state.municipality', 'state_id', 'Municipalities in this state') class StateMunicipality(models.Model): """States Municipalities""" _name = 'res.country.state.municipality' _description="State municipalities" state_id = fields.Many2one('res.country.state', 'State', required=True, help='Name of the State to which the municipality belongs') name = fields.Char('Municipality', required=True, help='Municipality name') code = fields.Char('Code', size=3, required=True, help='Municipality code in max. three chars.') parish_id = fields.One2many('res.country.state.municipality.parish', 'municipality_id', 'Parishes in this municipality') class MunicipalityParish(models.Model): """States Parishes""" _name = 'res.country.state.municipality.parish' _description="Municipality parishes" municipality_id = fields.Many2one('res.country.state.municipality', 'Municipality', help='Name of the Municipality to which the parish belongs') name = fields.Char('Parish', required=True, help='Parish name') code = fields.Char('Code', size=3, required=True, help='Parish Code in max. three chars.')
raviolli77/machineLearning_breastCancer_Python
src/python/produce_model_metrics.py
Python
mit
2,206
0.006346
import sys from sklearn.metrics impo
rt roc_curve from
sklearn.metrics import auc # Function for All Models to produce Metrics --------------------- def produce_model_metrics(fit, test_set, test_class_set, estimator): """ Purpose ---------- Function that will return predictions and probability metrics for said predictions. Parameters ---------- * fit: Fitted model containing the attribute feature_importances_ * test_set: dataframe/array containing the test set values * test_class_set: array containing the target values for the test set * estimator: String representation of appropriate model, can only contain the following: ['knn', 'rf', 'nn'] Returns ---------- Dictionary containing the predictions, accuracy, fpr, tpr and auc for the fitted model on the test set """ my_estimators = { 'rf': 'estimators_', 'nn': 'out_activation_', 'knn': '_fit_method' } try: # Captures whether first parameter is a model if not hasattr(fit, 'fit'): return print("'{0}' is not an instantiated model from scikit-learn".format(fit)) # Captures whether the model has been trained if not vars(fit)[my_estimators[estimator]]: return print("Model does not appear to be trained.") except KeyError as e: raise KeyError("'{0}' does not correspond with the appropriate key inside the estimators dictionary. \ Please refer to function to check `my_estimators` dictionary.".format(estimator)) # Outputting predictions and prediction probability # for test set predictions = fit.predict(test_set) accuracy = fit.score(test_set, test_class_set) # We grab the second array from the output, which corresponds # to the predicted probabilities of the positive class # Ordered wrt fit.classes_ in our case [0, 1] where 1 is our positive class predictions_prob = fit.predict_proba(test_set)[:, 1] # ROC Curve stuff fpr, tpr, _ = roc_curve(test_class_set, predictions_prob, pos_label = 1) auc_fit = auc(fpr, tpr) return {'predictions': predictions, 'accuracy': accuracy, 'fpr': fpr, 'tpr': tpr, 'auc': auc_fit}
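A hedged usage sketch for the function above with a fitted random forest: the breast-cancer dataset matches the repository's topic but is only an illustration, and the import assumes the module above is on the path.

# Hedged usage sketch for produce_model_metrics with estimator='rf';
# the dataset and split parameters are illustrative.
from sklearn.datasets import load_breast_cancer
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import train_test_split

from produce_model_metrics import produce_model_metrics  # module above

X, y = load_breast_cancer(return_X_y=True)
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.2, random_state=42)

fit = RandomForestClassifier(n_estimators=100,
                             random_state=42).fit(X_train, y_train)
metrics = produce_model_metrics(fit, X_test, y_test, 'rf')
print(metrics['accuracy'], metrics['auc'])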
Punzo/SlicerAstro
AstroMomentMaps/Testing/Python/AstroMomentMapsSelfTest.py
Python
bsd-3-clause
6,817
0.008655
import os import unittest import vtk, qt, ctk, slicer import math import sys # # AstroMomentMapsSelfTest # class AstroMomentMapsSelfTest: def __init__(self, parent): parent.title = "Astro MomentMaps SelfTest" parent.categories = ["Testing.TestCases"] parent.dependencies = ["AstroVolume"] parent.contributors = [""" Davide Punzo (Kapteyn Astronomical Institute) and Thijs van der Hulst (Kapteyn Astronomical Institute)."""] parent.helpText = """ This module was developed as a self test to perform the operations needed for generating moment maps. """ parent.acknowledgementText = """ """ # replace with organization, grant and thanks. self.parent = parent # Add this test to the SelfTest module's list for discovery when the module # is created. Since this module may be discovered before SelfTests itself, # create the list if it doesn't already exist. try: slicer.selfTests except AttributeError: slicer.selfTests = {} slicer.selfTests['Astro MomentMaps SelfTest'] = self.runTest def runTest(self): tester = AstroMomentMapsSelfTestTest() tester.runTest() # # qAstroMomentMapsSelfTestWidget # class AstroMomentMapsSelfTestWidget: def __init__(self, parent = None): if not parent:
self.parent = slicer.qMRMLWidget
() self.parent.setLayout(qt.QVBoxLayout()) self.parent.setMRMLScene(slicer.mrmlScene) else: self.parent = parent self.layout = self.parent.layout() if not parent: self.setup() self.parent.show() def setup(self): # Instantiate and connect widgets ... # reload button # (use this during development, but remove it when delivering # your module to users) self.reloadButton = qt.QPushButton("Reload") self.reloadButton.toolTip = "Reload this module." self.reloadButton.name = "AstroMomentMapsSelfTest Reload" self.layout.addWidget(self.reloadButton) self.reloadButton.connect('clicked()', self.onReload) # reload and test button # (use this during development, but remove it when delivering # your module to users) self.reloadAndTestButton = qt.QPushButton("Reload and Test") self.reloadAndTestButton.toolTip = "Reload this module and then run the self tests." self.layout.addWidget(self.reloadAndTestButton) self.reloadAndTestButton.connect('clicked()', self.onReloadAndTest) # Add vertical spacer self.layout.addStretch(1) def cleanup(self): pass def onReload(self,moduleName="AstroMomentMapsSelfTest"): """Generic reload method for any scripted module. ModuleWizard will substitute correct default moduleName. """ globals()[moduleName] = slicer.util.reloadScriptedModule(moduleName) def onReloadAndTest(self,moduleName="AstroMomentMapsSelfTest"): self.onReload() evalString = 'globals()["%s"].%sTest()' % (moduleName, moduleName) tester = eval(evalString) tester.runTest() # # AstroMomentMapsSelfTestLogic # class AstroMomentMapsSelfTestLogic: """This class should implement all the actual computation done by your module. The interface should be such that other python code can import this class and make use of the functionality without requiring an instance of the Widget """ def __init__(self): pass def hasImageData(self,volumeNode): """This is a dummy logic method that returns true if the passed in volume node has valid image data """ if not volumeNode: print('no volume node') return False if volumeNode.GetImageData() is None: print('no image data') return False return True class AstroMomentMapsSelfTestTest(unittest.TestCase): """ This is the test case for your scripted module. """ def delayDisplay(self,message,msec=100): """This utility method displays a small dialog and waits. This does two things: 1) it lets the event loop catch up to the state of the test so that rendering and widget updates have all taken place before the test continues and 2) it shows the user/developer/tester the state of the test so that we'll know when it breaks. 
""" print(message) self.info = qt.QDialog() self.infoLayout = qt.QVBoxLayout() self.info.setLayout(self.infoLayout) self.label = qt.QLabel(message,self.info) self.infoLayout.addWidget(self.label) qt.QTimer.singleShot(msec, self.info.close) self.info.exec_() def setUp(self): slicer.mrmlScene.Clear(0) def runTest(self): self.setUp() self.test_AstroMomentMapsSelfTest() def test_AstroMomentMapsSelfTest(self): print("Running AstroMomentMapsSelfTest Test case:") self.downloadWEIN069() astroVolume = slicer.util.getNode("WEIN069") rms = astroVolume.GetDisplayThreshold() mainWindow = slicer.util.mainWindow() mainWindow.moduleSelector().selectModule('AstroVolume') mainWindow.moduleSelector().selectModule('AstroMomentMaps') astroMomentMapsModule = module = slicer.modules.astromomentmaps astroMomentMapsModuleWidget = astroMomentMapsModule.widgetRepresentation() AstroMomentMapsParameterNode = slicer.util.getNode("AstroMomentMapsParameters") AstroMomentMapsParameterNode.SetIntensityMin(rms * 3) QPushButtonList = astroMomentMapsModuleWidget.findChildren(qt.QPushButton) for QPushButton in (QPushButtonList): if QPushButton.name == "ApplyButton": ApplyPushButton = QPushButton self.delayDisplay('Calculating moment maps', 700) ApplyPushButton.click() ZeroMomentMapVolume = slicer.mrmlScene.GetNodeByID(AstroMomentMapsParameterNode.GetZeroMomentVolumeNodeID()) pixelValue0 = ZeroMomentMapVolume.GetImageData().GetScalarComponentAsFloat(56, 68, 0, 0) FirstMomentMapVolume = slicer.mrmlScene.GetNodeByID(AstroMomentMapsParameterNode.GetFirstMomentVolumeNodeID()) pixelValue1 = FirstMomentMapVolume.GetImageData().GetScalarComponentAsFloat(56, 68, 0, 0) SecondMomentMapVolume = slicer.mrmlScene.GetNodeByID(AstroMomentMapsParameterNode.GetSecondMomentVolumeNodeID()) pixelValue2 = SecondMomentMapVolume.GetImageData().GetScalarComponentAsFloat(56, 68, 0, 0) if (math.fabs(pixelValue0 - 0.511788547039) < 1.e-6 and \ math.fabs(pixelValue1 - 5231.70947266) < 1.e-6 and \ math.fabs(pixelValue2 - 28.8058509827) < 1.e-6): self.delayDisplay('Test passed', 700) else: self.delayDisplay('Test failed', 700) # if run from Slicer interface remove the following exit sys.exit() def downloadWEIN069(self): import AstroSampleData astroSampleDataLogic = AstroSampleData.AstroSampleDataLogic() self.delayDisplay('Getting WEIN069 Astro Volume') WEIN069Volume = astroSampleDataLogic.downloadSample("WEIN069") return WEIN069Volume
rdolgushin/pgdumper
setup.py
Python
mit
642
0.034268
from setuptools import setup setup( name = 'pgdumper', description = 'Simple PostgreSQL dumper', author = 'Roman Dolgushin', author_em
ail = 'rd@roman-dolgushin.ru', url = 'https://github.com/rdolgushin/pgdumper', license = 'MIT', version = '0.1', packages = ['pgdumper'], install_requires = ['mydumper'], entry_points = { 'console_scripts': ['pgdumper = pgdumper.pgdumper:main'] }, classifiers = [ 'Topic :: Utilities', 'Topic :: Database', 'Topic :: System :: Systems Administration', 'Environment :: Console', 'License :: OSI Approved :: MIT License', 'Operating System :: POSIX'
], )
kyuupichan/electrum
gui/kivy/uix/dialogs/fee_dialog.py
Python
mit
4,529
0.001987
from kivy.app import App from kivy.factory import Factory from kivy.properties import ObjectProperty from kivy.lang import Builder from electrum_gui.kivy.i18n import _ Builder.load_string(''' <FeeDialog@Popup> id: popup title: _('Transaction Fees') size_hint: 0.8, 0.8 pos_hint: {'top':0.9} method: 0 BoxLayout: orientation: 'vertical' BoxLayout: orientation: 'horizontal' size_hint: 1, 0.5 Label: text: _('Method') + ':' Button: text: _('Mempool') if root.method == 2 else _('ETA') if root.method == 1 else _('Static') background_color: (0,0,0,0) bold: True on_release: root.method = (root.method + 1) % 3 root.update_slider() root.update_text() BoxLayout: orientation: 'horizontal' size_hint: 1, 0.5 Label: text: (_('Target') if root.method > 0 else _('Fee')) + ':' Label: id: fee_target text: '' Slider: id: slider range: 0, 4 step: 1 on_value: root.on_slider(self.value) Widget: size_hint: 1, 0.5 BoxLayout: orientation: 'horizontal' size_hint: 1, 0.5 TopLabel: id: fee_estimate text: '' font_size: '14dp' Widget: size_hint: 1, 0.5 BoxLayout:
orientation: 'horizontal' size_hint: 1, 0.5 Button: text: 'Cancel' size_hint: 0.5, None height: '48dp'
on_release: popup.dismiss() Button: text: 'OK' size_hint: 0.5, None height: '48dp' on_release: root.on_ok() root.dismiss() ''') class FeeDialog(Factory.Popup): def __init__(self, app, config, callback): Factory.Popup.__init__(self) self.app = app self.config = config self.callback = callback mempool = self.config.use_mempool_fees() dynfees = self.config.is_dynfee() self.method = (2 if mempool else 1) if dynfees else 0 self.update_slider() self.update_text() def update_text(self): pos = int(self.ids.slider.value) dynfees, mempool = self.get_method() if self.method == 2: fee_rate = self.config.depth_to_fee(pos) target, estimate = self.config.get_fee_text(pos, dynfees, mempool, fee_rate) msg = 'In the current network conditions, in order to be positioned %s, a transaction will require a fee of %s.' % (target, estimate) elif self.method == 1: fee_rate = self.config.eta_to_fee(pos) target, estimate = self.config.get_fee_text(pos, dynfees, mempool, fee_rate) msg = 'In the last few days, transactions that confirmed %s usually paid a fee of at least %s.' % (target.lower(), estimate) else: fee_rate = self.config.static_fee(pos) target, estimate = self.config.get_fee_text(pos, dynfees, True, fee_rate) msg = 'In the current network conditions, a transaction paying %s would be positioned %s.' % (target, estimate) self.ids.fee_target.text = target self.ids.fee_estimate.text = msg def get_method(self): dynfees = self.method > 0 mempool = self.method == 2 return dynfees, mempool def update_slider(self): slider = self.ids.slider dynfees, mempool = self.get_method() maxp, pos, fee_rate = self.config.get_fee_slider(dynfees, mempool) slider.range = (0, maxp) slider.step = 1 slider.value = pos def on_ok(self): value = int(self.ids.slider.value) dynfees, mempool = self.get_method() self.config.set_key('dynamic_fees', dynfees, False) self.config.set_key('mempool_fees', mempool, False) if dynfees: if mempool: self.config.set_key('depth_level', value, True) else: self.config.set_key('fee_level', value, True) else: self.config.set_key('fee_per_kb', self.config.static_fee(value), True) self.callback() def on_slider(self, value): self.update_text()
jpardobl/monscale
monscale/pypelib/Rule.py
Python
bsd-3-clause
3,386
0.056113
import os import sys import time import exceptions import uuid import logging ''' @author: msune,lbergesio,omoya,CarolinaFernandez @organization: i2CAT, OFELIA FP7 PolicyEngine Rule class Encapsulates logic of a simple Rule ''' from monscale.pypelib.Condition import Condition from monscale.pypelib.persistence.PersistenceEngine import PersistenceEngine from monscale.pypelib.utils.Logger import Logger class TerminalMatch(exceptions.Exception): value = None desc = None def __init__(self,rType,desc): if isinstance(rType['value'],bool): self.value = rType['value'] else: raise Exception("Unknown rule type") self.desc = desc def __str__(self): return "%s "%self.desc class Rule(): logger = Logger.getLogger() #Class Attributes _condition = None _description = None _errorMsg = None _uuid = None #uuid.uuid4().hex _defaultParser = "RegexParser" _defaultPersistence = "Django" #Types of rule POSITIVE_TERMINAL={'value':True,'terminal':True} POSITIVE_NONTERMINAL={'value':True,'terminal':False} NEGATIVE_TERMINAL={'value':False,'terminal':True} NEGATIV
E_NONTERMINAL={'value':False,'terminal':False} _types = [POSITIVE_TERMINAL,POSITIVE_NONTERMINAL,NEGATIVE_TERMINAL, NEGATIVE_NONTERMINAL] #Rule type _type = None #Rule match Action _matchAction=None #Getters def getCondition(self): return self._condition def getDescription(self): return self._description def getType(self): return self._type def getErrorMsg(self): return self._errorMsg def getMatchAction(self):
return self._matchAction def getUUID(self): return self._uuid #setters def setUUID(self,UUID): self._uuid = UUID #Constructor def __init__(self,condition,description,errorMsg,ruleType=POSITIVE_TERMINAL,action=None,uuid=None): if not isinstance(condition,Condition): raise Exception("Object must be an instance of Condition") if ruleType not in self._types: raise Exception("Unknown rule type") if action == None and (ruleType == self.NEGATIVE_NONTERMINAL or ruleType == self.POSITIVE_NONTERMINAL): raise Exception("You cannot create non-terminal actionless rules") self._condition = condition self._matchAction = action self._type = ruleType self._description = description self._errorMsg = errorMsg self._uuid = uuid def dump(self): #Debug dump toReturn = self._condition.dump() toReturn+="=> %s "%str(self._type['value']) if self._matchAction != None: toReturn += "(%s) "%str(self._matchAction) if self._type['terminal']: toReturn += "[TERM] " if self._description: toReturn+=" #"+self._description return toReturn #Resolver is passed at evaluation time to be able to dynamically redirect actions def evaluate(self,metaObj,resolver): try: result = self._condition.evaluate(metaObj,resolver) Rule.logger.debug('Result was: %s',str(result)) except Exception as e: Rule.logger.error('Error on rule: %s',self.dump()) Rule.logger.error('Exception: %s', str(e)) Rule.logger.error('Rule will be skipped!') result = False if result: if self._matchAction != None: resolver.resolve(self._matchAction,metaObj) #If is terminal raise TerminalMatch if self._type['terminal']: raise TerminalMatch(self._type,self._errorMsg) #return whatever return def getConditionDump(self): return self.getCondition().dump()
magicgoose/simple_dr_meter
audio_io/cue/cue_parser.py
Python
gpl-3.0
2,049
0.000488
import re from enum import Enum, auto from fractions import Fraction from io import BufferedIOBase from numbers import Number from typing import Iterable import chardet class CueCmd(Enum): PERF
ORMER = auto() TITLE = auto() FILE = auto() TRACK = auto() INDEX = auto() REM = auto() EOF = auto() def _unquote(s: str): return s[1 + s.index('"'):s.rindex('"')] _whitespace_pattern = re.compile(r'\s+') _rem_tag_pattern = re.compile(r'([A-Z_]+) (.+)')
def parse_cd_time(offset: str) -> Number: """parse time in CD-DA (75fps) format to seconds, exactly MM:SS:FF""" m, s, f = map(int, offset.split(':')) return m * 60 + s + Fraction(f, 75) def _parse_cue_cmd(line: str, offset_in_seconds: bool = True): line = line.strip() cmd, args = _whitespace_pattern.split(line, 1) if cmd == 'PERFORMER': return CueCmd.PERFORMER, _unquote(args) if cmd == 'TITLE': return CueCmd.TITLE, _unquote(args) if cmd == 'FILE': return CueCmd.FILE, _unquote(args) if cmd == 'TRACK': number, _ = _whitespace_pattern.split(args, 1) number = int(number) return CueCmd.TRACK, number if cmd == 'INDEX': number, offset = _whitespace_pattern.split(args, 1) number = int(number) if offset_in_seconds: offset = parse_cd_time(offset) return CueCmd.INDEX, number, offset if cmd == 'REM': tag_name, tag_value = _rem_tag_pattern.fullmatch(args).groups() return CueCmd.REM, tag_name, tag_value return None def read_cue_from_file(in_path: str) -> str: with open(in_path, 'rb') as f: assert isinstance(f, BufferedIOBase) content = f.read() encoding = chardet.detect(content)['encoding'] return content.decode(encoding) def parse_cue_str(content: str, offset_in_seconds: bool = True) -> Iterable[tuple]: for line in content.splitlines(): cmd = _parse_cue_cmd(line, offset_in_seconds) if cmd: yield cmd yield CueCmd.EOF, None
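A worked example of the CD-DA arithmetic in parse_cd_time above: at 75 frames per second, 'MM:SS:FF' maps to MM*60 + SS + FF/75 seconds, kept exact as a Fraction (the import path follows this record's file location):

# Worked example for parse_cd_time: "03:12:45" is 3 minutes, 12 seconds
# and 45 frames; at 75 frames/second that is 192 + 45/75 = 192.6 seconds.
from fractions import Fraction

from audio_io.cue.cue_parser import parse_cd_time

offset = parse_cd_time('03:12:45')
assert offset == 3 * 60 + 12 + Fraction(45, 75) == Fraction(963, 5)
print(float(offset))   # 192.6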
hackaugusto/raiden
raiden/tests/unit/test_operators.py
Python
mit
2,554
0.000392
from raiden.messages import Processed from raiden.tests.utils import factories from raiden.transfer.events import ( EventPaymentReceivedSuccess, EventPaymentSentFailed, EventPaymentSentSuccess, ) from raiden.transfer.state_change import ActionCancelPayment, Block from raiden.utils import sha3 ADDRESS = sha3(b"foo")[:20] ADDRESS2 = sha3(b"boo")[:20] ADDRESS3 = sha3(b"coo")[:20] ADDRESS4 = sha3(b"goo")[:20] SECRET = b"secret" HASH = sha3(SECRET) HASH2 = sha3(b"joo") def test_transfer_statechange_operators(): # pylint: disable=unneeded-not block_hash = factories.make_transaction_hash() a = Block(block_number=2, gas_limit=1, block_hash=block_hash) b = Block(block_number=2, gas_limit=1, block_hash=block_hash) c = Block(block_number=3, gas_limit=1, block_hash=factories.make_transaction_hash()) assert a == b assert not a != b assert a != c assert not a == c a = ActionCancelPayment(2) b = ActionCancelPayment(2) c = ActionCancelPayment(3) assert a == b assert not a != b assert a != c assert not a == c def test_event_operators(): a = EventPaymentSentSuccess(1, 4, 2, 5, sha3(b"target")) b = EventPaymentSentSuccess(1, 4, 2, 5, sha3(b"target")) c = EventPaymentSentSuccess(2, 7, 3, 4, sha3(b"target")) d = EventPaymentSentSuccess(2, 7, 3, 4, sha3(b"differenttarget")) # pylint: disable=unneeded-not assert a == b assert not a != b assert a != c assert not a == c assert not c == d a = EventPaymentSentFailed(1, 7, 2, "target", "BECAUSE") b = EventPaymentSentFailed(1, 7, 2, "target", "BECAUSE") c = EventPaymentSentFailed(3, 3, 3, "target", "UNKNOWN") assert a == b assert not a != b assert a != c assert not a == c a = EventPaymentReceivedSuccess(4, 4, 2, 5, sha3(b"initiator")) b = EventPaymentReceivedSuccess(4, 4, 2, 5, sha3(b"initiator")) c = EventPaymentReceivedSuccess(1, 2, 3, 5, sha3(b"initiator")) d = EventPaymentReceivedSuccess(1, 2, 3, 5, sha3(b"other initiator"))
assert a == b assert not a != b assert a != c assert not a == c assert c != d assert not c == d def test_message_operators(): message_identifier = 10 message_identifier2 = 11 a = Processed(message_identifier=message_i
dentifier) b = Processed(message_identifier=message_identifier) c = Processed(message_identifier=message_identifier2) # pylint: disable=unneeded-not assert a == b assert not a != b assert a != c assert not a == c
ResearchComputing/RCAMP
rcamp/tests/test_projects_receivers.py
Python
mit
4,732
0.00317
from mock import MagicMock import mock from django.test import override_settings from tests.utilities.utils import SafeTestCase from tests.utilities.ldap import get_ldap_user_defaults from accounts.models import ( User, AccountRequest, Intent ) from projects.models import Project from projects.receivers import ch
eck_general_eligibility
organization_info = { 'ucb': { 'long_name': 'University of Colorado Boulder', 'suffix': None, 'general_project_id': 'ucb-general' }, 'csu': { 'long_name': 'Colorado State University', 'suffix': 'colostate.edu', 'general_project_id': 'csu-general' } } @override_settings(ORGANIZATION_INFO=organization_info) class GeneralEligibilityReceiverTestCase(SafeTestCase): def test_check_general_eligibility(self): user_defaults = get_ldap_user_defaults() auth_user_defaults = dict( username=user_defaults['username'], first_name=user_defaults['first_name'], last_name=user_defaults['last_name'], email=user_defaults['email'] ) auth_user = User.objects.create(**auth_user_defaults) account_request_defaults = dict( username=auth_user.username, first_name=auth_user.first_name, last_name=auth_user.last_name, email=auth_user.email, organization='ucb' ) account_request = AccountRequest.objects.create(**account_request_defaults) intent = Intent.objects.create( account_request=account_request, reason_summit=True ) project_defaults = dict( pi_emails=['pi@email.org'], description='test project', organization='ucb', title='test project', project_id='ucb-general' ) project = Project.objects.create(**project_defaults) check_general_eligibility(account_request.__class__,account_request=account_request) project = Project.objects.get() self.assertIn(auth_user,project.collaborators.all()) # No Summit intention declared, now add to 'general' account anyway project.collaborators.clear() intent.reason_summit = False intent.save() check_general_eligibility(account_request.__class__,account_request=account_request) project = Project.objects.get() self.assertIn(auth_user,project.collaborators.all()) def test_check_general_eligibility_suffixed(self): user_defaults = get_ldap_user_defaults() effective_uid = '{}@colostate.edu'.format(user_defaults['username']) auth_user_defaults = dict( username=effective_uid, first_name=user_defaults['first_name'], last_name=user_defaults['last_name'], email=user_defaults['email'] ) auth_user = User.objects.create(**auth_user_defaults) account_request_defaults = dict( username=user_defaults['username'], first_name=auth_user.first_name, last_name=auth_user.last_name, email=auth_user.email, organization='csu' ) account_request = AccountRequest.objects.create(**account_request_defaults) intent = Intent.objects.create( account_request=account_request, reason_summit=True ) project_defaults = dict( pi_emails=['pi@email.org'], description='test project', organization='csu', title='test project', project_id='csu-general' ) project = Project.objects.create(**project_defaults) check_general_eligibility(account_request.__class__,account_request=account_request) project = Project.objects.get() self.assertIn(auth_user,project.collaborators.all()) def test_check_general_eligibility_no_intent(self): user_defaults = get_ldap_user_defaults() auth_user_defaults = dict( username=user_defaults['username'], first_name=user_defaults['first_name'], last_name=user_defaults['last_name'], email=user_defaults['email'] ) auth_user = User.objects.create(**auth_user_defaults) account_request_defaults = dict( username=auth_user.username, first_name=auth_user.first_name, last_name=auth_user.last_name, email=auth_user.email, organization='ucb' ) account_request = AccountRequest.objects.create(**account_request_defaults) check_general_eligibility(account_request.__class__,account_request=account_request)
Syralist/yet-another-hexapod
hexapy/Arm.py
Python
mit
6,538
0.008259
''' Copyright (C) 2013 Travis DeWolf This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. ''' import math import numpy as np import scipy.optimize class Arm3Link: def __init__(self, q=None, q0=None, L=None): """Set up the basic parameters of the arm. All lists are in order [shoulder, elbow, wrist]. :param list q: the initial joint angles of the arm :param list q0: the default (resting state) joint configuration :param list L: the arm segment lengths """ # initial joint angles if q is None: q = [math.pi/4, math.pi/4, 0] self.q = q # some default arm positions if q0 is None: q0 = np.array([math.pi/4, math.pi/4, 0]) self.q0 = q0 # arm segment lengths if L is None: L = np.array([1, 1, 1]) self.L = L self.max_angles = [math.pi, math.pi, math.pi/4] self.min_angles = [0, 0, -math.pi/4] def get_xy(self, q=None): """Returns the corresponding hand xy coordinates for a given set of joint angle values [shoulder, elbow, wrist], and the above defined arm segment lengths, L :param list q: the list of current joint angles :returns list: the [x,y] position of the arm """ if q is None: q = self.q x = self.L[0]*np.cos(q[0]) + \ self.L[1]*np.cos(q[0]+q[1]) + \ self.L[2]*np.cos(np.sum(q)) y = self.L[0]*np.sin(q[0]) + \ self.L[1]*np.sin(q[0]+q[1]) + \ self.L[2]*np.sin(np.sum(q)) return [x, y] def inv_kin(self, xy): """This is just a quick write up to find the inverse kinematics for a 3-link arm, using the SciPy optimize package minimization function. Given an (x,y) position of the hand, return a set of joint angles (q) using constraint based minimization, constraint is to match hand (x,y), minimize the distance of each joint from it's default position (q0). :param list xy: a tuple of the desired xy position of the arm :returns list: the optimal [shoulder, elbow, wrist] angle configuration """ def distance_to_default(q, *args):
"""Objective function to minimize Calculates the euclidean distance through joint space to the
default arm configuration. The weight list allows the penalty of each joint being away from the resting position to be scaled differently, such that the arm tries to stay closer to resting state more for higher weighted joints than those with a lower weight. :param list q: the list of current joint angles :returns scalar: euclidean distance to the default arm position """ # weights found with trial and error, get some wrist bend, but not much weight = [1, 1, 1.3] return np.sqrt(np.sum([(qi - q0i)**2 * wi for qi,q0i,wi in zip(q, self.q0, weight)])) def x_constraint(q, xy): """Returns the corresponding hand xy coordinates for a given set of joint angle values [shoulder, elbow, wrist], and the above defined arm segment lengths, L :param list q: the list of current joint angles :returns: the difference between current and desired x position """ x = ( self.L[0]*np.cos(q[0]) + self.L[1]*np.cos(q[0]+q[1]) + self.L[2]*np.cos(np.sum(q)) ) - xy[0] return x def y_constraint(q, xy): """Returns the corresponding hand xy coordinates for a given set of joint angle values [shoulder, elbow, wrist], and the above defined arm segment lengths, L :param list q: the list of current joint angles :returns: the difference between current and desired y position """ y = ( self.L[0]*np.sin(q[0]) + self.L[1]*np.sin(q[0]+q[1]) + self.L[2]*np.sin(np.sum(q)) ) - xy[1] return y return scipy.optimize.fmin_slsqp( func=distance_to_default, x0=self.q, eqcons=[x_constraint, y_constraint], args=(xy,), iprint=0) # iprint=0 suppresses output def test(): ############Test it!################## arm = Arm3Link() # set of desired (x,y) hand positions x = np.arange(-.75, .75, .05) y = np.arange(0, .75, .05) # threshold for printing out information, to find trouble spots thresh = .025 count = 0 total_error = 0 # test it across the range of specified x and y values for xi in range(len(x)): for yi in range(len(y)): # test the inv_kin function on a range of different targets xy = [x[xi], y[yi]] # run the inv_kin function, get the optimal joint angles q = arm.inv_kin(xy=xy) # find the (x,y) position of the hand given these angles actual_xy = arm.get_xy(q) # calculate the root squared error error = np.sqrt((np.array(xy) - np.array(actual_xy))**2) # total the error total_error += error # if the error was high, print out more information if np.sum(error) > thresh: print '-------------------------' print 'Initial joint angles', arm.q print 'Final joint angles: ', q print 'Desired hand position: ', xy print 'Actual hand position: ', actual_xy print 'Error: ', error print '-------------------------' count += 1 print '\n---------Results---------' print 'Total number of trials: ', count print 'Total error: ', total_error print '-------------------------' if __name__ == "__main__": test()
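A short usage sketch for the Arm3Link solver defined above; the target point is an arbitrary reachable example, not a value from the original file:

arm = Arm3Link()
target = [0.5, 1.0]                      # arbitrary (x, y) hand target
q = arm.inv_kin(xy=target)               # SLSQP solve under the x/y constraints
print('joint angles:', q)
print('hand position:', arm.get_xy(q))   # should land close to target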
jcollie/ceph_exporter
ceph_exporter/ceph/metrics/ceph_objects_recovered.py
Python
gpl-3.0
908
0
# -*- mode: python; coding: utf-8 -*- # Copyright © 2016 by Jeffrey C. Ollie # # This file is part of ceph_exporter. # # ceph_exporter is free software: you can redistribute it and/or # modify it under the terms of the GNU General Public License as # published by t
he Free Software Foundation, either version 3 of the # License, or (at you
r option) any later version. # # ceph_exporter is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with ceph_exporter. If not, see # <http://www.gnu.org/licenses/>. from ...prometheus import Metric __all__ = ['ceph_objects_recovered'] ceph_objects_recovered = Metric('ceph_objects_recovered', None, 'counter')
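The module above registers its counter through the project's own Metric wrapper. For comparison, a hedged sketch of the same metric using the standard prometheus_client library (the help text and the increment are illustrative, not taken from this project):

from prometheus_client import Counter

ceph_objects_recovered = Counter(
    'ceph_objects_recovered',              # metric name, as in the module above
    'Number of objects recovered by Ceph'  # help text, invented here
)
ceph_objects_recovered.inc()               # counters only ever increase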
yshalenyk/reports
reports/tests/bids_tests.py
Python
apache-2.0
9,230
0.000108
import unittest import mock from reports.tests.base import BaseBidsUtilityTest from copy import copy test_bids_invalid = [ [{ "owner": "test", "date": "2016-03-17T13:32:25.774673+02:00", "id": "44931d9653034837baff087cfc2fb5ac", }], [{ "status": "invalid", "owner": "test", "date": "2016-04-17T13:32:25.774673+02:00", "id": "44931d9653034837baff087cfc2fb5ac" }] ] test_bids_valid = [ [{ "owner": "test", "date": "2016-04-17T13:32:25.774673+02:00", "id": "44931d9653034837baff087cfc2fb5ac", }], [{ "owner": "test", "date": "2016-05-05T13:32:25.774673+02:00", "id": "44931d9653034837baff087cfc2fb5ac", }], [{ "owner": "test", "date": "2016-05-10T13:32:25.774673+02:00", "id": "f55962b1374b43ddb886821c0582bc7f" }]] test_award_period = '2016-04-17T13:32:25.774673+02:00' class ReportBidsViewTestCase(BaseBidsUtilityTest): def test_bids_view_invalid_date(self): data = { "awardPeriod": { "startDate": test_award_period, }, 'owner': 'teser', "bids": test_bids_invalid[0], } self.assertLen(0,
data) def test_bids_view_invalid_mode(self): data = { 'mode': 'test', "awardPeriod": { "startDate": test_award_period, }, 'owner': 'teser', "bids": test_bids_valid[0],
} self.assertLen(0, data) def test_bids_view_invalid_status(self): data = { "procurementMethod": "open", "awardPeriod": { "startDate": test_award_period, }, 'owner': 'teser', 'bids': test_bids_invalid[1], } self.assertLen(0, data) def test_bids_view_invalid_method(self): data = { "procurementMethod": "test", "awardPeriod": { "startDate": test_award_period, }, 'owner': 'teser', 'bids': test_bids_valid[0], } self.assertLen(0, data) def test_bids_view_valid(self): data = { "awardPeriod": { "startDate": test_award_period, }, 'owner': 'teser', 'bids': test_bids_valid[0], } self.assertLen(1, data) response = list(self.utility.response) self.assertEqual(1000, response[0]['value']['value']) self.assertEqual( "44931d9653034837baff087cfc2fb5ac", response[0]['value']['bid'] ) self.assertEqual( "0006651836f34bcda9a030c0bf3c0e6e", response[0]['value']['tender'] ) self.assertEqual( "UA-2016-11-12-000150", response[0]['value']['tenderID'] ) self.assertEqual( u"UAH", response[0]['value']['currency'] ) def test_bids_view_period(self): self.utility.owner = 'test' data = { "awardPeriod": { "startDate": test_award_period, }, 'owner': 'teser', 'bids': test_bids_valid[0], } doc = copy(self.test_data) doc.update(data) self.utility.db.save(doc) data = { "_id": "10028cddd23540e5b6abb9efd2756d1d", "awardPeriod": { "startDate": "2016-11-09T15:00:00+02:00", }, 'owner': 'teser', 'bids': test_bids_valid[1], } doc = copy(self.test_data) doc.update(data) self.utility.db.save(doc) data = { "_id": "00028aasd2isdfsde5b6abb9efd2756d1d", "awardPeriod": { "startDate": "2016-11-30T15:00:00+02:00", }, 'owner': 'teser', 'bids': test_bids_valid[2], } doc = copy(self.test_data) doc.update(data) self.utility.db.save(doc) self.utility.start_date = '' self.utility.end_date = '' self.utility.get_response() self.assertEqual(3, len(list(self.utility.response))) self.utility.start_date = "2016-11-10T15:00:00" self.utility.end_date = '' self.utility.get_response() self.assertEqual(1, len(list(self.utility.response))) self.utility.start_date = "2016-12-01T15:00:00" self.utility.end_date = '' self.utility.get_response() self.assertEqual(0, len(list(self.utility.response))) self.utility.start_date = "2016-11-01T15:00:00" self.utility.end_date = "2016-12-01T15:00:00" self.utility.get_response() self.assertEqual(2, len(list(self.utility.response))) def test_bids_view_with_lots(self): data = { "enquiryPeriod": { "startDate": '2016-04-17T13:32:25.774673+02:00', }, "awardPeriod": { "startDate": test_award_period, }, "lots": [ { "status": "active", "id": "324d7b2dd7a54df29bad6d0b7c91b2e9", "value": { "currency": "UAH", "amount": 2000, "valueAddedTaxIncluded": False, }, } ], "bids": [ { "date": "2016-04-07T16:36:58.983102+03:00", "owner": "test", "id": "a22ef2b1374b43ddb886821c0582bc7dk", "lotValues": [ { "relatedLot": "324d7b2dd7a54df29bad6d0b7c91b2e9", "date": "2016-04-07T16:36:58.983062+03:00", } ], } ], } self.assertLen(1, data) class ReportBidsUtilityTestCase(BaseBidsUtilityTest): def test_bids_utility_output(self): data = { "awardPeriod": { "startDate": test_award_period, }, 'owner': 'test', 'bids': test_bids_valid[0], } mock_csv = mock.mock_open() doc = copy(self.test_data) doc.update(data) self.utility.db.save(doc) with mock.patch('__builtin__.open', mock_csv): self.utility.run() calls = [ mock.call('test/test@---bids.csv', 'w'), mock.call().__enter__(), mock.call().write( str(','.join(self.utility.headers) + '\r\n')), mock.call().write( '0006651836f34bcda9a030c0bf3c0e6e,' 'UA-2016-11-12-000150,,1000,UAH,' 
'44931d9653034837baff087cfc2fb5ac,,7.0\r\n' ), mock.call().__exit__(None, None, None), ] mock_csv.assert_has_calls(calls) def test_bids_utility_output_with_lots(self): data = { "enquiryPeriod": { "startDate": '2016-04-17T13:32:25.774673+02:00', }, "awardPeriod": { "startDate": test_award_period, }, "lots": [ { "status": "active", "id": "324d7b2dd7a54df29bad6d0b7c91b2e9", "value": { "currency": "UAH", "amount": 2000, "valueAddedTaxIncluded": False, }, } ], "bids": [ { "date": "2016-04-07T16:36:58.983102+03:00", "owner": "test", "id": "a22ef2b1374b43ddb886821c0582bc7dk", "lotValues": [ { "relatedLot": "324d7b2dd7a54df29bad6d0b7c91b2e9", "date": "2016-04-07T16:36:58.983062+03:00", } ], } ], } mock_csv = mock
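The CSV assertions above lean on mock.mock_open to intercept file writes. A self-contained sketch of that pattern (shown against Python 3's builtins.open; the original tests target Python 2's __builtin__.open):

from unittest import mock

m = mock.mock_open()
with mock.patch('builtins.open', m):
    with open('report.csv', 'w') as f:     # intercepted, nothing touches disk
        f.write('a,b\r\n')

m.assert_called_once_with('report.csv', 'w')
m().write.assert_called_once_with('a,b\r\n')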
kirsle/rophako
rophako/jsondb.py
Python
gpl-2.0
7,743
0.002583
# -*- coding: utf-8 -*- from __future__ import unicode_literals, print_function, absolute_import """JSON flat file database system.""" import codecs import os import os.path import re from fcntl import flock, LOCK_EX, LOCK_SH, LOCK_UN import redis import json import time from rophako.settings import Config from rophako.utils import handle_exception from rophako.log import logger redis_client = None cache_lifetime = 60*60 # 1 hour def get(document, cache=True): """Get a specific document from the DB.""" logger.debug("JsonDB: GET {}".format(document)) # Exists? if not exists(document): logger.debug("Requested document doesn't exist") return None path = mkpath(document) stat = os.stat(path) # Do we have it cached? data = get_cache(document) if cache else None if data: # Check if the cache is fresh. if stat.st_mtime > get_cache(document+"_mtime"): del_cache(document) del_cache(document+"_mtime") else: return data # Get a lock for reading. lock = lock_cache(document) # Get the JSON data. data = read_json(path) # Unlock! unlock_cache(lock) # Cache and return it. if cache: set_cache(document, data, expires=cache_lifetime) set_cache(document+"_mtime", stat.st_mtime, expires=cache_lifetime) return data def commit(document, data, cache=True): """Insert/update a document in the DB.""" # Only allow one commit at a time. lock = lock_cache(document) # Need to create the file? path = mkpath(document) if not os.path.isfile(path): parts = path.split("/") parts.pop() # Remove the file part directory = list() # Create all the folders. for part in parts: directory.append(part) segment = "/".join(directory) if len(segment) > 0 and not os.path.isdir(segment): logger.debug("JsonDB: mkdir {}".format(segment)) os.mkdir(segment, 0o755) # Write the JSON. write_json(path, data) # Update the cached document. if cache: set_cache(document, data, expires=cache_lifetime) set_cache(document+"_mtime", time.time(), expires=cache_lifetime) # Release the lock. unlock_cache(lock) def delete(document): """Delete a document from the DB.""" path = mkpath(document) if os.path.isfile(path): logger.debug("Delete DB document: {}".format(path)) os.unlink(path) del_cache(document) def exists(document): """Query whether a document exists.""" path = mkpath(document) return os.path.isfile(path) def list_docs(path, recursive=False): """List all the documents at the path.""" root = os.path.join(Config.db.db_root, path) docs = list() if not os.path.isdir(root): return [] for item in sorted(os.listdir(root)): target = os.path.join(root, item) db_path = os.path.join(path, item) # Descend into subdirectories? if os.path.isdir(target): if recursive: docs += [ os.path.join(item, name) for name in list_docs(db_path) ] else: continue if target.endswith(".json"): name = re.sub(r'\.json$', '', item) docs.append(name) return docs def mkpath(document): """Turn a DB path into a JSON file path.""" if document.endswith(".json"): # Let's not do that. raise Exception("mkpath: document path already includes .json extension!") return "{}/{}.json".format(Config.db.db_root, str(document)) def read_json(path): """Slurp, decode and return the data from a JSON document.""" path = str(path) if not os.path.isfile(path): raise Exception("Can't read JSON file {}: file not found!".format(path)) # Don't allow any fishy looking paths. if ".." in path: logger.error("ERROR: JsonDB tried to read a path with two dots: {}".format(path)) raise Exception() # Open and lock the file. 
fh = codecs.open(path, 'r', 'utf-8') flock(fh, LOCK_SH) text = fh.read() flock(fh, LOCK_UN) fh.close() # Decode. try: data = json.loads(text) except: logger.error("Couldn't decode JSON data from {}".format(path)) handle_exception(Exception("Couldn't decode JSON from {}\n{}".format( path, text, ))) data = None return data def write_json(path, data): """Write a JSON document.""" path = str(path) # Don't allow any fishy looking paths. if ".." in path: logger.error("ERROR: JsonDB tried to write a path with two dots: {}".format(path)) raise Exception() logger.debug("JsonDB: WRITE > {}".format(path)) # Open and lock the file. fh = codecs.open(path, 'w', 'utf-8') flock(fh, LOCK_EX) # Write it. fh.write(json.dumps(data, indent=4, separators=(',', ': ')))
# Unlock and close. flock(fh, LOCK_UN) fh.close() ############################################################################ # Redis Caching Functions # ############################################################################ disable_redis = False def get_redis(): """Connect to Redis or return the existing connection.""" global redis_client global d
isable_redis if not redis_client and not disable_redis: try: redis_client = redis.StrictRedis( host = Config.db.redis_host, port = Config.db.redis_port, db = Config.db.redis_db, ) redis_client.ping() except Exception as e: logger.error("Couldn't connect to Redis; memory caching will be disabled! {}".format(e)) redis_client = None disable_redis = True return redis_client def set_cache(key, value, expires=None): """Set a key in the Redis cache.""" key = Config.db.redis_prefix + key client = get_redis() if not client: return try: client.set(key, json.dumps(value)) # Expiration date? if expires: client.expire(key, expires) except: logger.error("Redis exception: couldn't set_cache {}".format(key)) def get_cache(key): """Get a cached item.""" key = Config.db.redis_prefix + key value = None client = get_redis() if not client: return try: value = client.get(key) if value: value = json.loads(value) except: logger.debug("Redis exception: couldn't get_cache {}".format(key)) value = None return value def del_cache(key): """Delete a cached item.""" key = Config.db.redis_prefix + key client = get_redis() if not client: return client.delete(key) def lock_cache(key, timeout=5, expire=20): """Cache level 'file locking' implementation. The `key` will be automatically suffixed with `_lock`. The `timeout` is the max amount of time to wait for a lock. The `expire` is how long a lock may exist before it's considered stale. Returns True on success, None on failure to acquire lock.""" client = get_redis() if not client: return # Take the lock. lock = client.lock(key, timeout=expire) lock.acquire() logger.debug("Cache lock acquired: {}, expires in {}s".format(key, expire)) return lock def unlock_cache(lock): """Release the lock on a cache key.""" if lock: lock.release() logger.debug("Cache lock released")
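A minimal sketch of the redis-py lock primitive that lock_cache/unlock_cache wrap above; the connection defaults and the key name are illustrative:

import redis

client = redis.StrictRedis()               # localhost:6379, db 0 by default
lock = client.lock('somedoc', timeout=20)  # a stale lock self-expires after 20s
lock.acquire()                             # blocks until the lock is held
try:
    pass                                   # critical section: touch the JSON file
finally:
    lock.release()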
lesteve/joblib
joblib/test/test_memory.py
Python
bsd-3-clause
39,966
0.0002
""" Test the memory module. """ # Author: Gael Varoquaux <gael dot varoquaux at normalesup dot org> # Copyright (c) 2009 Gael Varoquaux # License: BSD Style, 3 clauses. import shutil import os import os.path import pickle import sys import time import datetime import pytest from joblib.memory import Memory from joblib.memory import MemorizedFunc, NotMemorizedFunc from joblib.memory import MemorizedResult, NotMemorizedResult from joblib.memory import _FUNCTION_HASHES from joblib.memory import register_store_backend, _STORE_BACKENDS from joblib.memory import _build_func_identifier, _store_backend_factory from joblib.memory import JobLibCollisionWarning from joblib.parallel import Parallel, delayed from joblib._store_backends import StoreBackendBase, FileSystemStoreBackend from joblib.test.common import with_numpy, np from joblib.test.common import with_multiprocessing
from joblib.testing import parametrize, raises, warns from joblib._compat import PY3_OR_LATER from joblib.hashing import hash if sys.version_info[:2] >= (3, 4): import pathlib ############################################################################### # Module-level variables for the tests def f(x, y=1): """ A module-level function for testing purposes. """ return x ** 2 + y ##############################################################
################# # Helper function for the tests def check_identity_lazy(func, accumulator, location): """ Given a function and an accumulator (a list that grows every time the function is called), check that the function can be decorated by memory to be a lazy identity. """ # Call each function with several arguments, and check that it is # evaluated only once per argument. memory = Memory(location=location, verbose=0) func = memory.cache(func) for i in range(3): for _ in range(2): assert func(i) == i assert len(accumulator) == i + 1 def corrupt_single_cache_item(memory): single_cache_item, = memory.store_backend.get_items() output_filename = os.path.join(single_cache_item.path, 'output.pkl') with open(output_filename, 'w') as f: f.write('garbage') def monkeypatch_cached_func_warn(func, monkeypatch_fixture): # Need monkeypatch because pytest does not # capture stdlib logging output (see # https://github.com/pytest-dev/pytest/issues/2079) recorded = [] def append_to_record(item): recorded.append(item) monkeypatch_fixture.setattr(func, 'warn', append_to_record) return recorded ############################################################################### # Tests def test_memory_integration(tmpdir): """ Simple test of memory lazy evaluation. """ accumulator = list() # Rmk: this function has the same name than a module-level function, # thus it serves as a test to see that both are identified # as different. def f(l): accumulator.append(1) return l check_identity_lazy(f, accumulator, tmpdir.strpath) # Now test clearing for compress in (False, True): for mmap_mode in ('r', None): memory = Memory(location=tmpdir.strpath, verbose=10, mmap_mode=mmap_mode, compress=compress) # First clear the cache directory, to check that our code can # handle that # NOTE: this line would raise an exception, as the database file is # still open; we ignore the error since we want to test what # happens if the directory disappears shutil.rmtree(tmpdir.strpath, ignore_errors=True) g = memory.cache(f) g(1) g.clear(warn=False) current_accumulator = len(accumulator) out = g(1) assert len(accumulator) == current_accumulator + 1 # Also, check that Memory.eval works similarly assert memory.eval(f, 1) == out assert len(accumulator) == current_accumulator + 1 # Now do a smoke test with a function defined in __main__, as the name # mangling rules are more complex f.__module__ = '__main__' memory = Memory(location=tmpdir.strpath, verbose=0) memory.cache(f)(1) def test_no_memory(): """ Test memory with location=None: no memoize """ accumulator = list() def ff(l): accumulator.append(1) return l memory = Memory(location=None, verbose=0) gg = memory.cache(ff) for _ in range(4): current_accumulator = len(accumulator) gg(1) assert len(accumulator) == current_accumulator + 1 def test_memory_kwarg(tmpdir): " Test memory with a function with keyword arguments." accumulator = list() def g(l=None, m=1): accumulator.append(1) return l check_identity_lazy(g, accumulator, tmpdir.strpath) memory = Memory(location=tmpdir.strpath, verbose=0) g = memory.cache(g) # Smoke test with an explicit keyword argument: assert g(l=30, m=2) == 30 def test_memory_lambda(tmpdir): " Test memory with a function with a lambda." accumulator = list() def helper(x): """ A helper function to define l as a lambda. 
""" accumulator.append(1) return x l = lambda x: helper(x) check_identity_lazy(l, accumulator, tmpdir.strpath) def test_memory_name_collision(tmpdir): " Check that name collisions with functions will raise warnings" memory = Memory(location=tmpdir.strpath, verbose=0) @memory.cache def name_collision(x): """ A first function called name_collision """ return x a = name_collision @memory.cache def name_collision(x): """ A second function called name_collision """ return x b = name_collision with warns(JobLibCollisionWarning) as warninfo: a(1) b(1) assert len(warninfo) == 1 assert "collision" in str(warninfo[0].message) def test_memory_warning_lambda_collisions(tmpdir): # Check that multiple use of lambda will raise collisions memory = Memory(location=tmpdir.strpath, verbose=0) a = lambda x: x a = memory.cache(a) b = lambda x: x + 1 b = memory.cache(b) with warns(JobLibCollisionWarning) as warninfo: assert a(0) == 0 assert b(1) == 2 assert a(1) == 1 # In recent Python versions, we can retrieve the code of lambdas, # thus nothing is raised assert len(warninfo) == 4 def test_memory_warning_collision_detection(tmpdir): # Check that collisions impossible to detect will raise appropriate # warnings. memory = Memory(location=tmpdir.strpath, verbose=0) a1 = eval('lambda x: x') a1 = memory.cache(a1) b1 = eval('lambda x: x+1') b1 = memory.cache(b1) with warns(JobLibCollisionWarning) as warninfo: a1(1) b1(1) a1(0) assert len(warninfo) == 2 assert "cannot detect" in str(warninfo[0].message).lower() def test_memory_partial(tmpdir): " Test memory with functools.partial." accumulator = list() def func(x, y): """ A helper function to define l as a lambda. """ accumulator.append(1) return y import functools function = functools.partial(func, 1) check_identity_lazy(function, accumulator, tmpdir.strpath) def test_memory_eval(tmpdir): " Smoke test memory with a function with a function defined in an eval." memory = Memory(location=tmpdir.strpath, verbose=0) m = eval('lambda x: x') mm = memory.cache(m) assert mm(1) == 1 def count_and_append(x=[]): """ A function with a side effect in its arguments. Return the lenght of its argument and append one element. """ len_x = len(x) x.append(None) return len_x def test_argument_change(tmpdir): """ Check that if a function has a side effect in its arguments, it should use the hash of changing arguments. """ memory = Memory(location=tmpdir.strpath, verbose=0) func = memory.cache(count_and_append) # call the function for the first time, is should cache it with
OxPython/Python_set_pop
src/set_pop.py
Python
epl-1.0
574
0.017483
''' Created on Jul 9, 2014 @author: viejoemer How to remove an arbitrary element and retrieve that item at t
he same time? pop() Remove and return an arbitrary element from the set. Raises KeyError if the set is empty. ''' #Create a set with values. s_1 = set([1,2,3]) print("set one", s_1) s_2 = set() print("set two", s_2) #Removing an element
value = s_1.pop() print("Set after pop",s_1) print("Value removed",value) #If the set is empty, pop() raises KeyError value = s_2.pop()
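A short sketch of guarding against the KeyError that the last line above deliberately triggers:

s = set()
try:
    value = s.pop()
except KeyError:
    value = None            # empty set: nothing to pop
print("Value removed", value)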
suttond/MODOI
ase/ga/convergence.py
Python
lgpl-3.0
3,442
0.000581
"""Classes that determine convergence of an algorithm run based on population stagnation or max raw score reached""" class Convergence(object): """ Base class for all convergence object to be based on. It is necessary to supply the population instance, to be able to obtain current and former populations. """ def __init__(self, population_instance): self.pop = population_instance self.pops = {} def converged(self): """This function is called to find out if the algorithm run has converged, it should return True or False. Overwrite this in the inherited class.""" raise NotImplementedError def populate_pops(self, to_gen): """Populate the pops dictionary with how the population looked after i number of generations.""" for i in range(to_gen): if i not in self.pops.keys(): self.pops[i] = self.pop.get_population_after_generation(i) class GenerationRepetitionConvergence(Convergence): """Returns True if the latest finished population is stagnated for number_of_generations. Parameters: number_of_generations: int How many generations need to be equal before convergence. number_of_individuals: int How many of the fittest individuals should be included in the convergence test. Default is -1 meaning all in the population. max_generations: int The maximum number of generations the GA is allowed to run. Default is indefinite. """ def __init__(self, population_instance, number_of_generations, number_of_individuals=-1, max_generations=100000000): Convergence.__init__(self, population_instance) self.numgens = number_of_generations self.numindis = number_of_individuals self.maxgen = max_generations def converged(self): size = self.pop.pop_size cur_gen_num = self.pop.dc.get_generation_number(size) if cur_gen_num >= self.maxgen: return True if cur_gen_num <= 1: return False cur_pop = self.pop.get_current_population() newest =
max([i.info['key_value_pairs']['generation'] for i in cur_pop[:self.numindis]]) if newest + self.numgens > cur_gen_num: return False self.populate_pops(cur_gen_num) duplicate_gens = 1
latest_pop = self.pops[cur_gen_num - 1] for i in range(cur_gen_num - 2, -1, -1): test_pop = self.pops[i] if test_pop[:self.numindis] == latest_pop[:self.numindis]: duplicate_gens += 1 if duplicate_gens >= self.numgens: return True return False class RawScoreConvergence(Convergence): """Returns True if the supplied max_raw_score has been reached""" def __init__(self, population_instance, max_raw_score, eps=1e-3): Convergence.__init__(self, population_instance) self.max_raw_score = max_raw_score self.eps = eps def converged(self): cur_pop = self.pop.get_current_population() if abs(cur_pop[0].get_raw_score() - self.max_raw_score) <= self.eps: return True return False class NeverConvergence(object): """Test class that never converges.""" def __init__(self): pass def converged(self): return False
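A minimal sketch of how a convergence object is polled in a driver loop, using the dependency-free NeverConvergence defined above (the generation cap only keeps the illustration finite):

cc = NeverConvergence()
generation = 0
while not cc.converged() and generation < 5:
    generation += 1         # stand-in for evolving one GA generation
print(generation)           # 5: NeverConvergence never stops the loop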
gerritjvv/cryptoplayground
kerberos/kdc/src/krb5-1.16/src/tests/t_keytab.py
Python
apache-2.0
5,482
0.00073
#!/usr/bin/python from k5test import * for realm in multipass_realms(create_user=False): # Test kinit with a keytab. realm.kinit(realm.host_princ, flags=['-k']) realm = K5Realm(get_creds=False, start_kadmind=True) # Test kinit with a partial keytab. pkeytab = realm.keytab + '.partial' realm.run([ktutil], input=('rkt %s\ndelent 1\nwkt %s\n' % (realm.keytab, pkeytab))) realm.kinit(realm.host_princ, flags=['-k', '-t', pkeytab]) # Test kinit with no keys for client in keytab. realm.kinit(realm.user_princ, flags=['-k'], expected_code=1, expected_msg='no suitable keys') # Test kinit and klist with client keytab defaults. realm.extract_keytab(realm.user_princ, realm.client_keytab); realm.run([kinit, '-k', '-i']) realm.klist(realm.user_princ) realm.run([kdestroy]) realm.kinit(realm.user_princ, flags=['-k', '-i']) realm.klist(realm.user_princ) out = realm.run([klist, '-k', '-i']) if realm.client_keytab not in out or realm.user_princ not in out: fail('Expected output not seen from klist -k -i') # Test implicit request for keytab (-i or -t without -k) realm.run([kdestroy]) realm.ki
nit(realm.host_princ, flags=['-t', realm.keytab],
expected_msg='keytab specified, forcing -k') realm.klist(realm.host_princ) realm.run([kdestroy]) realm.kinit(realm.user_princ, flags=['-i'], expected_msg='keytab specified, forcing -k') realm.klist(realm.user_princ) # Test extracting keys with multiple key versions present. os.remove(realm.keytab) realm.run([kadminl, 'cpw', '-randkey', '-keepold', realm.host_princ]) out = realm.run([kadminl, 'ktadd', '-norandkey', realm.host_princ]) if 'with kvno 1,' not in out or 'with kvno 2,' not in out: fail('Expected output not seen from kadmin.local ktadd -norandkey') out = realm.run([klist, '-k', '-e']) if ' 1 host/' not in out or ' 2 host/' not in out: fail('Expected output not seen from klist -k -e') # Test again using kadmin over the network. realm.prep_kadmin() os.remove(realm.keytab) out = realm.run_kadmin(['ktadd', '-norandkey', realm.host_princ]) if 'with kvno 1,' not in out or 'with kvno 2,' not in out: fail('Expected output not seen from kadmin.local ktadd -norandkey') out = realm.run([klist, '-k', '-e']) if ' 1 host/' not in out or ' 2 host/' not in out: fail('Expected output not seen from klist -k -e') # Test handling of kvno values beyond 255. Use kadmin over the # network since we used to have an 8-bit limit on kvno marshalling. # Test one key rotation, verifying that the expected new kvno appears # in the keytab and in the principal entry. def test_key_rotate(realm, princ, expected_kvno): realm.run_kadmin(['ktadd', '-k', realm.keytab, princ]) realm.run([kadminl, 'ktrem', princ, 'old']) realm.kinit(princ, flags=['-k']) msg = '%d %s' % (expected_kvno, princ) out = realm.run([klist, '-k'], expected_msg=msg) msg = 'Key: vno %d,' % expected_kvno out = realm.run_kadmin(['getprinc', princ], expected_msg=msg) princ = 'foo/bar@%s' % realm.realm realm.addprinc(princ) os.remove(realm.keytab) realm.run([kadminl, 'modprinc', '-kvno', '253', princ]) test_key_rotate(realm, princ, 254) test_key_rotate(realm, princ, 255) test_key_rotate(realm, princ, 256) test_key_rotate(realm, princ, 257) realm.run([kadminl, 'modprinc', '-kvno', '32766', princ]) test_key_rotate(realm, princ, 32767) test_key_rotate(realm, princ, 32768) test_key_rotate(realm, princ, 32769) realm.run([kadminl, 'modprinc', '-kvno', '65534', princ]) test_key_rotate(realm, princ, 65535) test_key_rotate(realm, princ, 1) test_key_rotate(realm, princ, 2) # Test that klist -k can read a keytab entry without a 32-bit kvno and # reports the 8-bit key version. record = '\x00\x01' # principal component count record += '\x00\x0bKRBTEST.COM' # realm record += '\x00\x04user' # principal component record += '\x00\x00\x00\x01' # name type (NT-PRINCIPAL) record += '\x54\xf7\x4d\x35' # timestamp record += '\x02' # key version record += '\x00\x12' # enctype record += '\x00\x20' # key length record += '\x00' * 32 # key bytes f = open(realm.keytab, 'w') f.write('\x05\x02\x00\x00\x00' + chr(len(record))) f.write(record) f.close() msg = ' 2 %s' % realm.user_princ out = realm.run([klist, '-k'], expected_msg=msg) # Make sure zero-fill isn't treated as a 32-bit kvno. f = open(realm.keytab, 'w') f.write('\x05\x02\x00\x00\x00' + chr(len(record) + 4)) f.write(record) f.write('\x00\x00\x00\x00') f.close() msg = ' 2 %s' % realm.user_princ out = realm.run([klist, '-k'], expected_msg=msg) # Make sure a hand-crafted 32-bit kvno is recognized. 
f = open(realm.keytab, 'w') f.write('\x05\x02\x00\x00\x00' + chr(len(record) + 4)) f.write(record) f.write('\x00\x00\x00\x03') f.close() msg = ' 3 %s' % realm.user_princ out = realm.run([klist, '-k'], expected_msg=msg) # Test parameter expansion in profile variables realm.stop() conf = {'libdefaults': { 'default_keytab_name': 'testdir/%{null}abc%{uid}', 'default_client_keytab_name': 'testdir/%{null}xyz%{uid}'}} realm = K5Realm(krb5_conf=conf, create_kdb=False) del realm.env['KRB5_KTNAME'] del realm.env['KRB5_CLIENT_KTNAME'] uidstr = str(os.getuid()) msg = 'FILE:testdir/abc%s' % uidstr out = realm.run([klist, '-k'], expected_code=1, expected_msg=msg) msg = 'FILE:testdir/xyz%s' % uidstr out = realm.run([klist, '-ki'], expected_code=1, expected_msg=msg) success('Keytab-related tests')
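The hand-built 32-bit kvno trailer above can be expressed more explicitly with struct; a small sketch under the same big-endian layout assumption:

import struct

kvno_trailer = struct.pack('>I', 3)         # 4-byte big-endian key version
assert kvno_trailer == b'\x00\x00\x00\x03'  # the bytes appended by hand above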
openstack/neutron-lib
neutron_lib/tests/unit/api/definitions/test_port.py
Python
apache-2.0
793
0
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT #
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from neutron_lib.api.definitions import port from neutron_lib.tests.unit.api.defini
tions import base class PortDefinitionTestCase(base.DefinitionBaseTestCase): extension_module = port extension_attributes = ()
DinoTools/ArduRPC-python
ardurpc/handler/lcd/__init__.py
Python
lgpl-3.0
1,384
0.001445
import ardurpc from ardurpc.handler import Handler class Base(Handler): """Handler for the Base Text-LCD type""" def __init__(self, **kwargs): Handler.__init__(self, **kwargs) def getWidth(self): """ Get the display width as number of characters. :return: Width :rtype: Integer """ return self._call(0x01) def getHeight(self): """ Get the display height as number of characters. :return: Height :rtype: Integer """ return self._call(0x02) def clear(self): """ Clear the LCD screen and set the cursor position to the upper-left corner. """ return self._call(0x11) def home(self): """ Set the cursor position to the upper-left corner. """ return self._call(
0x12) def setCursor(self, col, row): """ Position the cursor. """ return self._call(0x13, '>BB', col, row) def write(self, c): """ Print a single character to the LCD. """ c = c.encode('ASCII') return self._call(0x21, '>B', c[0]) def print(self, s): """ Print text to the
LCD. """ s = s.encode('ASCII') return self._call(0x22, '>B%ds' % len(s), len(s), s) ardurpc.register(0x0300, Base, mask=8)
Akrog/cinder
cinder/volume/drivers/netapp/dataontap/nfs_cmode.py
Python
apache-2.0
24,731
0
# Copyright (c) 2012 NetApp, Inc. All rights reserved. # Copyright (c) 2014 Ben Swartzlander. All rights reserved. # Copyright (c) 2014 Navneet Singh. All rights reserved. # Copyright (c) 2014 Clinton Knight. All rights reserved. # Copyright (c) 2014 Alex Meade. All rights reserved. # Copyright (c) 2014 Bob Callaway. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Volume driver for NetApp NFS storage. """ import os import uuid from oslo_utils import units import six from cinder import exception from cinder.i18n import _, _LE, _LI, _LW from cinder.image import image_utils from cinder.openstack.common import log as logging from cinder import utils from cinder.volume.drivers.netapp.dataontap.client import api as na_api from cinder.volume.drivers.netapp.dataontap.client import client_cmode from cinder.volume.drivers.netapp.dataontap import nfs_base from cinder.volume.drivers.netapp.dataontap import ssc_cmode from cinder.volume.drivers.netapp import options as na_opts from cinder.volume.drivers.netapp import utils as na_utils from cinder.volume import utils as volume_utils LOG = logging.getLogger(__name__) class NetAppCmodeNfsDriver(nfs_base.NetAppNfsDriver): """NetApp NFS driver for Data ONTAP (Cluster-mode).""" REQUIRED_CMODE_FLAGS = ['netapp_vserver'] def __init__(self, *args, **kwargs): super(NetAppCmodeNfsDriver, self).__init__(*args, **kwargs) self.configuration.append_config_values(na_opts.netapp_cluster_opts) def do_setup(self, context): """Do the customized set up on client for cluster mode.""" super(NetAppCmodeNfsDriver, self).do_setup(context) na_utils.check_flags(self.REQUIRED_CMODE_FLAGS, self.configuration) self.vserver = self.configuration.netapp_vserver self.zapi_client = client_cmode.Client( transport_type=self.configuration.netapp_transport_type, username=self.configuration.netapp_login, password=self.configuration.netapp_password, hostname=self.configuration.netapp_server_hostname, port=self.configuration.netapp_server_port, vserver=self.vserver) self.ssc_enabled = True self.ssc_vols = None self.stale_vols = set() def check_for_setup_error(self): """Check that the driver is working and can communicate.""" super(NetAppCmodeNfsDriver, self).check_for_setup_error() ssc_cmode.check_ssc_api_permissions(self.zapi_client) def create_volume(self, volume): """Creates a volume. 
:param volume: volume reference """ LOG.debug('create_volume on %s' % volume['host']) self._ensure_shares_mounted() # get share as pool name share = volume_utils.extract_host(volume['host'], level='pool') if share is None: msg = _("Pool is not available in the volume host field.") raise exception.InvalidHost(reason=msg) extra_specs = na_utils.get_volume_extra_specs(volume) qos_policy_group = extra_specs.pop('netapp:qos_policy_group', None) \ if extra_specs else None # warn on obsolete extra specs na_utils.log_extra_spec_warnings(extra_specs) try: volume['provider_location'] = share LOG.info(_LI('casted to %s') % volume['provider_location']) self._do_create_volume(volume) if qos_policy_group: self._set_qos_policy_group_on_volume(volume, share, qos_policy_group) return {'provider_location': volume['provider_location']} except Exception as ex: LOG.error(_LW("Exception creating vol %(name)s on " "share %(share)s. Details: %(ex)s") % {'name': volume['name'], 'share': volume['provider_location'], 'ex': ex}) volume['provider_location'] = None finally: if self.ssc_enabled: self._update_stale_vols(self._get_vol_for_share(share)) msg = _("Volume %s could not be cr
eated on shares.") raise exception.VolumeBackendAPIException(data=msg % (volume['name'])
) def _set_qos_policy_group_on_volume(self, volume, share, qos_policy_group): target_path = '%s' % (volume['name']) export_path = share.split(':')[1] flex_vol_name = self.zapi_client.get_vol_by_junc_vserver(self.vserver, export_path) self.zapi_client.file_assign_qos(flex_vol_name, qos_policy_group, target_path) def _check_volume_type(self, volume, share, file_name): """Match volume type for share file.""" extra_specs = na_utils.get_volume_extra_specs(volume) qos_policy_group = extra_specs.pop('netapp:qos_policy_group', None) \ if extra_specs else None if not self._is_share_vol_type_match(volume, share): raise exception.ManageExistingVolumeTypeMismatch( reason=(_("Volume type does not match for share %s."), share)) if qos_policy_group: try: vserver, flex_vol_name = self._get_vserver_and_exp_vol( share=share) self.zapi_client.file_assign_qos(flex_vol_name, qos_policy_group, file_name) except na_api.NaApiError as ex: LOG.exception(_LE('Setting file QoS policy group failed. %s'), ex) raise exception.NetAppDriverException( reason=(_('Setting file QoS policy group failed. %s'), ex)) def _clone_volume(self, volume_name, clone_name, volume_id, share=None): """Clones mounted volume on NetApp Cluster.""" (vserver, exp_volume) = self._get_vserver_and_exp_vol(volume_id, share) self.zapi_client.clone_file(exp_volume, volume_name, clone_name, vserver) share = share if share else self._get_provider_location(volume_id) self._post_prov_deprov_in_ssc(share) def _get_vserver_and_exp_vol(self, volume_id=None, share=None): """Gets the vserver and export volume for share.""" (host_ip, export_path) = self._get_export_ip_path(volume_id, share) ifs = self.zapi_client.get_if_info_by_ip(host_ip) vserver = ifs[0].get_child_content('vserver') exp_volume = self.zapi_client.get_vol_by_junc_vserver(vserver, export_path) return vserver, exp_volume def _update_volume_stats(self): """Retrieve stats info from vserver.""" self._ensure_shares_mounted() sync = True if self.ssc_vols is None else False ssc_cmode.refresh_cluster_ssc(self, self.zapi_client.connection, self.vserver, synchronous=sync) LOG.debug('Updating volume stats') data = {} netapp_backend = 'NetApp_NFS_Cluster_direct' backend_name = self.configuration.safe_get('volume_backend_name') data['volume_backend_name'] = backend_name or netapp_backend data['vendor_name'] = 'NetApp' data['driver_version'] = self.VERSION data['storage_protocol'] = 'nfs' data['pools'] = self._get_pool_stats()
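The pool lookup in create_volume above follows Cinder's 'host@backend#pool' host-string convention. A hedged standalone sketch of what extract_host(..., level='pool') yields for such a string (the host value is an invented example, and the one-liner is only an approximation of the helper):

host = 'node1@netapp_nfs#/vol/share1'       # host@backend#pool
pool = host.partition('#')[2] or None       # rough equivalent of level='pool'
print(pool)                                 # /vol/share1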
magenta/ddsp
ddsp/training/train_util.py
Python
apache-2.0
12,295
0.008459
# Copyright 2022 The DDSP Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # Lint as: python3 """Library of training functions.""" import inspect import json import os import time from absl import logging from ddsp.training import cloud import gin import tensorflow.compat.v2 as tf # ---------------------- Helper Functions -------------------------------------- def get_strategy(tpu='', cluster_config=''): """Create a distribution strategy for running on accelerators. For CPU, single-GPU, or multi-GPU jobs on a single machine, call this function without args to return a MirroredStrategy. For TPU jobs, speci
fy an address to the `tpu` argument. For multi-machine GPU jobs, specify a `cluster_config` argument of the cluster configuration. Args: tpu: Address of the TPU. No TPU if left blank. cluster_config: Shoul
d be specified only for multi-worker jobs. Task specific dictionary for cluster config dict in the TF_CONFIG format. https://www.tensorflow.org/guide/distributed_training#setting_up_tf_config_environment_variable If passed as a string, will be parsed to a dictionary. Two components should be specified: cluster and task. Cluster provides information about the training cluster, which is a dict consisting of different types of jobs such as chief and worker. Task is information about the current task. For example: "{"cluster": {"worker": ["host1:port", "host2:port"]}, "task": {"type": "worker", "index": 0}}" Returns: A distribution strategy. MirroredStrategy by default. TPUStrategy if `tpu` arg is specified. MultiWorkerMirroredStrategy if `cluster_config` arg is specified. """ if tpu: logging.info('Use TPU at %s', tpu) resolver = tf.distribute.cluster_resolver.TPUClusterResolver(tpu=tpu) tf.config.experimental_connect_to_cluster(resolver) tf.tpu.experimental.initialize_tpu_system(resolver) strategy = tf.distribute.TPUStrategy(resolver) elif cluster_config: if not isinstance(cluster_config, dict): cluster_config = json.loads(cluster_config) cluster_spec = tf.train.ClusterSpec(cluster_config['cluster']) resolver = tf.distribute.cluster_resolver.SimpleClusterResolver( cluster_spec=cluster_spec, task_type=cluster_config['task']['type'], task_id=cluster_config['task']['index'], num_accelerators={'GPU': len(tf.config.list_physical_devices('GPU'))}, rpc_layer='grpc') strategy = tf.distribute.experimental.MultiWorkerMirroredStrategy( cluster_resolver=resolver) else: logging.info('Defaulting to MirroredStrategy') strategy = tf.distribute.MirroredStrategy() return strategy def expand_path(file_path): return os.path.expanduser(os.path.expandvars(file_path)) def get_latest_file(dir_path, prefix='operative_config-', suffix='.gin'): """Returns latest file with pattern '/dir_path/prefix[iteration]suffix'. Args: dir_path: Path to the directory. prefix: Filename prefix, not including directory. suffix: Filename suffix, including extension. Returns: Path to the latest file Raises: FileNotFoundError: If no files match the pattern '/dir_path/prefix[int]suffix'. """ dir_path = expand_path(dir_path) dir_prefix = os.path.join(dir_path, prefix) search_pattern = dir_prefix + '*' + suffix file_paths = tf.io.gfile.glob(search_pattern) if not file_paths: raise FileNotFoundError( f'No files found matching the pattern \'{search_pattern}\'.') try: # Filter to get highest iteration, no negative iterations. get_iter = lambda fp: abs(int(fp.split(dir_prefix)[-1].split(suffix)[0])) latest_file = max(file_paths, key=get_iter) return latest_file except ValueError as verror: raise FileNotFoundError( f'Files found with pattern \'{search_pattern}\' do not match ' f'the pattern \'{dir_prefix}[iteration_number]{suffix}\'.\n\n' f'Files found:\n{file_paths}') from verror def get_latest_checkpoint(checkpoint_path): """Helper function to get path to latest checkpoint. Args: checkpoint_path: Path to the directory containing model checkpoints, or to a specific checkpoint (e.g. `/path/to/model.ckpt-iteration`). Returns: Path to latest checkpoint. Raises: FileNotFoundError: If no checkpoint is found. """ checkpoint_path = expand_path(checkpoint_path) is_checkpoint = tf.io.gfile.exists(checkpoint_path + '.index') if is_checkpoint: # Return the path if it points to a checkpoint. return checkpoint_path else: # Search using 'checkpoints' file. # Returns None if no 'checkpoints' file, or directory doesn't exist. 
ckpt = tf.train.latest_checkpoint(checkpoint_path) if ckpt: return ckpt else: # Last resort, look for '/path/ckpt-[iter].index' files. ckpt_f = get_latest_file(checkpoint_path, prefix='ckpt-', suffix='.index') return ckpt_f.split('.index')[0] # ---------------------------------- Gin --------------------------------------- def get_latest_operative_config(restore_dir): """Finds the most recently saved operative_config in a directory. Args: restore_dir: Path to directory with gin operative_configs. Will also work if passing a path to a file in that directory such as a checkpoint. Returns: Filepath to most recent operative config. Raises: FileNotFoundError: If no config is found. """ try: return get_latest_file( restore_dir, prefix='operative_config-', suffix='.gin') except FileNotFoundError: return get_latest_file( os.path.dirname(restore_dir), prefix='operative_config-', suffix='.gin') def write_gin_config(summary_writer, save_dir, step): """"Writes gin operative_config to save_dir and tensorboard.""" config_str = gin.operative_config_str() # Save the original config string to a file. base_name = 'operative_config-{}'.format(step) fname = os.path.join(save_dir, base_name + '.gin') with tf.io.gfile.GFile(fname, 'w') as f: f.write(config_str) # Formatting hack copied from gin.tf.GinConfigSaverHook. def format_for_tensorboard(line): """Convert a single line to markdown format.""" if not line.startswith('#'): return ' ' + line line = line[2:] if line.startswith('===='): return '' if line.startswith('None'): return ' # None.' if line.endswith(':'): return '#### ' + line return line # Convert config string to markdown. md_lines = [] for line in config_str.splitlines(): md_line = format_for_tensorboard(line) if md_line is not None: md_lines.append(md_line) md_config_str = '\n'.join(md_lines) # Add to tensorboard. with summary_writer.as_default(): text_tensor = tf.convert_to_tensor(md_config_str) tf.summary.text(name='gin/' + base_name, data=text_tensor, step=step) summary_writer.flush() def gin_register_keras_layers(): """Registers all keras layers and Sequential to be referenceable in gin.""" # Register sequential model. gin.external_configurable(tf.keras.Sequential, 'tf.keras.Sequential') # Register all the layers. for k, v in inspect.getmembers(tf.keras.layers): # Duck typing for tf.keras.layers.Layer since keras uses metaclasses. if hasattr(v, 'variables'): gin.external_configurable(v, f'tf.keras.layers.{k}') # ------------------------ Training Loop --------------------------------------- @gin.configurable def train(data_provider, trainer, batch_size=32, num_steps=1000000, steps_per_summary=300, steps_per_s
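A standalone sketch of the 'highest iteration wins' selection that get_latest_file implements above (the file names are invented examples):

file_paths = ['ckpt/operative_config-100.gin', 'ckpt/operative_config-2000.gin']
get_iter = lambda fp: abs(int(fp.split('operative_config-')[-1].split('.gin')[0]))
print(max(file_paths, key=get_iter))        # ckpt/operative_config-2000.gin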
abhattad4/Digi-Menu
tests/template_tests/test_loaders.py
Python
bsd-3-clause
8,433
0.00107
# -*- coding: utf-8 -*- from __future__ import unicode_literals import os.path import sys import types import unittest from contextlib import contextmanager from django.template import Context, TemplateDoesNotExist from django.template.engine import Engine from django.test import SimpleTestCase, override_settings from django.utils import six from .utils import TEMPLATE_DIR try: import pkg_resources except ImportError: pkg_resources = None class CachedLoaderTests(SimpleTestCase): def create_engine(self, **kwargs): return Engine( loaders=[ ('django.template.loaders.cached.Loader', [ 'django.template.loaders.filesystem.Loader', ]), ], ) def test_templatedir_caching(self): """ #13573 -- Template directories should be part of the cache key. """ engine = self.create_engine() # Retrieve a template specifying a template directory to check t1, name = engine.find_template('test.html', (os.path.join(TEMPLATE_DIR, 'first'),)) # Now retrieve the same template name, but from a different directory t2, name = engine.find_template('test.html', (os.path.join(TEMPLATE_DIR, 'second'),)) # The two templates should not have the same content self.assertNotEqual(t1.render(Context({})), t2.render(Context({}))) def test_missing_template_is_cached(self): """ #19949 -- TemplateDoesNotExist exceptions should be cached. """ engine = self.create_engine() loader = engine.template_loaders[0] self.assertFalse('missing.html' in loader.template_cache) with self.assertRaises(TemplateDoesNotExist): loader.load_template("missing.html") self.assertEqual( loader.template_cache["missing.html"], TemplateDoesNotExist, "Cached loader failed to cache the TemplateDoesNotExist exception", ) def test_debug_nodelist_name(self): template_name = 'index.html' engine = Engine(dirs=[TEMPLATE_DIR], debug=True) template = engine.get_template(template_name) name = template.nodelist[0].source[0].name self.assertTrue( name.endswith(template_name), 'Template loaded through cached loader has incorrect name for debug page: %s' % template_name, ) template = engine.get_template(template_name) name = template.nodelist[0].source[0].name self.assertTrue( name.endswith(template_name), 'Cached template loaded through cached loader has incorrect name for debug page: %s' % template_name, ) @unittest.skipUnless(pkg_resources, 'setuptools is not installed') class EggLoaderTests(SimpleTestCase): @contextmanager def create_egg(self, name, resources): """ Creates a mock egg with a list of resources. name: The name of the module. resources: A dictionary of template names mapped to file-like objects. 
""" if six.PY2: name = name.encode('utf-8') class MockLoader(object): pass class MockProvider(pkg_resources.NullProvider): def __init__(self, module): pkg_resources.NullProvider.__init__(self, module) self.module = module def _has(self, path): return path in self.module._resources def _isdir(self, path): return False def get_resource_stream(self, manager, resource_name): return self.module._resources[resource_name] def _get(self, path): return self.module._resources[path].read() def _fn(self, base, resource_name): return os.path.normcase(resource_name) egg = types.ModuleType(name) egg.__loader__ = MockLoader() egg.__path__ = ['/some/bogus/path/'] egg.__file__ = '/some/bogus/path/__init__.pyc' egg._resources = resources sys.modules[name] = egg pkg_resources._provider_factories[MockLoader] = MockProvider try: yield finally: del sys.modules[name] del pkg_resources._provider_factories[MockLoader] def setUp(self): engine = Engine(loaders=[ 'django.template.loaders.eggs.Loader', ]) self.loader = engine.template_loaders[0] def test_existing(self): templates = { os.path.normcase('templates/y.html'): six.StringIO("y"), } with self.create_egg('egg', templates): with override_settings(INSTALLED_APPS=['egg']): contents, template_name = self.loader.load_template_source("y.html") self.assertEqual(contents, "y") self.assertEqual(template_name, "egg:egg:templates/y.html") def test_non_existing(self): """ Template loading fails if the template is not in the egg. """ with self.create_egg('egg', {}): with override_settings(INSTALLED_APPS=['egg']): with self.assertRaises(TemplateDoesNotExist): self.loader.load_template_source("not-existing.html") def test_not_installed(self): """ Template loading fails if the egg is not in INSTALLED_APPS. """ templates = { os.path.normcase('templates/y.html'): six.StringIO("y"), } with self.create_egg('egg', templates): with self.assertRaises(TemplateDoesNotExist): self.loader.load_template_source("y.html") class FileSystemLoaderTests(SimpleTestCase): def setUp(self): self.engine = Engine() @contextmanager def source_checker(self, dirs): loader = self.engine.template_loaders[0] def check_sources(path, expected_sources): expected_sources = [os.path.abspath(s) for s in expected_sources] self.assertEqual( list(loader.get_template_sources(path, dirs)), expected_sources, ) yield check_sources def test_directory_security(self): with self.source_checker(['/dir1', '/dir2']) as check_sources: check_sources('index.html', ['/dir1/index.html', '/dir2/index.html']) check_sources('/etc/passwd', []) check_sources('etc/passwd', ['/dir1/etc/passwd', '/dir2/etc/passwd']) check_sources('../etc/passwd', []) check_sources('../../../etc/passwd', []) check_sources('/dir1/index.html', ['/dir1/index.html']) check_sources('../dir2/index.html', ['/dir2/index.html']) check_sources('/dir1blah', []) check_sources('../dir1blah', []) def test_unicode_template_name(self): with self.source_checker(['/dir1', '/dir2']) as check_sources: # UTF-8 bytestrings are permitted. check_sources(b'\xc3\x85ngstr\xc3\xb6m', ['/dir1/Ångström', '/dir2/Ångström']) # Unicode strings are permitted. check_so
urces('Ångström', ['/dir1/Ångström', '/dir2/Ångström']) def test_utf8_bytestring(self): """ Invalid UTF-8 encoding in bytestrings should raise a useful error """ engine = Engine() loader = engine.template_loaders[0] with self.assertRaises(UnicodeDecodeError): list(loader.get_template_sources(b'\xc3\xc3', ['/dir1'])) def test_unicode_dir_name(self): with self.source_checker([b'/Stra\xc3\x9f
e']) as check_sources: check_sources('Ångström', ['/Straße/Ångström']) check_sources(b'\xc3\x85ngstr\xc3\xb6m', ['/Straße/Ångström']) @unittest.skipUnless( os.path.normcase('/TEST') == os.path.normpath('/test'), "This test only runs on case-sensitive file systems.", ) def test_case_sensitivity(self): with self.source_checker(['/dir1', '/DIR2']) as check_sources: check_sources('index.html', ['/dir1/index.html', '/DIR2/index.html']) check_source
jbedorf/tensorflow
tensorflow/python/ops/distributions/transformed_distribution.py
Python
apache-2.0
27,637
0.005826
# Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """A Transformed Distribution class.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import numpy as np from tensorflow.python.framework import constant_op from tensorflow.python
.framework import dtypes from tensorflow.python.framework import ops from tensorflow.python.framework import tensor_util from tensorflow.python.ops import array_ops from tensorflow.python.ops import check_ops from tensorflow.python.ops import control_flow_ops from tensorflow.python.ops import math_ops from tensorflow.python.ops.distributions import distribution as distribution_lib from tensorflow.python.ops.distributions import identity_bijector from tensorflow.python.
ops.distributions import util as distribution_util from tensorflow.python.util import deprecation __all__ = [ "TransformedDistribution", ] # The following helper functions attempt to statically perform a TF operation. # These functions make debugging easier since we can do more validation during # graph construction. def _static_value(x): """Returns the static value of a `Tensor` or `None`.""" return tensor_util.constant_value(ops.convert_to_tensor(x)) def _logical_and(*args): """Convenience function which attempts to statically `reduce_all`.""" args_ = [_static_value(x) for x in args] if any(x is not None and not bool(x) for x in args_): return constant_op.constant(False) if all(x is not None and bool(x) for x in args_): return constant_op.constant(True) if len(args) == 2: return math_ops.logical_and(*args) return math_ops.reduce_all(args) def _logical_equal(x, y): """Convenience function which attempts to statically compute `x == y`.""" x_ = _static_value(x) y_ = _static_value(y) if x_ is None or y_ is None: return math_ops.equal(x, y) return constant_op.constant(np.array_equal(x_, y_)) def _logical_not(x): """Convenience function which attempts to statically apply `logical_not`.""" x_ = _static_value(x) if x_ is None: return math_ops.logical_not(x) return constant_op.constant(np.logical_not(x_)) def _concat_vectors(*args): """Convenience function which concatenates input vectors.""" args_ = [_static_value(x) for x in args] if any(x_ is None for x_ in args_): return array_ops.concat(args, 0) return constant_op.constant([x_ for vec_ in args_ for x_ in vec_]) def _pick_scalar_condition(pred, cond_true, cond_false): """Convenience function which chooses the condition based on the predicate.""" # Note: This function is only valid if all of pred, cond_true, and cond_false # are scalars. This means its semantics are arguably more like tf.cond than # tf.select even though we use tf.select to implement it. pred_ = _static_value(pred) if pred_ is None: return array_ops.where(pred, cond_true, cond_false) return cond_true if pred_ else cond_false def _ones_like(x): """Convenience function attempts to statically construct `ones_like`.""" # Should only be used for small vectors. if x.get_shape().is_fully_defined(): return array_ops.ones(x.get_shape().as_list(), dtype=x.dtype) return array_ops.ones_like(x) def _ndims_from_shape(shape): """Returns `Tensor`'s `rank` implied by a `Tensor` shape.""" if shape.get_shape().ndims not in (None, 1): raise ValueError("input is not a valid shape: not 1D") if not shape.dtype.is_integer: raise TypeError("input is not a valid shape: wrong dtype") if shape.get_shape().is_fully_defined(): return constant_op.constant(shape.get_shape().as_list()[0]) return array_ops.shape(shape)[0] def _is_scalar_from_shape(shape): """Returns `True` `Tensor` if `Tensor` shape implies a scalar.""" return _logical_equal(_ndims_from_shape(shape), 0) class TransformedDistribution(distribution_lib.Distribution): """A Transformed Distribution. A `TransformedDistribution` models `p(y)` given a base distribution `p(x)`, and a deterministic, invertible, differentiable transform, `Y = g(X)`. The transform is typically an instance of the `Bijector` class and the base distribution is typically an instance of the `Distribution` class. A `Bijector` is expected to implement the following functions: - `forward`, - `inverse`, - `inverse_log_det_jacobian`. The semantics of these functions are outlined in the `Bijector` documentation. 
We now describe how a `TransformedDistribution` alters the input/outputs of a `Distribution` associated with a random variable (rv) `X`. Write `cdf(Y=y)` for an absolutely continuous cumulative distribution function of random variable `Y`; write the probability density function `pdf(Y=y) := d^k / (dy_1,...,dy_k) cdf(Y=y)` for its derivative wrt to `Y` evaluated at `y`. Assume that `Y = g(X)` where `g` is a deterministic diffeomorphism, i.e., a non-random, continuous, differentiable, and invertible function. Write the inverse of `g` as `X = g^{-1}(Y)` and `(J o g)(x)` for the Jacobian of `g` evaluated at `x`. A `TransformedDistribution` implements the following operations: * `sample` Mathematically: `Y = g(X)` Programmatically: `bijector.forward(distribution.sample(...))` * `log_prob` Mathematically: `(log o pdf)(Y=y) = (log o pdf o g^{-1})(y) + (log o abs o det o J o g^{-1})(y)` Programmatically: `(distribution.log_prob(bijector.inverse(y)) + bijector.inverse_log_det_jacobian(y))` * `log_cdf` Mathematically: `(log o cdf)(Y=y) = (log o cdf o g^{-1})(y)` Programmatically: `distribution.log_cdf(bijector.inverse(x))` * and similarly for: `cdf`, `prob`, `log_survival_function`, `survival_function`. A simple example constructing a Log-Normal distribution from a Normal distribution: ```python ds = tfp.distributions log_normal = ds.TransformedDistribution( distribution=ds.Normal(loc=0., scale=1.), bijector=ds.bijectors.Exp(), name="LogNormalTransformedDistribution") ``` A `LogNormal` made from callables: ```python ds = tfp.distributions log_normal = ds.TransformedDistribution( distribution=ds.Normal(loc=0., scale=1.), bijector=ds.bijectors.Inline( forward_fn=tf.exp, inverse_fn=tf.log, inverse_log_det_jacobian_fn=( lambda y: -tf.reduce_sum(tf.log(y), axis=-1)), name="LogNormalTransformedDistribution") ``` Another example constructing a Normal from a StandardNormal: ```python ds = tfp.distributions normal = ds.TransformedDistribution( distribution=ds.Normal(loc=0., scale=1.), bijector=ds.bijectors.Affine( shift=-1., scale_identity_multiplier=2.) name="NormalTransformedDistribution") ``` A `TransformedDistribution`'s batch- and event-shape are implied by the base distribution unless explicitly overridden by `batch_shape` or `event_shape` arguments. Specifying an overriding `batch_shape` (`event_shape`) is permitted only if the base distribution has scalar batch-shape (event-shape). The bijector is applied to the distribution as if the distribution possessed the overridden shape(s). The following example demonstrates how to construct a multivariate Normal as a `TransformedDistribution`. ```python ds = tfp.distributions # We will create two MVNs with batch_shape = event_shape = 2. mean = [[-1., 0], # batch:0 [0., 1]] # batch:1 chol_cov = [[[1., 0], [0, 1]], # batch:0
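The `log_prob` identity quoted above is easy to sanity-check numerically outside TensorFlow. A small NumPy/SciPy sketch for the Exp bijector from the Log-Normal example — SciPy is used purely for the check here and is an assumption, not a dependency of this module:

```python
import numpy as np
from scipy import stats

y = np.array([0.5, 1.0, 2.0])
x = np.log(y)                                    # g^{-1}(y) for g = exp
ildj = -np.log(y)                                # log|d/dy log y| = -log y
lp_via_bijector = stats.norm(0., 1.).logpdf(x) + ildj
lp_closed_form = stats.lognorm(s=1.).logpdf(y)   # closed-form Log-Normal(0, 1)
assert np.allclose(lp_via_bijector, lp_closed_form)
```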
apache/arrow
dev/archery/archery/integration/tester_go.py
Python
apache-2.0
3,999
0
# Licensed to the Apache Software Foundation (ASF) under
one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); yo
u may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import contextlib import os import subprocess from .tester import Tester from .util import run_cmd, log # FIXME(sbinet): revisit for Go modules _HOME = os.getenv("HOME", "~") _GOPATH = os.getenv("GOPATH", os.path.join(_HOME, "go")) _GOBIN = os.environ.get("GOBIN", os.path.join(_GOPATH, "bin")) _GO_INTEGRATION_EXE = os.path.join(_GOBIN, "arrow-json-integration-test") _STREAM_TO_FILE = os.path.join(_GOBIN, "arrow-stream-to-file") _FILE_TO_STREAM = os.path.join(_GOBIN, "arrow-file-to-stream") _FLIGHT_SERVER_CMD = [os.path.join(_GOBIN, "arrow-flight-integration-server")] _FLIGHT_CLIENT_CMD = [ os.path.join(_GOBIN, "arrow-flight-integration-client"), "-host", "localhost", ] class GoTester(Tester): PRODUCER = True CONSUMER = True FLIGHT_SERVER = True FLIGHT_CLIENT = True name = 'Go' def _run(self, arrow_path=None, json_path=None, command='VALIDATE'): cmd = [_GO_INTEGRATION_EXE] if arrow_path is not None: cmd.extend(['-arrow', arrow_path]) if json_path is not None: cmd.extend(['-json', json_path]) cmd.extend(['-mode', command]) if self.debug: log(' '.join(cmd)) run_cmd(cmd) def validate(self, json_path, arrow_path, quirks=None): return self._run(arrow_path, json_path, 'VALIDATE') def json_to_file(self, json_path, arrow_path): return self._run(arrow_path, json_path, 'JSON_TO_ARROW') def stream_to_file(self, stream_path, file_path): cmd = [_STREAM_TO_FILE, '<', stream_path, '>', file_path] self.run_shell_command(cmd) def file_to_stream(self, file_path, stream_path): cmd = [_FILE_TO_STREAM, file_path, '>', stream_path] self.run_shell_command(cmd) @contextlib.contextmanager def flight_server(self, scenario_name=None): cmd = _FLIGHT_SERVER_CMD + ['-port=0'] if scenario_name: cmd = cmd + ['-scenario', scenario_name] if self.debug: log(' '.join(cmd)) server = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) try: output = server.stdout.readline().decode() if not output.startswith('Server listening on localhost:'): server.kill() out, err = server.communicate() raise RuntimeError( 'Flight-Go server did not start properly, ' 'stdout: \n{}\n\nstderr:\n{}\n'.format( output + out.decode(), err.decode() ) ) port = int(output.split(':')[1]) yield port finally: server.kill() server.wait(5) def flight_request(self, port, json_path=None, scenario_name=None): cmd = _FLIGHT_CLIENT_CMD + [ '-port=' + str(port), ] if json_path: cmd.extend(('-path', json_path)) elif scenario_name: cmd.extend(('-scenario', scenario_name)) else: raise TypeError('Must provide one of json_path or scenario_name') if self.debug: log(' '.join(cmd)) run_cmd(cmd)
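flight_server() above synchronizes with the child process by parsing its first stdout line. The banner-parsing step in isolation, as a tiny self-contained sketch:

```python
def parse_flight_port(first_line):
    # Mirrors the banner check in flight_server() above.
    prefix = 'Server listening on localhost:'
    if not first_line.startswith(prefix):
        raise RuntimeError('unexpected server banner: %r' % first_line)
    return int(first_line.split(':')[1])

assert parse_flight_port('Server listening on localhost:31337\n') == 31337
```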
skycucumber/Messaging-Gateway
webapp/venv/lib/python2.7/site-packages/MySQLdb/connections.py
Python
gpl-2.0
11,777
0.001359
""" This module implements connections for MySQLdb. Presently there is only one class: Connection. Others are unlikely. However, you might want to make your own subclasses. In most cases, you will probably override Connection.default_cursor with a non-standard Cursor class. """ from MySQLdb import cursors from _mysql_exceptions import Warning, Error, InterfaceError, DataError, \ DatabaseError, OperationalError, IntegrityError, InternalError, \ NotSupportedError, ProgrammingError import types, _mysql import re def defaulterrorhandler(connection, cursor, errorclass, errorvalue): """ If cursor is not None, (errorclass, errorvalue) is appended to cursor.messages; otherwise it is appended to connection.messages. Then errorclass is raised with errorvalue as the value. You can override this with your own error handler by assigning it to the instance. """ error = errorclass, errorvalue if cursor: cursor.messages.append(error) else: connection.messages.append(error) del cursor del connection raise errorclass, errorvalue re_numeric_part = re.compile(r"^(\d+)") def numeric_part(s): """Returns the leading numeric part of a string. >>> numeric_part("20-alpha") 20 >>> numeric_part("foo") >>> numeric_part("16b") 16 """ m = re_numeric_part.match(s) if m: return int(m.group(1)) return None class Connection(_mysql.connection): """MySQL Database Connection Object""" default_cursor = cursors.Cursor def __init__(self, *args, **kwargs): """ Create a connection to the database. It is strongly recommended that you only use keyword parameters. Consult the MySQL C API documentation for more information. host string, host to connect user string, user to connect as passwd string, password to use db string, database to use port integer, TCP/IP port to connect to unix_socket string, location of unix_socket to use conv conversion dictionary, see MySQLdb.converters connect_timeout number of seconds to wait before the connection attempt fails. compress if set, compression is enabled named_pipe if set, a named pipe is used to connect (Windows only) init_command command which is run once the connection is created read_default_file file from which default client values are read read_default_group configuration group to use from the default file cursorclass class object, used to create cursors (keyword only) use_unicode If True, text-like columns are returned as unicode objects using the connection's character set. Otherwise, text-like columns are returned as strings. columns are returned as normal strings. Unicode objects will always be encoded to the connection's character set regardless of this setting. charset If supplied, the connection character set will be changed to this character set (MySQL-4.1 and newer). This implies use_unicode=True. sql_mode If supplied, the session SQL mode will be changed to this setting (MySQL-4.1 and newer). For more details and legal values, see the MySQL documentation. client_flag integer, flags to use or 0 (see MySQL docs or constants/CLIENTS.py) ssl dictionary or mapping, contains SSL connection parameters; see the MySQL documentation for more details (mysql_ssl_set()). If this is set, and the client does not support SSL, NotSupportedError will be raised. local_infile integer, non-zero enables LOAD LOCAL INFILE; zero disables autocommit If False (default), autocommit is disabled. If True, autocommit is enabled. If None, autocommit isn't set and server default is used. There are a number of undocumented, non-standard methods. 
See the documentation for the MySQL C API for some hints on what they do. """ from MySQLdb.constants import CLIENT, FIELD_TYPE from MySQLdb.converters import conversions from weakref import proxy kwargs2 = kwargs.copy() if 'conv' in kwargs: conv = kwargs['conv'] else: conv = conversions conv2 = {} for k, v in conv.items(): if isinstance(k, int) and isinstance(v, list): conv2[k] = v[:] else: conv2[k] = v kwargs2['conv'] = conv2 cursorclass = kwargs2.pop('cursorclass', self.default_cursor) charset = kwargs2.pop('charset', '') if charset: use_unicode = True else: use_
unicode = False use_unicode = kwargs2.pop('use_unicode', use_unicode)
sql_mode = kwargs2.pop('sql_mode', '') client_flag = kwargs.get('client_flag', 0) client_version = tuple([ numeric_part(n) for n in _mysql.get_client_info().split('.')[:2] ]) if client_version >= (4, 1): client_flag |= CLIENT.MULTI_STATEMENTS if client_version >= (5, 0): client_flag |= CLIENT.MULTI_RESULTS kwargs2['client_flag'] = client_flag # PEP-249 requires autocommit to be initially off autocommit = kwargs2.pop('autocommit', False) super(Connection, self).__init__(*args, **kwargs2) self.cursorclass = cursorclass self.encoders = dict([ (k, v) for k, v in conv.items() if type(k) is not int ]) self._server_version = tuple([ numeric_part(n) for n in self.get_server_info().split('.')[:2] ]) db = proxy(self) def _get_string_literal(): def string_literal(obj, dummy=None): return db.string_literal(obj) return string_literal def _get_unicode_literal(): def unicode_literal(u, dummy=None): return db.literal(u.encode(unicode_literal.charset)) return unicode_literal def _get_string_decoder(): def string_decoder(s): return s.decode(string_decoder.charset) return string_decoder string_literal = _get_string_literal() self.unicode_literal = unicode_literal = _get_unicode_literal() self.string_decoder = string_decoder = _get_string_decoder() if not charset: charset = self.character_set_name() self.set_character_set(charset) if sql_mode: self.set_sql_mode(sql_mode) if use_unicode: self.converter[FIELD_TYPE.STRING].append((None, string_decoder)) self.converter[FIELD_TYPE.VAR_STRING].append((None, string_decoder)) self.converter[FIELD_TYPE.VARCHAR].append((None, string_decoder)) self.converter[FIELD_TYPE.BLOB].append((None, string_decoder)) self.encoders[types.StringType] = string_literal self.encoders[types.UnicodeType] = unicode_literal self._transactional = self.server_capabilities & CLIENT.TRANSACTIONS if self._transactional: if autocommit is not None: self.autocommit(autocommit) self.messages = [] def autocommit(self, on): on = bool(on) if self.get_autocommit() != on: _mysql.connection.autocommit(self, on) def cursor(self, cursorclass=None): """ Create a cursor on which queries may be performed. The optional cursorclass parameter is used to create the Cursor. By default, self.cursorclass=cursors.Cur
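A hedged PEP-249 usage sketch for the Connection class documented above; the host/user/password/database values are placeholders and a reachable MySQL server is assumed (Python 2, matching the module):

```python
import MySQLdb

conn = MySQLdb.connect(host='localhost', user='app', passwd='secret',
                       db='appdb', charset='utf8', use_unicode=True)
try:
    cur = conn.cursor()
    cur.execute("SELECT VERSION()")
    print cur.fetchone()
    conn.commit()   # autocommit is off by default, per PEP-249
finally:
    conn.close()
```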
gdsfactory/gdsfactory
gdsfactory/geometry/outline.py
Python
mit
2,655
0.002637
import phidl.geometry as pg import gdsfactory as gf from gdsfactory.component import Component @gf.cell def outline(elements, **kwargs) -> Component: """ Returns Component containing the outlined polygon(s). wraps phidl.geometry.outline Creates an outline around all the polygons passed in the `elements` argument. `elements` may be a Device, Polygon, or list of Devices. Args: elements: Device(/Reference), list of Device(/Reference), or Polygon Polygons to outline or Device containing polygons to outline. Keyword Args: distance: int or float Distance to offset polygons. Positive values expand, negative shrink. precision: float Desired precision for rounding vertex coordinates. num_div
isions:
array-like[2] of int
            The number of divisions with which the geometry is divided into
            multiple rectangular regions. This allows for each region to be
            processed sequentially, which is more computationally efficient.
        join: {'miter', 'bevel', 'round'}
            Type of join used to create the offset polygon.
        tolerance: int or float
            For miter joints, this number must be at least 2 and it represents
            the maximal distance in multiples of offset between new vertices and
            their original position before beveling to avoid spikes at acute
            joints. For round joints, it indicates the curvature resolution in
            number of points per full circle.
        join_first: bool
            Join all paths before offsetting to avoid unnecessary joins in
            adjacent polygon sides.
        max_points: int
            The maximum number of vertices within the resulting polygon.
        open_ports: bool or float
            If not False, holes will be cut in the outline such that the Ports
            are not covered. If True, the holes will have the same width as the
            Ports. If a float, the holes will be widened by that value (useful
            for fully clearing the outline around the Ports for positive-tone
            processes).
        layer: int, array-like[2], or set
            Specific layer(s) to put polygon geometry on.
    """
    return gf.read.from_phidl(component=pg.outline(elements, **kwargs))


def test_outline():
    e1 = gf.components.ellipse(radii=(6, 6))
    e2 = gf.components.ellipse(radii=(10, 4))
    c = outline([e1, e2])
    assert int(c.area()) == 52


if __name__ == "__main__":
    e1 = gf.components.ellipse(radii=(6, 6))
    e2 = gf.components.ellipse(radii=(10, 4))
    c = outline([e1, e2])
    c.show()
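A hedged usage sketch for the keyword arguments documented above; the distance and layer values are illustrative only:

```python
import gdsfactory as gf
from gdsfactory.geometry.outline import outline

e = gf.components.ellipse(radii=(6, 6))
c = outline(e, distance=0.5, layer=(2, 0))  # positive distance expands the outline
c.show()
```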
shabab12/edx-platform
lms/djangoapps/courseware/tests/test_grades.py
Python
agpl-3.0
18,523
0.000702
""" Test grade calculation. """ from django.http import Http404 from django.test import TestCase from django.test.client import RequestFactory from mock import patch, MagicMock from nose.plugins.attrib import attr from opaque_keys.edx.locations import SlashSeparatedCourseKey from opaque_keys.edx.locator import CourseLocator, BlockUsageLocator from courseware.grades import ( grade, iterate_grades_for, ProgressSummary, get_module_score ) from courseware.module_render import get_module from courseware.model_data import FieldDataCache, set_score from courseware.tests.helpers import ( LoginEnrollmentTestCase, get_request_for_user ) from capa.tests.response_xml_factory
import MultipleChoiceResponseXMLFactory from student.tests.factories import UserFactory from student.models import CourseEnrollment from xmodule.modulestore.tests.factories impor
t CourseFactory, ItemFactory from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase def _grade_with_errors(student, course, keep_raw_scores=False): """This fake grade method will throw exceptions for student3 and student4, but allow any other students to go through normal grading. It's meant to simulate when something goes really wrong while trying to grade a particular student, so we can test that we won't kill the entire course grading run. """ if student.username in ['student3', 'student4']: raise Exception("I don't like {}".format(student.username)) return grade(student, course, keep_raw_scores=keep_raw_scores) @attr('shard_1') class TestGradeIteration(SharedModuleStoreTestCase): """ Test iteration through student gradesets. """ COURSE_NUM = "1000" COURSE_NAME = "grading_test_course" @classmethod def setUpClass(cls): super(TestGradeIteration, cls).setUpClass() cls.course = CourseFactory.create( display_name=cls.COURSE_NAME, number=cls.COURSE_NUM ) def setUp(self): """ Create a course and a handful of users to assign grades """ super(TestGradeIteration, self).setUp() self.students = [ UserFactory.create(username='student1'), UserFactory.create(username='student2'), UserFactory.create(username='student3'), UserFactory.create(username='student4'), UserFactory.create(username='student5'), ] def test_empty_student_list(self): """If we don't pass in any students, it should return a zero-length iterator, but it shouldn't error.""" gradeset_results = list(iterate_grades_for(self.course.id, [])) self.assertEqual(gradeset_results, []) def test_nonexistent_course(self): """If the course we want to get grades for does not exist, a `Http404` should be raised. This is a horrible crossing of abstraction boundaries and should be fixed, but for now we're just testing the behavior. :-(""" with self.assertRaises(Http404): gradeset_results = iterate_grades_for(SlashSeparatedCourseKey("I", "dont", "exist"), []) gradeset_results.next() def test_all_empty_grades(self): """No students have grade entries""" all_gradesets, all_errors = self._gradesets_and_errors_for(self.course.id, self.students) self.assertEqual(len(all_errors), 0) for gradeset in all_gradesets.values(): self.assertIsNone(gradeset['grade']) self.assertEqual(gradeset['percent'], 0.0) @patch('courseware.grades.grade', _grade_with_errors) def test_grading_exception(self): """Test that we correctly capture exception messages that bubble up from grading. Note that we only see errors at this level if the grading process for this student fails entirely due to an unexpected event -- having errors in the problem sets will not trigger this. We patch the grade() method with our own, which will generate the errors for student3 and student4. 
""" all_gradesets, all_errors = self._gradesets_and_errors_for(self.course.id, self.students) student1, student2, student3, student4, student5 = self.students self.assertEqual( all_errors, { student3: "I don't like student3", student4: "I don't like student4" } ) # But we should still have five gradesets self.assertEqual(len(all_gradesets), 5) # Even though two will simply be empty self.assertFalse(all_gradesets[student3]) self.assertFalse(all_gradesets[student4]) # The rest will have grade information in them self.assertTrue(all_gradesets[student1]) self.assertTrue(all_gradesets[student2]) self.assertTrue(all_gradesets[student5]) ################################# Helpers ################################# def _gradesets_and_errors_for(self, course_id, students): """Simple helper method to iterate through student grades and give us two dictionaries -- one that has all students and their respective gradesets, and one that has only students that could not be graded and their respective error messages.""" students_to_gradesets = {} students_to_errors = {} for student, gradeset, err_msg in iterate_grades_for(course_id, students): students_to_gradesets[student] = gradeset if err_msg: students_to_errors[student] = err_msg return students_to_gradesets, students_to_errors class TestFieldDataCacheScorableLocations(SharedModuleStoreTestCase): """ Make sure we can filter the locations we pull back student state for via the FieldDataCache. """ @classmethod def setUpClass(cls): super(TestFieldDataCacheScorableLocations, cls).setUpClass() cls.course = CourseFactory.create() chapter = ItemFactory.create(category='chapter', parent=cls.course) sequential = ItemFactory.create(category='sequential', parent=chapter) vertical = ItemFactory.create(category='vertical', parent=sequential) ItemFactory.create(category='video', parent=vertical) ItemFactory.create(category='html', parent=vertical) ItemFactory.create(category='discussion', parent=vertical) ItemFactory.create(category='problem', parent=vertical) def setUp(self): super(TestFieldDataCacheScorableLocations, self).setUp() self.student = UserFactory.create() CourseEnrollment.enroll(self.student, self.course.id) class TestProgressSummary(TestCase): """ Test the method that calculates the score for a given block based on the cumulative scores of its children. This test class uses a hard-coded block hierarchy with scores as follows: a +--------+--------+ b c +--------------+-----------+ | d e f g +-----+ +-----+-----+ | | h i j k l m n (2/5) (3/5) (0/1) - (1/3) - (3/10) """ # Tell Django to clean out all databases, not just default multi_db = True def setUp(self): super(TestProgressSummary, self).setUp() self.course_key = CourseLocator( org='some_org', course='some_course', run='some_run' ) self.loc_a = self.create_location('chapter', 'a') self.loc_b = self.create_location('section', 'b') self.loc_c = self.create_location('section', 'c') self.loc_d = self.create_location('vertical', 'd') self.loc_e = self.create_location('vertical', 'e') self.loc_f = self.create_location('vertical', 'f') self.loc_g = self.create_location('vertical', 'g') self.loc_h = self.create_location('problem', 'h') self.loc_i
mapillary/OpenSfM
opensfm/commands/create_submodels.py
Python
bsd-2-clause
454
0
from opensfm.actions import create_submodels from . import command import argparse from opensfm.dataset import DataSet class Command(command.CommandBase): name = "create_submodels" help = "Split the dataset into smaller submodels" def run_impl(self, dataset: DataSet, args: argparse.Namespace) -> None: create_submodels.run_dataset(dataset) def add_arguments_impl(self, p
ars
er: argparse.ArgumentParser) -> None: pass
ValRose/Rose_Bone
PythonLibraries/lcd_demo.py
Python
mit
1,617
0.006184
''' Created on Dec 13, 2015 @author: Shannon Litwin ''' import Adafruit_BBIO.GPIO as GPIO import Adafruit_BBIO.PWM as PWM import Lib_LCD as LCD import Lib_Main as BBB import sys import signal import time leftForward = "P8_46" leftBackward = "P8_45" rightForward = "P9_14" rightBackward = "P9_16" def Control_C_Exit(signal, frame): GPIO.cleanup() PWM.cleanup() print("\nProgram halted! Exiting program!") sys.exit() signal.signal(signal.SIGINT, Control_C_Exit) # For cleaning up mid run '''Keep to show Dr. Berry''' LCD.init() time.sleep(1) LCD.backlight("on") time.sleep(2) LCD.backlight("off") time.sleep(1) line_message = "Hi Dr. Berry." LCD.write_line(line_message) time.sleep(5) LCD.cursor_home() long_message = "This is 35 chars and needs 2 lines." LCD.write_screen(long_message) time.sleep(5) LCD.cursor_home() long_message = "Which is fine because the screen can hold up to 80 characters." LCD.write_screen(long_message) time.sleep(5) LCD.cursor_home() long_message = "However, if the message is too long it will truncate. That is why you cannot read this entire message." LCD.write_screen(long_message) time.sleep(5) LCD.clear()
m1 = "It works 1" m2 = "It works 2" m3 = "It works 3" m4 = "It works 4" time.sleep(1) LCD.goto_line(4) LCD.write_line(m4) time.sleep(1) LCD.goto_line(3) LCD.write_line(m3) time.sleep(1) LCD.goto_line(2) LCD.write_line(m2) time.sleep(1) LCD.goto_line(1) LCD.write_line(m1) LCD.clear() #pause with while loop example #start = time.time() #end = time.time() #while((end - start) < 3): # end = time.time() BBB.cl
eanup_all()
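The demo above relies on the 80-character screen capacity and the 4 addressable lines. A hardware-free sketch of the implied truncation/wrapping rule (the 20x4 geometry is an assumption consistent with goto_line(1..4) and the 80-character limit):

```python
def to_screen(message, cols=20, rows=4):
    message = message[:cols * rows]   # anything past 80 chars is dropped
    return [message[i:i + cols] for i in range(0, len(message), cols)]

for row in to_screen("However, if the message is too long it will truncate."):
    print(row)
```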
cdd1969/pygwa
lib/flowchart/nodes/n000_testnode/myNode.py
Python
gpl-2.0
578
0.00519
from lib.flowchart.nodes.generalNode import NodeWithCtrlWidget class myNode(NodeWithCtrl
Widget):
    '''This is a test d
ocstring''' nodeName = 'myTestNode' uiTemplate = [{'name': 'HNO3', 'type': 'list', 'value': 'Closest Time'}, {'name': 'C2H5OH', 'type': 'bool', 'value': 0}, {'name': 'H20', 'type': 'str', 'value': '?/?'}] def __init__(self, name, **kwargs): super(myNode, self).__init__(name, terminals={'In': {'io': 'in'}, 'Out': {'io': 'out'}}, **kwargs) def process(self, In): print ('processing')
sarielsaz/sarielsaz
test/functional/sendheaders.py
Python
mit
24,054
0.003492
#!/usr/bin/env python3 # Copyright (c) 2014-2016 The Sarielsaz Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test behavior of headers messages to announce blocks. Setup: - Two nodes, two p2p connections to node0. One p2p connection should only ever receive inv's (omitted from testing description below, this is our control). Second node is used for creating reorgs. Part 1: No headers announcements before "sendheaders" a. node mines a block [expect: inv] send getdata for the block [expect: block] b. node mines another block [expect: inv] send getheaders and getdata [expect: headers, then block] c. node mines another block [expect: inv] peer mines a block, announces with header [expect: getdata] d. node mines another block [expect: inv] Part 2: After "sendheaders", headers announcements should generally work. a. peer sends sendheaders [expect: no response] peer sends getheaders with current tip [expect: no response] b. node mines a block [expect: tip header] c. for N in 1, ..., 10: * for announce-type in {inv, header} - peer mines N blocks, announces with announce-type [ expect: getheaders/getdata or getdata, deliver block(s) ] - node mines a block [ expect: 1 header ] Part 3: Headers announcements stop after large reorg and resume after getheaders or inv from peer. - For response-type in {inv, getheaders} * node mines a 7 block reorg [ expect: headers announcement of 8 blocks ] * node mines an 8-block reorg [ expect: inv at tip ] * peer responds with getblocks/getdata [expect: inv, blocks ] * node mines another block [ expect: inv at tip, peer sends getdata, expect: block ] * node mines another block at tip [ expect: inv ] * peer responds with getheaders with an old hashstop more than 8 blocks back [expect: headers] * peer requests block [ expect: block ] * node mines another block at tip [ expect: inv, peer sends getdata, expect: block ] * peer sends response-type [expect headers if getheaders, getheaders/getdata if mining new block] * node mines 1 block [expect: 1 header, peer responds with getdata] Part 4: Test direct fetch behavior a. Announce 2 old block headers. Expect: no getdata requests. b. Announce 3 new blocks via 1 headers message. Expect: one getdata request for all 3 blocks. (Send blocks.) c. Announce 1 header that forks off the last two blocks. Expect: no response. d. Announce 1 more header that builds on that fork. Expect: one getdata request for two blocks. e. Announce 16 more headers that build on that fork. Expect: getdata request for 14 more blocks. f. Announce 1 more header that builds on that fork. Expect: no response. Part 5: Test handling of headers that don't connect. a. Repeat 10 times: 1. Announce a header that doesn't connect. Expect: getheaders message 2. Send headers chain. Expect: getdata for the missing blocks, tip update. b. Then send 9 more headers that don't connect. Expect: getheaders message each time. c. Announce a header that does connect. Expect: no response. d. Announce 49 headers that don't connect. Expect: getheaders message each time. e. Announce one more that doesn't connect. Expect: disconnect. 
""" from test_framework.mininode import * from test_framework.test_framework import SarielsazTestFramework from test_framework.util import * from test_framework.blocktools import create_block, create_coinbase direct_fetch_response_time = 0.05 class TestNode(NodeConnCB): def __init__(self): super().__init__() self.block_announced = False self.last_blockhash_announced = None def clear_last_announcement(self): with mininode_lock: self.block_announced = False self.last_message.pop("inv", None) self.last_message.pop("headers", None) # Request data for a list of block hashes def get_data(self, block_hashes): msg = msg_getdata() for x in block_hashes: msg.inv.append(CInv(2, x)) self.connection.send_message(msg) def get_headers(self, locator, hashstop): msg = msg_getheaders() msg.locator.vHave = locator msg.hashstop = hashstop self.connection.send_message(msg) def send_block_inv(self, blockhash): msg = msg_inv() msg.inv = [CInv(2, blockhash)] self.connection.send_message(msg) def on_inv(self, conn, message): self.block_announced = True self.last_blockhash_announced = message.inv[-1].hash def on_headers(self, conn, message): if len(message.headers): self.block_announced = True message.headers[-1].calc_sha256() self.last_blockhash_announced = message.headers[-1].sha256 # Test whether the last announcement we received had the # right header or the right inv # inv and headers should be lists of block hashes def check_last_announcement(self, headers=None, inv=None): expect_headers = headers if headers != None else [] expect_inv = inv if inv != None else [] test_function = lambda: self.block_announced wait_until(test_function, timeout=60, lock=mininode_lock) with mininode_lock: self.block_announced = False success = True compare_inv = [] if "inv" in self.last_message: compare_inv = [x.hash for x in self.last_message["inv"].inv] if compare_inv != expect_inv: success = False hash_headers = [] if "headers" in self.last_message: # treat headers as a list of block hashes hash_headers = [ x.sha256 for x in self.last_message["headers"].headers ] if hash_headers != expect_headers: success = False self.last_message.pop("inv", None) self.last_message.pop("headers", None) return success def wait_for_getdata(self, hash_list, timeout=60): if hash_list == []: return test_function = lambda: "getdata" in self.last_message and [x.hash for x in self.last_message["getdata"].inv] == hash_list wait_until(test_function, timeout=timeout, lock=mininode_lock) return def wait_for_block_announcement(self, block_hash, timeout=60): test_function = lambda: self.last_blockhash_announced == block_hash wait_until(test_function, timeout=timeout, lock=mininode_lock) return def send_header_for_blocks(self, new_blocks): headers_message = msg_headers() headers_message.headers = [ CBlockHeader(b) for b in new_blocks ] self.send_message(headers_message) def send_getblocks(self, locator): getblocks_message = msg_getblocks() getblocks_message.locator.vHave = locator self.send_message(getblocks_message) class SendHeadersTest(SarielsazTestFramework): def set_test_params(self): self.setup_clean_chain = True self.num_nodes = 2 # mine count blocks and return the new tip def mine_blocks(self, count): # Clear out last block announcement from each p2p listener [ x.clear_last_announcement() for x in self.p2p_connections ] self.nodes[0].generate(count) return int(self.nodes[0].getbestblockhash(), 16) # mine a reorg that invalidates length blocks (replacing them with # length+1 blocks). 
# Note: we clear the state of our p2p connections after the # to-be-reorged-out blocks are mined, so that we don't break later tests.
# return the list of block hashes newly mined def mine_reorg(self, length): self.nodes[0].generate(length) # make sure all invalidated blocks are node0's sync_blocks(self.nodes, wait=0.1) for x in self.p2p_connections: x.wait_for_block_announcement(int(self.nodes[0].getbestblockhash(), 16)) x.clear_last_announcement() tip_height = self.nodes[1].getblockcount() hash_to_invalidate
= self.nodes[1].getblockhash(tip_height-(
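A hedged usage sketch of the TestNode helpers above, following the Part 2 handshake from the docstring. `test_node` and `new_tip` are assumed to exist (a connected peer and a freshly mined block hash), and `msg_sendheaders`, `sync_with_ping`, and `wait_for_block` are assumed from the shared mininode framework imported at the top of the file:

```python
test_node.send_message(msg_sendheaders())  # opt in to headers announcements
test_node.sync_with_ping()
# A block mined after the opt-in should now arrive as a headers message:
assert test_node.check_last_announcement(headers=[new_tip])
test_node.get_data([new_tip])              # then request the block body
test_node.wait_for_block(new_tip)
```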
Esri/ops-server-config
Utilities/ListServiceWorkspaces.py
Python
apache-2.0
6,504
0.009533
#!/usr/bin/env python #------------------------------------------------------------------------------ # Copyright 2014 Esri # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. #============================================================================== #Name: ListServiceWorkspaces.py # #Purpose: Output service workspace information for each service in # specified ArcGIS Server site. # #============================================================================== import sys, os, traceback, datetime, ast, copy, json sys.path.append(os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'SupportFiles')) from AGSRestFunctions import getServiceList from AGSRestFunctions import getServiceManifest scriptName = os.path.basename(sys.argv[0]) exitErrCode = 1 debug = False sectionBreak = '=' * 175 sectionBreak1 = '-' * 175 def check_args(): # ---------------
------------------------------------------------------ # Check arguments # --------------------------------------------------------------------- if len(sys.argv) <> 6: print '\n' + scriptName + ' <Server_FullyQualifiedDomainName> <Server_Port> <User_Name> <Password> <Use_SSL: Yes|No>' print '\nWhere:' print '\n\t<Server_FullyQualifiedDomainName> (required): th
e fully qualified domain name of the ArcGIS Server machine.' print '\n\t<Server_Port> (required): the port number of the ArcGIS Server (specify # if no port).' print '\n\t<User_Name> (required): ArcGIS Server for ArcGIS site administrator.' print '\n\t<Password> (required): Password for ArcGIS Server for ArcGIS site administrator user.' print '\n\t<Use_SSL: Yes|No> (required) Flag indicating if ArcGIS Server requires HTTPS.\n' return None else: # Set variables from parameter values server = sys.argv[1] port = sys.argv[2] adminuser = sys.argv[3] password = sys.argv[4] useSSL = sys.argv[5] if port.strip() == '#': port = None if useSSL.strip().lower() in ['yes', 'ye', 'y']: useSSL = True else: useSSL = False return server, port, adminuser, password, useSSL def parseService(service): # Parse folder and service nameType folder = None serviceNameType = None parsedService = service.split('//') if len(parsedService) == 1: serviceNameType = parsedService[0] else: folder = parsedService[0] serviceNameType = parsedService[1] return folder, serviceNameType def main(): totalSuccess = True # ------------------------------------------------- # Check arguments # ------------------------------------------------- results = check_args() if not results: sys.exit(exitErrCode) server, port, adminuser, password, useSSL = results if debug: print server, port, adminuser, password, useSSL try: # ------------------------------------------------- # Get all services that exist on server # ------------------------------------------------- if useSSL: protocol = 'https' else: protocol = 'http' allServices = getServiceList(server, port, adminuser, password) # Remove certain services from collection excludeServices = ['SampleWorldCities.MapServer'] services = [service for service in allServices if service not in excludeServices] if len(services) == 0: raise Exception('ERROR: There are no user published ArcGIS Server services. Have you published the ArcGIS Server services?') # ------------------------------------------------- # List service workspaces # ------------------------------------------------- numServices = len(services) i = 0 # Print header print 'ArcGIS Server|Service|On Server Connection String/or Path' for service in services: onServerStr = '' folder, serviceNameType = parseService(service) serviceManifest = getServiceManifest(server, port, adminuser, password, folder, serviceNameType) databases = serviceManifest.get('databases') if databases: onServerConnStr = databases[0].get('onServerConnectionString') if onServerConnStr: if onServerConnStr.find('DB_CONNECTION_PROPERTIES') > 0: # It's an enterprise geodatabase onServerStr = onServerConnStr.split('DB_CONNECTION_PROPERTIES=')[1] else: onServerStr = onServerConnStr.replace('DATABASE=','') else: if serviceNameType.find('.GeocodeServer') > 0 or serviceNameType.find('.GPServer') > 0: onServerStr = serviceManifest.get('resources')[0].get('serverPath') if len(onServerStr) == 0: onServerStr = str(serviceManifest) print '{}|{}|{}'.format(server, service, onServerStr) except: totalSuccess = False # Get the traceback object tb = sys.exc_info()[2] tbinfo = traceback.format_tb(tb)[0] # Concatenate information together concerning the error into a message string pymsg = "PYTHON ERRORS:\nTraceback info:\n" + tbinfo + "\nError Info:\n" + str(sys.exc_info()[1]) # Print Python error messages for use in Python / Python Window print print "***** ERROR ENCOUNTERED *****" print pymsg + "\n" finally: if totalSuccess: sys.exit(0) else: sys.exit(1) if __name__ == "__main__": main()
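parseService() above splits the REST admin's 'folder//ServiceName.Type' form. Both shapes it handles, illustrated with one real and one hypothetical service name:

```python
assert parseService('SampleWorldCities.MapServer') == (None, 'SampleWorldCities.MapServer')
assert parseService('Hosted//Roads.FeatureServer') == ('Hosted', 'Roads.FeatureServer')
```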
ksmit799/Toontown-Source
toontown/minigame/RingTrackGroups.py
Python
mit
8,136
0.004671
import math import RingGameGlobals import RingAction import RingTracks import RingTrack import RingTrackGroup from direct.showbase import PythonUtil STATIC = 0 SIMPLE = 1 COMPLEX = 2 def getRandomRingTrackGroup(type, numRings, rng): global trackListGenFuncs funcTable = trackListGenFuncs[type][numRings - 1] func = rng.choice(funcTable) tracks, tOffsets, period = func(numRings, rng) tracks, tOffsets = __scramble(tracks, tOffsets, rng) trackGroup = RingTrackGroup.RingTrackGroup(tracks, period, trackTOffsets=tOffsets, reverseFlag=rng.choice([0, 1]), tOffset=rng.random()) return trackGroup def __scramble(tracks, tOffsets, rng): newTracks = [] if tOffsets == None: newTOffsets = None else: newTOffsets = [] used = [0] * len(tracks) count = 0 while count < len(tracks): i = rng.randint(0, len(tracks) - 1) if not used[i]: used[i] = 1 count += 1 newTracks.append(tracks[i]) if newTOffsets != None: newTOffsets.append(tOffsets[i]) return (newTracks, newTOffsets) def angleToXY(angle, radius = 1.0): return [radius * math.sin(angle), radius * math.cos(angle)] def getTightCircleStaticPositions(numRings): positions = [] if numRings == 1: positions.append([0, 0]) else: radius = RingGameGlobals.RING_RADIUS * 1.5 / RingGameGlobals.MAX_TOONXZ step = 2.0 * math.pi / float(numRings) for i in range(0, numRings): angle = i * step + step / 2.0 positions.append(angleToXY(angle, 1.0 / 3.0)) return positions def get_keypad(numRings, rng): positions = (RingTracks.center, RingTracks.up, RingTracks.down, RingTracks.left, RingTracks.right, RingTracks.ul, RingTracks.ur, RingTracks.lr, RingTracks.ll) tracks = [] usedPositions = [None] posScale = 0.7 + rng.random() * 0.2 for i in range(0, numRings): pos = None while pos in usedPositions: pos = rng.choice(positions) usedPositions.append(pos) scaledPos = [0, 0] scaledPos[0] = pos[0] * posScale scaledPos[1] = pos[1] * posScale action = RingAction.RingActionStaticPos(scaledPos) track = RingTrack.RingTrack([action], [1.0]) tracks.append(track) return (tracks, None, 1.0) fullCirclePeriod = 6.0 plusPeriod = 4.0 def get_evenCircle(numRings, rng): tracks = [] tOffsets = [] for i in range(0, numRings): actions, durations = RingTracks.getCircleRingActions() track = RingTrack.RingTrack(actions, durations) tracks.append(track) tOffsets.append(float(i) / numRings) return (tracks, tOffsets, fullCirclePeriod) def get_followCircle(numRings, rng): tracks = [] tOffsets = [] for i in range(0, numRings): actions, durations = RingTracks.getCircleRingActions() track = RingTrack.RingTrack(actions, durations) delay = 0.12 tracks.append(track) tOffsets.append(float(i) * delay) return (tracks, tOffsets, fullCirclePeriod) def get_evenCircle_withStationaryCenterRings(numRings, rng): tracks = [] tOffsets = [] numCenterRings = rng.randint(1, numRings - 1) positions = getTightCircleStaticPositions(numCenterRings) for i in range(0, numCenterRings): action = RingAction.RingActionStaticPos(positions[i]) track = RingTrack.RingTrack([action]) tracks.append(track) tOffsets.append(0) numOuterRings = numRings - numCenterRings for i in range(0, numOuterRings): actions, durations = RingTracks.getCircleRingActions() track = RingTrack.RingTrack(actions, durations) tracks.append(track) tOffsets.append(float(i) / numOuterRings) return (tracks, tOffsets, fullCirclePeriod) def __get_Slots(numRings, rng, vertical = 1): tracks = [] tOffsets = [] fpTab = [] for i in range(numRings): fpTab.append(PythonUtil.lineupPos(i, numRings, 2.0 / 3)) offset = 1 - fpTab[-1] offset = rng.random() * (offset * 2) - offset fpTab = map(lambda x: 
x + offset, fpTab) for i in range(0, numRings): if vertical: getActionsFunc = RingTracks.getVerticalSlotActions else: getActionsFunc = RingTracks.getHorizontalSlotActions actions, durations = getActionsFunc(fpTab[i]) track = RingTrack.RingTrack(actions, durations) tracks.append(track) tOffsets.append(float(i) / numRings * 0.5) return (tracks, tOffsets, fullCirclePeriod) def get_verticalSlots(numRings, rng): return __get_Slots(numRings, rng, vertical=1) def get_horizontalSlots(numRings, rng): return __get_Slots(numRings, rng, vertical=0) def get_plus(numRings, rng): up = RingTracks.getPlusUpRingActions down = RingTracks.getPlusDownRingActions left = RingTracks.getPlusLeftRingActions right = RingTracks.getPlusRightRingActions actionSets = {2: [[up, down], [left, right]], 3: [[up, left, right], [left, up, down], [down, left, right], [right, up, down]], 4: [[up, down, left, right]]} tracks = [] actionSet = rng.choice(actionSets[numRings]) for i in range(0, numRings): actions, durations = actionSet[i]() track = RingTrack.RingTrack(actions, durations) tracks.append(track) return (tracks, [0] * numRings, plusPeriod) infinityPeriod = 5.0 fullCirclePeriodFaster = 5.0 plusPeriodFaster = 2.5 infinityTOffsets = [] def __initInfinityTOffsets(): global infinityTOffsets offsets = [[], [], [], []] offsets[0] = [0.0] offsets[1] = [0.0, 3.0 / 4.0] offsets[2] = [0.0, 1.0 / 3.0, 2.0
/ 3.0] inc = 14.0 / 23.0 for numRings in range(4, 5): o = [0] * numRings accum = 0.0 for i in range(0, numRings): o[i] = accum % 1.0 accum += inc offsets[numRings - 1] = o infinityTOffsets = offsets __in
itInfinityTOffsets() def get_vertInfinity(numRings, rng): tracks = [] for i in range(0, numRings): actions, durations = RingTracks.getVerticalInfinityRingActions() track = RingTrack.RingTrack(actions, durations) tracks.append(track) return (tracks, infinityTOffsets[numRings - 1], infinityPeriod) def get_horizInfinity(numRings, rng): tracks = [] for i in range(0, numRings): actions, durations = RingTracks.getHorizontalInfinityRingActions() track = RingTrack.RingTrack(actions, durations) tracks.append(track) return (tracks, infinityTOffsets[numRings - 1], infinityPeriod) def get_evenCircle_withStationaryCenterRings_FASTER(numRings, rng): tracks, tOffsets, period = get_evenCircle_withStationaryCenterRings(numRings, rng) return (tracks, tOffsets, fullCirclePeriodFaster) def get_plus_FASTER(numRings, rng): tracks, tOffsets, period = get_plus(numRings, rng) return (tracks, tOffsets, plusPeriodFaster) allFuncs = [[get_keypad], [get_evenCircle, get_followCircle, get_evenCircle_withStationaryCenterRings, get_verticalSlots, get_horizontalSlots, get_plus], [get_vertInfinity, get_horizInfinity, get_evenCircle_withStationaryCenterRings_FASTER, get_plus_FASTER]] dontUseFuncs = [[get_followCircle, get_evenCircle_withStationaryCenterRings, get_evenCircle_withStationaryCenterRings_FASTER, get_plus, get_plus_FASTER], [], [], []] trackListGenFuncs = [] def __listComplement(list1, list2): result = [] for item in list1: if item not in list2: result.append(item) return result def __initFuncTables(): global trackListGenFuncs table = [[], [], []] for diff in range(0, len(table)): table[diff] = [[], [], [], []] for numRings in range(0, len(table[diff])): table[diff][numRings] = __listComplement(allFuncs[diff], dontUseFuncs[numRings]) trackListGenFuncs = table __initFuncTables()
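getTightCircleStaticPositions() above spaces rings evenly on a circle through angleToXY(). A standalone check of that trigonometry: with two rings at radius 1/3, the points come out diametrically opposed:

```python
import math

def angle_to_xy(angle, radius=1.0):
    return [radius * math.sin(angle), radius * math.cos(angle)]

step = 2.0 * math.pi / 2  # two rings
p0 = angle_to_xy(0 * step + step / 2.0, 1.0 / 3.0)
p1 = angle_to_xy(1 * step + step / 2.0, 1.0 / 3.0)
assert abs(p0[0] + p1[0]) < 1e-9 and abs(p0[1] + p1[1]) < 1e-9
```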
ppizarror/korektor
test/_template.py
Python
gpl-2.0
1,642
0.00061
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
package/module TEST
Description of the test.

Author: PABLO PIZARRO @ github.com/ppizarror
Date: AUGUST 2016
License: GPLv2
"""
__author__ = "ppizarror"

# Library imports
# noinspection PyUnresolvedReferences
from _testpath import *  # @UnusedWildImport
import unittest

# Test constants
DISABLE_HEAVY_TESTS = True
DISABLE_HEAVY_TESTS_MSG = "Heavy tests were disabled"
VERBOSE = False

# Load arguments from the command line
if __name__ == '__main__':
    from bin.arguments import argument_parser_factory

    argparser = argument_parser_factory("Template Test", verbose=True, version=True,
                                        enable_skipped_test=True).parse_args()
    DISABLE_HEAVY_TESTS = argparser.enableHeavyTest
    VERBOSE = argparser.verbose

# UnitTest class
class ModuleTest(unittest.TestCase):
    def setUp(self):
        """
        Start of the tests.

        :return: voi
d
        :rtype: None
        """
        pass

    # noinspection PyMethodMayBeStatic
    def testA(self):
        """
        Example test.

        :return: void
        :rtype:
None """ pass @unittest.skipIf(DISABLE_HEAVY_TESTS, DISABLE_HEAVY_TESTS_MSG) def testSkipped(self): """ Ejemplo de test saltado. :return: void :rtype: None """ pass # Main test if __name__ == '__main__': runner = unittest.TextTestRunner() itersuite = unittest.TestLoader().loadTestsFromTestCase(ModuleTest) runner.run(itersuite)
cstipkovic/spidermonkey-research
testing/marionette/harness/setup.py
Python
mpl-2.0
1,214
0.002471
import os import re from setuptools import setup, find_packages THIS_DIR = os.path.dirname(os.path.realpath(__name__)) def read(*parts): with open(os.path.join(THIS_DIR, *parts)) as f: return f.read() def get_version(): return re.findall("__version__ = '([\d\.]+)'", read('marionette', '__init__.py'), re.M)[0] setup(name='marionette_client', version=get_version(), description="Marionette test automation client", long_description='See http://marionette-client.readthedocs.org/', classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers keywords='mozilla', author='Jonathan Griffin', author_email='jgriffin@mozilla.com', url='https://wiki.mozilla.org/Auto-tools/Projects/Marionette', license='MPL',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']), package_data={'marionette': ['touch/*.js']}, include_package_data=True, zip_safe=False, entry_points=""" # -*- Entry points: -*- [console_scripts] marionette = marionette.runtests:cl
i """, install_requires=read('requirements.txt').splitlines(), )
viswimmer1/PythonGenerator
data/python_files/28964260/services.py
Python
gpl-2.0
5,901
0.002542
import oauth2 as oauth import sher.settings as settings import cgi import urlparse import urllib import gdata.youtube import gdata.youtube.service import twitter class TwitterService(object): def __init__(self, consumer_key, consumer_secret): self.consumer_key = consumer_key self.consumer_secret = consumer_secret self.consumer = oauth.Consumer(self.consumer_key, self.consumer_secret) self.client = oauth.Client(self.consumer) self.access_token_url = "https://api.twitter.com/oauth/access_token" self.request_token_url = "https://api.twitter.com/oauth/request_token" self.authorize_url = "https://api.twitter.com/oauth/authorize" def get_request_token(self): request_token_url = self.request_token_url resp, content = self.client.request(request_token_url, "POST") if resp['status'] != '200': raise Exception("Invalid Response from Twitter") request_token = dict(cgi.parse_qsl(content)) self.request_token = request_token['oauth_token'] self.request_token_secret = request_token['oauth_token_secret'] return self.request_token def get_access_token(self, oauth_verifier): access_token_url = self.access_token_url token = oauth.Token(self.request_token, self.request_token_secret) token.set_verifier(oauth_verifier) client = oauth.Client(self.consumer, token) resp, content = client.request(access_token_url, "POST") if resp['status'] != '200': raise Exception("Invalid Response from Twitter") access_token = dict(cgi.parse_qsl(content)) self.access_token = access_token['oauth_token'] self.access_token_secret = access_token['oauth_token_secret'] return access_token def get_oauth_url(self, request_token): return "%s?oauth_token=%s" % (self.authorize_url, request_token) def authenticated(self, account): """Return an authenticated twitter API instance (python-twitter)""" return twitter.Api(consumer_key=self.consumer_key, consumer_secret=self.consumer_secret, access_token_key=account.oauth_token, access_token_secret=account.oauth_secret) twitter_service = TwitterService(settings.TWITTER_CONSUMER_KEY, settings.TWITTER_CONSUMER_SECRET) class YouTubeService(object): def __init__(self, developer_key, client_id): self.developer_key = developer_key self.client_id = client_id self.yt_service = gdata.youtube.service.YouTubeService() def get_authsub_url(self, callback): next = callback scope = "http://gdata.youtube.com" secure = False session = True return self.yt_service.GenerateAuthSubURL(next, scope, secure, session) def upgrade_to_session(self, token): """ Takes an authsub token and upgrades to session token then returns that token for storing. 
""" self.yt_service.SetAuthSubToken(token) self.yt_service.UpgradeToSessionToken() return self.yt_service.GetAuthSubToken() def authenticated(self, account): self.yt_service.SetAuthSubToken(account.authsub_token) self.yt_service.developer_key = self.developer_key self.yt_service.client_id = self.client_id return self.yt_service youtube_service = YouTubeService(settings.YOUTUBE_DEVELOPER_KEY, settings.YOUTUBE_CLIENT_ID) class FacebookService(object): def __init__(self, app_id, app_key, app_secret): self.app_id = app_id self.app_key = app_key self.app_secret = app_secret def get_oauth_url(self): """Offline access gets a long-lasting token.""" return "https://www.facebook.com/dialog/oauth?client_id=%s&redirect_uri=%s&scope=read_stream,publish_stream,offline_access" def get_access_token_url(self, callback, code): self.access_token_url = "https://graph.facebook.com/oauth/access_token?client_id=%s&redirect_uri=%s&client_secret=%s&code=%s" % (self.app_id, callback, self.app_secret, code) return self.access_token_url def authenticated(self, account): from apis import facebook graph = facebook.GraphAPI(account.oauth_token) return graph facebook_service = FacebookService(settings.FACEBOOK_APP_ID, settings.FACEBOOK_APP_KEY, settings.FACEBOOK_APP_SECRET) class FlickrService(object): def __init__(self, api_key, secret): sel
f.api_key = api_key self.secret = secret self.auth_url = "http://flickr.com/services/auth/?" self.rest_url = "http
://flickr.com/services/rest/?" def gen_sig(self, base_url, **kwargs): from md5 import md5 params = {} for kwarg in kwargs: params.update({kwarg: kwargs[kwarg]}) pkeys = params.keys() pkeys.sort() sigstring = self.secret + "" for k in pkeys: sigstring += k + str(params[k]) params['api_sig'] = md5(sigstring).hexdigest() return base_url + urllib.urlencode(params) def get_oauth_url(self): """Generates oauth url with 'delete' permission which provides both read and write permissions.""" url = self.gen_sig(self.auth_url, api_key=self.api_key, perms="delete") return url def get_auth_token(self, token): """Calls flickrs getToken to obtain a persistent auth token.""" url = self.gen_sig(self.rest_url, api_key=self.api_key, method="flickr.auth.getToken", frob=token) return url def authenticated(self, account, format="etree"): import flickrapi return flickrapi.FlickrAPI(settings.FLICKR_KEY, secret=settings.FLICKR_SECRET, token=account.oauth_token, format=format) flickr_service = FlickrService(settings.FLICKR_KEY, settings.FLICKR_SECRET)
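gen_sig() above implements Flickr's classic signing rule: MD5 over the shared secret concatenated with the sorted key/value pairs. The same rule as a standalone sketch — hashlib replaces the Python 2 md5 module, and the key/secret values are made up:

```python
import hashlib

def flickr_sig(secret, **params):
    # Secret first, then each key immediately followed by its value, keys sorted.
    sigstring = secret + ''.join(k + str(params[k]) for k in sorted(params))
    return hashlib.md5(sigstring.encode('utf-8')).hexdigest()

sig = flickr_sig('s3cr3t', api_key='abc', perms='delete')
print(sig)  # deterministic for the same inputs
```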
svohara/pyvision
src/pyvision/point/DetectorROI.py
Python
bsd-3-clause
4,461
0.011432
# PyVision License
#
# Copyright (c) 2006-2008 David S. Bolme
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither name of copyright holders nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

import pyvision as pv
import numpy as np


class DetectorROI:
    '''
    This class defines an interface to a Region Of Interest (ROI) detector.
    '''
    def __init__(self, n=250, selector='bins', bin_size=50):
        '''
        n        - is the approximate number of points requested.
        bin_size - the width and height of each bin in pixels.
        selector - ('all', 'bins', or 'best') strategy for point selection.

        When selector is set to 'bins', the image is subdivided into bins of
        size <bin_size>X<bin_size> pixels and an equal number of points will be
        taken from each of those bins.  This ensures that points are found in
        all parts of the image, not just where the corners are strongest.
        '''
        self.n = n
        self.selector = selector
        self.bin_size = bin_size

    def detect(self, image, **kwargs):
        '''
        Returns a list of regions of interest.  Each element in the list is a
        tuple of (score, centerpoint, radius).  A radius of "None" is used for
        point detectors.  Higher scores are better and scores of "None"
        indicate no score is available.
        '''
        A = None
        if isinstance(image, pv.Image):
            A = image.asMatrix2D()
        elif isinstance(image, np.ndarray) and len(image.shape) == 2:
            A = image
        else:
            raise TypeError("ERROR Unknown Type (%s) - Only arrays and pyvision images supported." % type(image))

        L = self._detect(image, **kwargs)

        L.sort()
        L.reverse()

        if self.selector == 'best':
            L = L[:self.n]
        elif self.selector == 'bins':
            nbins = A.shape[0] / self.bin_size * A.shape[1] / self.bin_size
            npts = self.n / nbins + 1
            corners = []
            for xmin in range(0, A.shape[0], self.bin_size):
                xmax = xmin + self.bin_size
                for ymin in range(0, A.shape[1], self.bin_size):
                    bin_data = []
                    ymax = ymin + self.bin_size
                    for each in L:
                        if xmin <= each[1] and each[1] < xmax and ymin <= each[2] and each[2] < ymax:
                            bin_data.append(each)
                            if len(bin_data) >= npts:
                                break
                    corners += bin_data
            L = corners
        else:
            # 'all': keep every detection
            pass

        roi = []
        for each in L:
            roi.append([each[0], pv.Point(each[1], each[2]), each[3]])
        return roi

    def _detect(self, image, **kwargs):
        raise NotImplementedError("This method should be overridden in a sub class.")
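The 'bins' strategy in the docstring only works if _detect() honours the implicit contract: it must return (score, x, y, radius) tuples that detect() can rank and thin out per bin. A toy subclass makes the contract visible; RandomROI is a stand-in invented here, not part of PyVision:

import random

class RandomROI(DetectorROI):
    '''Toy detector: emits random points so the selection logic can be seen.'''
    def _detect(self, image, **kwargs):
        A = image.asMatrix2D()
        w, h = A.shape[0], A.shape[1]
        # (score, x, y, radius); radius is None for point detectors
        return [(random.random(), random.randrange(w), random.randrange(h), None)
                for _ in range(1000)]

detector = RandomROI(n=250, selector='bins', bin_size=50)
# rois = detector.detect(some_pv_image)
# -> list of [score, pv.Point(x, y), radius], spread evenly across 50x50 bins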
tkolhar/robottelo
tests/foreman/ui/test_architecture.py
Python
gpl-3.0
5,128
0
# -*- encoding: utf-8 -*-
"""Test class for Architecture UI"""
from fauxfactory import gen_string
from nailgun import entities
from robottelo.datafactory import generate_strings_list, invalid_values_list
from robottelo.decorators import run_only_on, tier1
from robottelo.test import UITestCase
from robottelo.ui.factory import make_arch
from robottelo.ui.locators import common_locators
from robottelo.ui.session import Session


def valid_arch_os_names():
    """Returns a tuple of arch/os names for creation tests"""
    return(
        {u'name': gen_string('alpha'), u'os_name': gen_string('alpha')},
        {u'name': gen_string('html'), u'os_name': gen_string('html')},
        {u'name': gen_string('utf8'), u'os_name': gen_string('utf8')},
        {u'name': gen_string('alphanumeric'),
         u'os_name': gen_string('alphanumeric')}
    )


class ArchitectureTestCase(UITestCase):
    """Implements Architecture tests from UI"""

    @run_only_on('sat')
    @tier1
    def test_positive_create_with_os(self):
        """@Test: Create a new Architecture with OS

        @Feature: Architecture - Positive Create

        @Assert: Architecture is created
        """
        with Session(self.browser) as session:
            for test_data in valid_arch_os_names():
                with self.subTest(test_data):
                    entities.OperatingSystem(
                        name=test_data['os_name']).create()
                    make_arch(session, name=test_data['name'],
                              os_names=[test_data['os_name']])
                    self.assertIsNotNone(
                        self.architecture.search(test_data['name']))

    @run_only_on('sat')
    @tier1
    def test_positive_create_with_name(self):
        """@Test: Create a new Architecture with different data

        @Feature: Architecture - Positive Create

        @Assert: Architecture is created
        """
        with Session(self.browser) as session:
            for name in generate_strings_list():
                with self.subTest(name):
                    make_arch(session, name=name)
                    self.assertIsNotNone(self.architecture.search(name))

    @run_only_on('sat')
    @tier1
    def test_negative_create_with_invalid_name(self):
        """@Test: Try to create architecture and use whitespace, blank, tab
        symbol or too long string of different types as its name value

        @Feature: Architecture - Negative Create

        @Assert: Architecture is not created
        """
        with Session(self.browser) as session:
            for invalid_name in invalid_values_list(interface='ui'):
                with self.subTest(invalid_name):
                    make_arch(session, name=invalid_name)
                    self.assertIsNotNone(self.architecture.wait_until_element(
                        common_locators['name_haserror']))

    @run_only_on('sat')
    @tier1
    def test_negative_create_with_same_name(self):
        """@Test: Create a new Architecture with same name

        @Feature: Architecture - Negative Create

        @Assert: Architecture is not created
        """
        with Session(self.browser) as session:
            for name in generate_strings_list():
                with self.subTest(name):
                    make_arch(session, name=name)
                    self.assertIsNotNone(self.architecture.search(name))
                    make_arch(session, name=name)
                    self.assertIsNotNone(self.architecture.wait_until_element(
                        common_locators['name_haserror']))

    @run_only_on('sat')
    @tier1
    def test_positive_delete(self):
        """@Test: Delete an existing Architecture

        @Feature: Architecture - Delete

        @Assert: Architecture is deleted
        """
        os = entities.OperatingSystem(name=gen_string('alpha')).create()
        with Session(self.browser) as session:
            for name in generate_strings_list():
                with self.subTest(name):
                    entities.Architecture(
                        name=name, operatingsystem=[os]).create()
                    session.nav.go_to_architectures()
                    self.architecture.delete(name)

    @run_only_on('sat')
    @tier1
    def test_positive_update_name_and_os(self):
        """@Test: Update Architecture with new name and OS

        @Feature: Architecture - Update

        @Assert: Architecture is updated
        """
        old_name = gen_string('alpha')
        with Session(self.browser) as session:
            make_arch(session, name=old_name)
            self.assertIsNotNone(self.architecture.search(old_name))
            for new_name in generate_strings_list():
                with self.subTest(new_name):
                    os_name = gen_string('alpha')
                    entities.OperatingSystem(name=os_name).create()
                    self.architecture.update(
                        old_name, new_name, new_os_names=[os_name])
                    self.assertIsNotNone(self.architecture.search(new_name))
                    old_name = new_name  # for next iteration
isslayne/enigma2
skin.py
Python
gpl-2.0
35,736
0.035678
from Tools.Profile import profile
profile("LOAD:ElementTree")
import xml.etree.cElementTree
import os

profile("LOAD:enigma_skin")
from enigma import eSize, ePoint, eRect, gFont, eWindow, eLabel, ePixmap, eWindowStyleManager, addFont, gRGB, eWindowStyleSkinned, getDesktop
from Components.config import ConfigSubsection, ConfigText, config
from Components.Converter.Converter import Converter
from Components.Sources.Source import Source, ObsoleteSource
from Tools.Directories import resolveFilename, SCOPE_SKIN, SCOPE_FONTS, SCOPE_CURRENT_SKIN, SCOPE_CONFIG, fileExists, SCOPE_SKIN_IMAGE
from Tools.Import import my_import
from Tools.LoadPixmap import LoadPixmap
from Components.RcModel import rc_model
from Components.SystemInfo import SystemInfo

colorNames = {}
# Predefined fonts, typically used in built-in screens and for components like
# the movie list and so on.
fonts = {
	"Body": ("Regular", 18, 22, 16),
	"ChoiceList": ("Regular", 20, 24, 18),
}
parameters = {}

def dump(x, i=0):
	print " " * i + str(x)
	try:
		for n in x.childNodes:
			dump(n, i + 1)
	except:
		pass

class SkinError(Exception):
	def __init__(self, message):
		self.msg = message

	def __str__(self):
		return "{%s}: %s. Please contact the skin's author!" % (config.skin.primary_skin.value, self.msg)

dom_skins = [ ]

def addSkin(name, scope = SCOPE_SKIN):
	# read the skin
	filename = resolveFilename(scope, name)
	if fileExists(filename):
		mpath = os.path.dirname(filename) + "/"
		try:
			dom_skins.append((mpath, xml.etree.cElementTree.parse(filename).getroot()))
		except:
			print "[SKIN ERROR] error in %s" % filename
			return False
		else:
			return True
	return False

# get own skin_user_skinname.xml file, if it exists
def skin_user_skinname():
	name = "skin_user_" + config.skin.primary_skin.value[:config.skin.primary_skin.value.rfind('/')] + ".xml"
	filename = resolveFilename(SCOPE_CONFIG, name)
	if fileExists(filename):
		return name
	return None

# we do our best to always select the "right" value
# skins are loaded in order of priority: skin with
# highest priority is loaded last, usually the user-provided
# skin.

# currently, loadSingleSkinData (colors, bordersets etc.)
# are applied one-after-each, in order of ascending priority.
# the dom_skin will keep all screens in descending priority,
# so the first screen found will be used.

# example: loadSkin("nemesis_greenline/skin.xml")
config.skin = ConfigSubsection()
DEFAULT_SKIN = SystemInfo["HasFullHDSkinSupport"] and "PLi-FullNightHD/skin.xml" or "PLi-HD/skin.xml"
# on SD hardware, PLi-HD will not be available
if not fileExists(resolveFilename(SCOPE_SKIN, DEFAULT_SKIN)):
	# in that case, fall back to Magic (which is an SD skin)
	DEFAULT_SKIN = "Magic/skin.xml"
	if not fileExists(resolveFilename(SCOPE_SKIN, DEFAULT_SKIN)):
		DEFAULT_SKIN = "skin.xml"
config.skin.primary_skin = ConfigText(default=DEFAULT_SKIN)

profile("LoadSkin")
res = None
name = skin_user_skinname()
if name:
	res = addSkin(name, SCOPE_CONFIG)
if not name or not res:
	addSkin('skin_user.xml', SCOPE_CONFIG)

# some boxes lie about their dimensions
addSkin('skin_box.xml')
# add optional discrete second infobar
addSkin('skin_second_infobar.xml')
display_skin_id = 1
addSkin('skin_display.xml')
addSkin('skin_text.xml')

addSkin('skin_subtitles.xml')

try:
	if not addSkin(config.skin.primary_skin.value):
		raise SkinError, "primary skin not found"
except Exception, err:
	print "SKIN ERROR:", err
	skin = DEFAULT_SKIN
	if config.skin.primary_skin.value == skin:
		skin = 'skin.xml'
	print "defaulting to standard skin...", skin
	config.skin.primary_skin.value = skin
	addSkin(skin)
	del skin

addSkin('skin_default.xml')
profile("LoadSkinDefaultDone")

#
# Convert a string into a number. Used to convert object position and size attributes into a number
#    s is the input string.
#    e is the parent object size to do relative calculations on
#    size is the size of the object (e.g. width or height)
#    font is a font object used to calculate relative font sizes
# Note some constructs for speeding
# Can do things like: 10+center-10w+4% # To center the widget on the parent widget, # but move forward 10 pixels and 4% of parent width # and 10 character widths backward # Multiplication, division and subexprsssions are also allowed: 3*(e-c/2) # # Usage: center : center the object on parent based on parent size and object size # e : take the parent size/width # c : take the center point of parent size/width # % : take given percentag of parent size/width # w : multiply by current font width # h : multiply by current font height # def parseCoordinate(s, e, size=0, font=None): s = s.strip() if s == "center": # for speed, can be common case val = (e - size)/2 elif s == '*': return None else: try: val = int(s) # for speed except: if 't' in s: s = s.replace("center", str((e-size)/2.0)) if 'e' in s: s = s.replace("e", str(e)) if 'c' in s: s = s.replace("c", str(e/2.0)) if 'w' in s: s = s.replace("w", "*" + str(fonts[font][3])) if 'h' in s: s = s.replace("h", "*" + str(fonts[font][2])) if '%' in s: s = s.replace("%", "*" + str(e/100.0)) try: val = int(s) # for speed except: val = eval(s) if val < 0: return 0 return int(val) # make sure an integer value is returned def getParentSize(object, desktop): size = eSize() if object: parent = object.getParent() # For some widgets (e.g. ScrollLabel) the skin attributes are applied to # a child widget, instead of to the widget itself. In that case, the parent # we have here is not the real parent, but it is the main widget. # We have to go one level higher to get the actual parent. # We can detect this because the 'parent' will not have a size yet # (the main widget's size will be calculated internally, as soon as the child # widget has parsed the skin attributes) if parent and parent.size().isEmpty(): parent = parent.getParent() if parent: size = parent.size() elif desktop: #widget has no parent, use desktop size instead for relative coordinates size = desktop.size() return size def parseValuePair(s, scale, object = None, desktop = None, size = None): x, y = s.split(',') parentsize = eSize() if object and ('c' in x or 'c' in y or 'e' in x or 'e' in y or '%' in x or '%' in y): # need parent size for ce% parentsize = getParentSize(object, desktop) xval = parseCoordinate(x, parentsize.width(), size and size.width() or 0) yval = parseCoordinate(y, parentsize.height(), size and size.height() or 0) return (xval * scale[0][0] / scale[0][1], yval * scale[1][0] / scale[1][1]) def parsePosition(s, scale, object = None, desktop = None, size = None): (x, y) = parseValuePair(s, scale, object, desktop, size) return ePoint(x, y) def parseSize(s, scale, object = None, desktop = None): (x, y) = parseValuePair(s, scale, object, desktop) return eSize(x, y) def parseFont(s, scale): try: f = fonts[s] name = f[0] size = f[1] except: name, size = s.split(';') return gFont(name, int(size) * scale[0][0] / scale[0][1]) def parseColor(s): if s[0] != '#': try: return colorNames[s] except: raise SkinError("color '%s' must be #aarrggbb or valid named color" % s) return gRGB(int(s[1:], 0x10)) def collectAttributes(skinAttributes, node, context, skin_path_prefix=None, ignore=(), filenames=frozenset(("pixmap", "pointer", "seek_pointer", "backgroundPixmap", "selectionPixmap", "sliderPixmap", "scrollbarbackgroundPixmap"))): # walk all attributes size = None pos = None font = None for attrib, value in node.items(): if attrib not in ignore: if attrib in filenames: value = resolveFilename(SCOPE_CURRENT_SKIN, value, path_prefix=skin_path_prefix) # Bit of a hack this, really. 
When a window has a flag (e.g. wfNoBorder) # it needs to be set at least before the size is set, in order for the # window dimensions to be calculated correctly in all situations. # If wfNoBorder is applied after the size has been set, the window will fail to cle
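The comment block above parseCoordinate compresses a small expression grammar, so a worked example helps. The parent width (1280) and widget width (200) are invented numbers; the character width 16 is real, taken from index 3 of the "Body" entry in the fonts table:

# e = parent width, size = widget width, font = "Body" (char width 16)
parseCoordinate("center", 1280, 200)    # (1280 - 200) / 2 = 540
parseCoordinate("e-200", 1280)          # eval("1280-200") = 1080
parseCoordinate("4%", 1280)             # eval("4*12.8") = 51.2, int() -> 51
parseCoordinate("10+center-10w+4%", 1280, 200, "Body")
# "center" -> 540.0 first, then "w" -> "*16" and "%" -> "*12.8":
# eval("10+540.0-10*16+4*12.8") = 441.2, int() -> 441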
AustereCuriosity/astropy
astropy/analytic_functions/blackbody.py
Python
bsd-3-clause
2,225
0
# Licensed under a 3-clause BSD style license - see LICENSE.rst """Functions related to blackbody radiation.""" from __future__ import (absolute_import, division, print_function, unicode_literals) # LOCAL from ..modeling import blackbody as _bb from ..utils.decorators import deprecated __all__ = ['blackbody_nu', 'blackbody_lambda'] # Units FNU = _bb.FNU FLAM = _bb.FLAM @deprecated('2.0', alternative='astropy.modeling.blackbody.blackbody_nu') def blackbody_nu(in_x, temperature): """Calculate blackbody flux per steradian, :math:`B_{\\nu}(T)`. .. note:: Use `numpy.errstate` to suppress Numpy warnings, if desired. .. warning:: Output values might contain ``nan`` and ``inf``. Parameters ---------- in_x : number, array-like, or `~astropy.units.Quantity` Frequency, wavelength, or wave number. If not a Quantity, it is assumed to be in Hz. temperature : number, array-like, or `~astropy.units.Quantity` Blackbody temperature. If not a Quantity, it is assumed to be in Kelvin. Returns ------- flux : `~astropy.units.Quantity` Blackbody monochromatic flux in :math:`erg \\; cm^{-2} s^{-1} Hz^{-1} sr^{-1}`. Raises ------ ValueError Invalid temperature. ZeroDivisionError Wavelength is zero (when converting to frequency). """ return _bb.blackbody_nu(in_x, temperature) @deprecated('2.0', alternative='astropy.modeling.blackbody.blackbody_lambda') def blackbody_lambda(in_x, temperature): """Like :func:`blackbody_nu` but for :math:`B_{\\lambda}(T)`.
Parameters ---------- in_x : number, array-like, or `~astropy.units.Quantity` Frequency, wavelength, or wave number. If not a Quantity, it is assumed to be in Angstrom. temperature : number, array-like, or `~astropy.units.Quantity` Blackbody temperature. If not a Quantity, it is assumed to be in Kelvin. Returns ------- flux : `~astropy.units.Quantity` Blackbody monochromatic flux in
:math:`erg \\; cm^{-2} s^{-1} \\mathring{A}^{-1} sr^{-1}`. """ return _bb.blackbody_lambda(in_x, temperature)
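Both wrappers forward to astropy.modeling.blackbody and emit an AstropyDeprecationWarning when called. A usage sketch; the wavelengths and the roughly solar temperature are arbitrary sample values:

import numpy as np
from astropy import units as u
from astropy.analytic_functions import blackbody_nu, blackbody_lambda

wavelengths = np.array([3000.0, 5000.0, 7000.0]) * u.AA
temperature = 5778 * u.K  # roughly solar

flux_nu = blackbody_nu(wavelengths, temperature)       # erg / (cm2 s Hz sr)
flux_lam = blackbody_lambda(wavelengths, temperature)  # erg / (cm2 s Angstrom sr)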
Learning-from-our-past/Kaira
interface/valuewrapper.py
Python
gpl-2.0
1,114
0.002693
class ValueWrapper(object):
    xmlEntry = None  # Processdata sets this every time before extracting a new Entry.
    idcounter = 1000  # class variable to generate unique ids for wrapped values

    @staticmethod
    def reset_id_counter():
        ValueWrapper.idcounter = 1000

    def __init__(self, val):
        self._value = val
        self.id = "t" + str(ValueWrapper.idcounter)
        self.manuallyEdited = False
        self.error = False

        if ValueWrapper.xmlEntry is not None and self.id in ValueWrapper.xmlEntry.attrib:
            # there is a manually entered value for this field in the xml, use it instead
            self._value = ValueWrapper.xmlEntry.attrib[self.id]
            self.manuallyEdited = True

        ValueWrapper.idcounter += 1

    def manualEdit(self, val):
        """
        :param val: Meant to manually edit the value from the GUI.
        :return:
        """
        self._value = val
        self.manuallyEdited = True

    @property
    def value(self):
        return self._value

    @value.setter
    def value(self, value):
        if not self.manuallyEdited:
            self._value = value
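The xml-override mechanism in __init__ is easiest to see end to end. A small sketch, in which the ElementTree element stands in for whatever Processdata normally assigns to xmlEntry:

import xml.etree.ElementTree as ET

ValueWrapper.reset_id_counter()
ValueWrapper.xmlEntry = ET.Element("entry", attrib={"t1000": "corrected name"})

name = ValueWrapper("extracted name")  # gets id "t1000" -> overridden from xml
age = ValueWrapper(42)                 # gets id "t1001" -> keeps extracted value

print(name.value, name.manuallyEdited)  # corrected name True
print(age.value, age.manuallyEdited)    # 42 False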
Passw/gn_GFW
build/win/merge_pgc_files.py
Python
gpl-3.0
5,397
0.008894
#!/usr/bin/env python
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Merge the PGC files generated during the profiling step to the PGD database.

This is required to work around a flakiness in pgomgr.exe where it can run out
of address space while trying to merge all the PGC files at the same time.
"""

import glob
import json
import optparse
import os
import subprocess
import sys

script_dir = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, os.path.join(script_dir, os.pardir))

import vs_toolchain

# Number of PGC files that should be merged in each iteration. Merging all
# the files one by one is really slow, but merging more than 10 at a time
# doesn't really seem to impact the total time (when merging 180 files).
#
# Number of pgc merged per iteration  |  Time (in min)
#                 1                   |      27.2
#                10                   |      12.8
#                20                   |      12.0
#                30                   |      11.5
#                40                   |      11.4
#                50                   |      11.5
#                60                   |      11.6
#                70                   |      11.6
#                80                   |      11.7
#
# TODO(sebmarchand): Measure the memory usage of pgomgr.exe to see how it gets
#     affected by the number of pgc files.
_BATCH_SIZE_DEFAULT = 10


def find_pgomgr(chrome_checkout_dir):
  """Find pgomgr.exe."""
  win_toolchain_json_file = os.path.join(chrome_checkout_dir, 'build',
                                         'win_toolchain.json')
  if not os.path.exists(win_toolchain_json_file):
    raise Exception('The toolchain JSON file is missing.')
  with open(win_toolchain_json_file) as temp_f:
    toolchain_data = json.load(temp_f)
  if not os.path.isdir(toolchain_data['path']):
    raise Exception('The toolchain JSON file is invalid.')

  # Always use the x64 version of pgomgr (the x86 one doesn't work on the bot's
  # environment).
  pgomgr_dir = None
  if toolchain_data['version'] == '2017':
    vc_tools_root = vs_toolchain.FindVCToolsRoot()
    pgomgr_dir = os.path.join(vc_tools_root, 'HostX64', 'x64')

  pgomgr_path = os.path.join(pgomgr_dir, 'pgomgr.exe')
  if not os.path.exists(pgomgr_path):
    raise Exception('pgomgr.exe is missing from %s.' % pgomgr_dir)

  return pgomgr_path


def merge_pgc_files(pgomgr_path, files, pgd_path):
  """Merge all the pgc_files in |files| to |pgd_path|."""
  merge_command = [
      pgomgr_path,
      '/merge'
  ]
  merge_command.extend(files)
  merge_command.append(pgd_path)
  proc = subprocess.Popen(merge_command, stdout=subprocess.PIPE)
  stdout, _ = proc.communicate()
  print stdout
  return proc.returncode


def main():
  parser = optparse.OptionParser(usage='%prog [options]')
  parser.add_option('--checkout-dir', help='The Chrome checkout directory.')
  parser.add_option('--target-cpu', help='[DEPRECATED] The target\'s bitness.')
  parser.add_option('--build-dir', help='Chrome build directory.')
  parser.add_option('--binary-name', help='The binary for which the PGC files '
                    'should be merged, without extension.')
  parser.add_option('--files-per-iter', help='The number of PGC files to merge '
                    'in each iteration, defaults to %d.' % _BATCH_SIZE_DEFAULT,
                    type='int', default=_BATCH_SIZE_DEFAULT)
  options, _ = parser.parse_args()

  if not options.checkout_dir:
    parser.error('--checkout-dir is required')
  if not options.build_dir:
    parser.error('--build-dir is required')
  if not options.binary_name:
    parser.error('--binary-name is required')

  # Start by finding pgomgr.exe.
  pgomgr_path = find_pgomgr(options.checkout_dir)

  pgc_files = glob.glob(os.path.join(options.build_dir,
                                     '%s*.pgc' % options.binary_name))
  pgd_file = os.path.join(options.build_dir,
                          '%s.pgd' % options.binary_name)

  def _split_in_chunks(items, chunk_size):
    """Split |items| in chunks of size |chunk_size|.

    Source: http://stackoverflow.com/a/312464
    """
    for i in xrange(0, len(items), chunk_size):
      yield items[i:i + chunk_size]

  for chunk in _split_in_chunks(pgc_files, options.files_per_iter):
    files_to_merge = []
    for pgc_file in chunk:
      files_to_merge.append(
          os.path.join(options.build_dir, os.path.basename(pgc_file)))
    ret = merge_pgc_files(pgomgr_path, files_to_merge, pgd_file)
    # pgomgr.exe sometimes fails to merge too many files at the same time (it
    # usually complains that a stream is missing, but if you try to merge this
    # file individually it works), try to merge all the PGCs from this batch one
    # at a time instead. Don't fail the build if we can't merge a file.
    # TODO(sebmarchand): Report this to Microsoft, check if this is still
    #     happening with VS2017.
    if ret != 0:
      print ('Error while trying to merge several PGC files at the same time, '
             'trying to merge them one by one.')
      for pgc_file in chunk:
        ret = merge_pgc_files(
            pgomgr_path,
            [os.path.join(options.build_dir, os.path.basename(pgc_file))],
            pgd_file
        )
        if ret != 0:
          print 'Error while trying to merge %s, continuing.' % pgc_file


if __name__ == '__main__':
  sys.exit(main())
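To see the batching arithmetic in isolation, here is the chunking helper run on invented file names (Python 2, matching the script):

def split_in_chunks(items, chunk_size):
    # Same logic as _split_in_chunks above.
    for i in xrange(0, len(items), chunk_size):
        yield items[i:i + chunk_size]

files = ['a.pgc', 'b.pgc', 'c.pgc', 'd.pgc', 'e.pgc', 'f.pgc', 'g.pgc']
print list(split_in_chunks(files, 3))
# [['a.pgc', 'b.pgc', 'c.pgc'], ['d.pgc', 'e.pgc', 'f.pgc'], ['g.pgc']]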
Jannes123/inasafe
safe/common/minimum_needs.py
Python
gpl-3.0
8,353
0
# coding=utf-8
"""This module contains the abstract class of the MinimumNeeds. The storage
logic is omitted here."""

__author__ = 'Christian Christelis <christian@kartoza.com>'
__date__ = '05/10/2014'
__copyright__ = ('Copyright 2014, Australia Indonesia Facility for '
                 'Disaster Reduction')

from collections import OrderedDict
import json
from os.path import exists, dirname
from os import remove

from safe.utilities.i18n import tr


class MinimumNeeds(object):
    """An abstract class for handling the minimum needs.

    The persistence logic is excluded from this class.

    .. versionadded:: 2.2.
    """

    def get_need(self, resource):
        """Get a resource from the minimum_needs.

        :param resource: The resource name
        :type resource: basestring

        :returns: resource needed.
        :rtype: dict, None
        """
        # Resources live in the 'resources' list and are keyed by
        # 'Resource name', consistently with the rest of this class.
        for need in self.minimum_needs['resources']:
            if need['Resource name'] == resource:
                return need
        return None

    def get_minimum_needs(self):
        """Get the minimum needed information about the minimum needs.

        That is the resource and the amount.

        :returns: minimum needs
        :rtype: OrderedDict
        """
        minimum_needs = OrderedDict()
        for resource in self.minimum_needs['resources']:
            if resource['Unit abbreviation']:
                name = '%s [%s]' % (
                    tr(resource['Resource name']),
                    resource['Unit abbreviation']
                )
            else:
                name = tr(resource['Resource name'])
            amount = resource['Default']
            minimum_needs[name] = amount
        return OrderedDict(minimum_needs)

    def get_full_needs(self):
        """The full list of minimum needs with all fields.

        :returns: minimum needs
        :rtype: dict
        """
        return self.minimum_needs

    def set_need(self, resource, amount, units, frequency='weekly'):
        """Append a single new minimum need entry to the list.

        :param resource: Minimum need resource name.
        :type resource: basestring

        :param amount: Amount per person per time interval
        :type amount: int, float

        :param units: The unit that the resource is measured in.
        :type units: basestring

        :param frequency: How regularly the unit needs to be dispatched
        :type frequency: basestring  # maybe at some point fix this to a selection.
        """
        self.minimum_needs['resources'].append({
            'Resource name': resource,
            'Default': amount,
            'Unit abbreviation': units,
            'Frequency': frequency
        })

    def update_minimum_needs(self, minimum_needs):
        """Overwrite the internal minimum needs with new needs.

        Validate the new minimum needs. If ok, set these as the internal
        minimum needs.

        :param minimum_needs: The new minimum needs.
        :type minimum_needs: dict

        :returns: Returns success code, -1 for failure, 0 for success.
        :rtype: int
        """
        if not isinstance(minimum_needs, dict):
            return -1

        # noinspection PyAttributeOutsideInit
        self.minimum_needs = minimum_needs
        return 0

    @staticmethod
    def _defaults():
        """Helper to get the default minimum needs.

        .. note:: Key names will be translated.
        """
        rice = tr('Rice')
        drinking_water = tr('Drinking Water')
        water = tr('Clean Water')
        family_kits = tr('Family Kits')
        toilets = tr('Toilets')
        minimum_needs = {
            "resources": [
                {
                    "Default": "2.8",
                    "Minimum allowed": "0",
                    "Maximum allowed": "100",
                    "Frequency": "weekly",
                    "Resource name": rice,
                    "Resource description": "Basic food",
                    "Unit": "kilogram",
                    "Units": "kilograms",
                    "Unit abbreviation": "kg",
                    "Readable sentence": (
                        "Each person should be provided with {{ Default }} "
                        "{{ Units }} of {{ Resource name }} {{ Frequency }}.")
                },
                {
                    "Default": "17.5",
                    "Minimum allowed": "0",
"Maximum allowed": "100", "Frequency": "weekly", "Resource name": drinking_water, "Resource description": "For drinking", "Unit": "litre", "Units": "litres", "Unit abbreviation": "l", "Readable sentence": ( "Each person should be provided with {{ Default }} " "{{ Units }} of {{ Resource name }} {{ Frequency }} " "for drinking.") }, { "Default": "67", "Minimum allowed": "10", "Maximum allowed": "100", "Frequency": "weekly", "Resource name": water, "Resource description": "For washing", "Unit": "litre", "Units": "litres", "Unit abbreviation": "l", "Readable sentence": ( "Each person should be provided with {{ Default }} " "{{ Units }} of {{ Resource name }} {{ Frequency }} " "for washing.") }, { "Default": "0.2", "Minimum allowed": "0.1", "Maximum allowed": "1", "Frequency": "weekly", "Resource name": family_kits, "Resource description": "Hygiene kits", "Unit": "", "Units": "", "Unit abbreviation": "", "Readable sentence": ( "Each family of 5 persons should be provided with 1 " "Family Kit per week.") }, { "Default": "0.05", "Minimum allowed": "0.02", "Maximum allowed": "1", "Frequency": "single", "Resource name": toilets, "Resource description": "", "Unit": "", "Units": "", "Unit abbreviation": "", "Readable sentence": ( "A Toilet should be provided for every 20 persons.") } ], "provenance": "The minimum needs are based on Perka 7/2008.", "profile": "BNPB_en" } return minimum_needs def read_from_file(self, filename): """Read from an existing json file. :param filename: The file to be written to. :type filename: basestring, str :returns: Success status. -1 for unsuccessful 0 for success :rtype: int """ if not exists(filename): return -1 with open(filename) as fd: needs_json = fd.read() try: minimum_needs = json.loads(needs_json) except (TypeError, ValueError): minimum_needs = None if not minimum_needs: return -1 return self.update_minimum_needs(minimum_needs) def write_to_file(self, filename): """Write minimum needs as json to a file. :param filename: The file to be written to. :type filename: basestring, str """ if not exists(dirname(filename)): return -1 with open(filename, 'w') as fd: needs_json = json.dumps(self.minimum_needs) fd.write(needs_json) return 0 @staticmethod def remove_file(filename): """Remove a minimum needs file. :param filename: The file to be removed. :type filename: basestring, str
walchko/pygecko
dev/cpp-simple/subpub.py
Python
mit
1,370
0.00219
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
##############################################
# The MIT License (MIT)
# Copyright (c) 2018 Kevin Walchko
# see LICENSE for full details
##############################################

from pygecko.multiprocessing import geckopy
from pygecko.multiprocessing import GeckoSimpleProcess
from pygecko.transport.protocols import MsgPack, MsgPackCustom
import time
from math import cos, pi


def pub(**kwargs):
    geckopy.init_node(**kwargs)
    rate = geckopy.Rate(2)

    p = geckopy.pubBinderTCP("local", "bob")
    if p is None:
        print("ERROR setting up publisher")
        return

    cnt = 0
    while not geckopy.is_shutdown():
        # msg = "hi" + str(cnt)
        msg = [pi, cos(pi), cos(pi / 2)]
        p.publish(msg)
        print("sent")
        rate.sleep()
        cnt += 1


def sub(**kwargs):
    geckopy.init_node(**kwargs)
    rate = geckopy.Rate(2)

    s = geckopy.subConnectTCP("local", "bob")
    if s is None:
        print("ERROR setting up subscriber")
        return

    cnt = 0
    while not geckopy.is_shutdown():
        data = s.recv_nb()
        print("sub:", data)
        rate.sleep()


if __name__ == '__main__':
    args = {}

    p = GeckoSimpleProcess()
    p.start(func=pub, name='pub', kwargs=args)

    s = GeckoSimpleProcess()
    s.start(func=sub, name='sub', kwargs=args)