text | repo_name | path | language | license | size | score
stringlengths 6-947k | stringlengths 5-100 | stringlengths 4-231 | stringclasses 1 value | stringclasses 15 values | int64 6-947k | float64 0-0.34
---|---|---|---|---|---|---|
import numpy as np
def wzorProst3d(p1,p2):
# parametric 3D line through p1 and p2, packed as [x0, dx, y0, dy, z0, dz]
wsp = np.array([p1[0],(p2[0]-p1[0]),p1[1],(p2[1]-p1[1]),p1[2],(p2[2]-p1[2])])
return wsp
def plaszczyznaRownolegla(p1,p2,p3):
# plane through p1, p2, p3: normal (A,B,C) = (p2-p1) x (p3-p1)
p12 = np.array([p2[0]-p1[0],p2[1]-p1[1],p2[2]-p1[2]])
p13 = np.array([p3[0]-p1[0],p3[1]-p1[1],p3[2]-p1[2]])
wek = np.cross(p12,p13)
A = wek[0]
B = wek[1]
C = wek[2]
D = -A*p1[0] - B*p1[1] - C*p1[2]
return A,B,C,D
def punktPrzeciecia(A,B,C,D,wsp):
# intersection of the plane Ax+By+Cz+D=0 with the parametric line wsp
t = (-D - A*wsp[0] - B*wsp[2] - C*wsp[4]) / (A*wsp[1] + B*wsp[3] + C*wsp[5])
x = wsp[1]*t + wsp[0]
y = wsp[3]*t + wsp[2]
z = wsp[5]*t + wsp[4]
return x,y,z
def plaszczyznaProsotopadla(wsp,x,y,z):
# plane perpendicular to the line wsp (its direction vector is the normal), through (x,y,z)
A = wsp[1]
B = wsp[3]
C = wsp[5]
D = -A*x - B*y - C*z
return A,B,C,D
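# Quick sanity check of the helpers above (illustrative values, not from the
# original source): the line through (0,0,0) and (1,0,0) meets the plane
# x = 2 (A,B,C,D = 1,0,0,-2) at (2,0,0):
#
#   wsp = wzorProst3d([0.0, 0.0, 0.0], [1.0, 0.0, 0.0])
#   punktPrzeciecia(1.0, 0.0, 0.0, -2.0, wsp)  # -> (2.0, 0.0, 0.0)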
def position_estimate(xp1,yp1,xp2,yp2):
# triangulate a 3D position from a top-camera pixel (xp1,yp1) and a side-camera pixel (xp2,yp2)
H = 2.0 # height of the top camera
h = 0.4 # height of the side camera
L = 2.5 # Y distance of the side camera from the point (0,0,0)
corner_kam_gora = np.array([(4.0/3.0),-1.0,0.0])
width_gora = 8.0/3.0
hight_gora = 2.0
corner_kam_bok = np.array([-1.6,1.0,1.6])
width_bok = 3.2
hight_bok = 2.4
# top camera pixels
pkam_gora = np.array([0.0, 0.0, H])
ppik_gora = np.array([(corner_kam_gora[0] - width_gora/1280.0*xp1),(corner_kam_gora[1] + hight_gora/960.0*yp1),corner_kam_gora[2]])
wsp_gora = wzorProst3d(pkam_gora, ppik_gora)
# auxiliary pixel used to construct the plane
ppik1_gora = np.array([-ppik_gora[0]+0.0001,ppik_gora[1],ppik_gora[2]])
# side camera pixels
pkam_bok = np.array([0.0, -L, h])
ppik_bok = np.array([(corner_kam_bok[0]+width_bok/640.0*xp2),corner_kam_bok[1],(corner_kam_bok[2]-hight_bok/480.0*yp2)])
wsp_bok = wzorProst3d(pkam_bok, ppik_bok)
# plane parallel to the top-camera pixel ray, passing through the auxiliary pixel
A,B,C,D = plaszczyznaRownolegla(pkam_gora, ppik_gora, ppik1_gora)
# intersection point of the parallel plane with the side-camera pixel ray
x1,y1,z1 = punktPrzeciecia(A, B, C, D, wsp_bok)
# plane perpendicular to the top-camera pixel ray, passing through the point (x1,y1,z1)
A1,B1,C1,D1 = plaszczyznaProsotopadla(wsp_gora, x1, y1, z1)
# intersection point of the perpendicular plane with the top-camera pixel ray
x2,y2,z2 = punktPrzeciecia(A1, B1, C1, D1, wsp_gora)
# approximate drone position
x = (x1 + x2) / 2.0
y = (y1 + y2) / 2.0
z = (z1 + z2) / 2.0
return x,y,z
######################################################################################
x,y,z = position_estimate(240, 820, 490, 280)
dron = [x,y,z]
print(dron)
| Venris/crazyflie-multilink | KM/kamera_testy/3d_position.py | Python | gpl-2.0 | 2,600 | 0.030385 |
#
# Copyright 2016 The BigDL Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import subprocess
import re
from typing import Optional
def get_cgroup_cpuset():
with open("/sys/fs/cgroup/cpuset/cpuset.cpus", "r") as f:
content = f.readlines()
cpu_set = []
values = content[0].strip().split(",")
for value in values:
if "-" in value:
# Parse the value like "2-4"
start, end = value.split("-")
cpu_set.extend([i for i in range(int(start), int(end) + 1)])
else:
cpu_set.append(int(value))
return cpu_set
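# Example (assumed cgroup content, not from the original source): if
# /sys/fs/cgroup/cpuset/cpuset.cpus contains "0-2,7", the parser above
# returns [0, 1, 2, 7].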
def get_cpu_info():
cpuinfo = []
args = ["lscpu", "--parse=CPU,Core,Socket"]
lscpu_info = subprocess.check_output(args, universal_newlines=True).split("\n")
# Get information about cpu, core, socket and node
for line in lscpu_info:
pattern = r"^([\d]+,[\d]+,[\d]+)"
regex_out = re.search(pattern, line)
if regex_out:
cpuinfo.append(regex_out.group(1).strip().split(","))
get_physical_core = {}
get_socket = {}
for line in cpuinfo:
int_line = [int(x) for x in line]
l_id, p_id, s_id = int_line
get_physical_core[l_id] = p_id
get_socket[l_id] = s_id
return get_physical_core, get_socket
def schedule_workers(num_workers: int, cores_per_worker: Optional[int] = None):
# If we are in a docker container whose --cpuset-cpus are set,
# we can get available cpus in /sys/fs/cgroup/cpuset/cpuset.cpus.
# If we are not in a container, this just returns all cpus.
cpuset = get_cgroup_cpuset()
cpuset = sorted(cpuset)
l_core_to_p_core, l_core_to_socket = get_cpu_info()
p2l = {}
p_cores = set()
for logical_core in cpuset:
physical_core = l_core_to_p_core[logical_core]
p_cores.add(physical_core)
if physical_core not in p2l:
p2l[physical_core] = logical_core
p_cores = sorted(p_cores)
if cores_per_worker is None:
cores_per_worker = len(p_cores) // num_workers
msg = "total number of cores requested must be smaller or" \
" equal than the physical cores available"
assert cores_per_worker * num_workers <= len(p_cores), msg
schedule = []
for i in range(num_workers):
schedule.append([p2l[core] for core in p_cores[i*cores_per_worker:(i+1)*cores_per_worker]])
return schedule
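# Usage sketch (hypothetical host with 8 physical cores and no constraining
# cpuset): schedule_workers(2) returns one list of logical core ids per
# worker, e.g. [[0, 1, 2, 3], [4, 5, 6, 7]], which callers can pass to
# os.sched_setaffinity() or numactl to pin each worker.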
| intel-analytics/BigDL | python/orca/src/bigdl/orca/cpu_info.py | Python | apache-2.0 | 2,911 | 0.000687 |
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for Bijector."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib.distributions.python.ops import bijectors
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import array_ops
from tensorflow.python.platform import test
class CholeskyOuterProductBijectorTest(test.TestCase):
"""Tests the correctness of the Y = X @ X.T transformation."""
def testBijectorMatrix(self):
with self.test_session():
bijector = bijectors.CholeskyOuterProduct(validate_args=True)
self.assertEqual("cholesky_outer_product", bijector.name)
x = [[[1., 0], [2, 1]], [[np.sqrt(2.), 0], [np.sqrt(8.), 1]]]
y = np.matmul(x, np.transpose(x, axes=(0, 2, 1)))
# Fairly easy to compute differentials since we have 2x2.
dx_dy = [[[2. * 1, 0, 0],
[2, 1, 0],
[0, 2 * 2, 2 * 1]],
[[2 * np.sqrt(2.), 0, 0],
[np.sqrt(8.), np.sqrt(2.), 0],
[0, 2 * np.sqrt(8.), 2 * 1]]]
ildj = -np.sum(
np.log(np.asarray(dx_dy).diagonal(
offset=0, axis1=1, axis2=2)),
axis=1)
self.assertAllEqual((2, 2, 2), bijector.forward(x).get_shape())
self.assertAllEqual((2, 2, 2), bijector.inverse(y).get_shape())
self.assertAllClose(y, bijector.forward(x).eval())
self.assertAllClose(x, bijector.inverse(y).eval())
self.assertAllClose(
ildj, bijector.inverse_log_det_jacobian(y).eval(), atol=0., rtol=1e-7)
self.assertAllClose(
-bijector.inverse_log_det_jacobian(y).eval(),
bijector.forward_log_det_jacobian(x).eval(),
atol=0.,
rtol=1e-7)
def testNoBatchStatic(self):
x = np.array([[1., 0], [2, 1]]) # np.linalg.cholesky(y)
y = np.array([[1., 2], [2, 5]]) # np.matmul(x, x.T)
with self.test_session() as sess:
y_actual = bijectors.CholeskyOuterProduct().forward(x=x)
x_actual = bijectors.CholeskyOuterProduct().inverse(y=y)
[y_actual_, x_actual_] = sess.run([y_actual, x_actual])
self.assertAllEqual([2, 2], y_actual.get_shape())
self.assertAllEqual([2, 2], x_actual.get_shape())
self.assertAllClose(y, y_actual_)
self.assertAllClose(x, x_actual_)
def testNoBatchDeferred(self):
x = np.array([[1., 0], [2, 1]]) # np.linalg.cholesky(y)
y = np.array([[1., 2], [2, 5]]) # np.matmul(x, x.T)
with self.test_session() as sess:
x_pl = array_ops.placeholder(dtypes.float32)
y_pl = array_ops.placeholder(dtypes.float32)
y_actual = bijectors.CholeskyOuterProduct().forward(x=x_pl)
x_actual = bijectors.CholeskyOuterProduct().inverse(y=y_pl)
[y_actual_, x_actual_] = sess.run([y_actual, x_actual],
feed_dict={x_pl: x, y_pl: y})
self.assertEqual(None, y_actual.get_shape())
self.assertEqual(None, x_actual.get_shape())
self.assertAllClose(y, y_actual_)
self.assertAllClose(x, x_actual_)
def testBatchStatic(self):
x = np.array([[[1., 0],
[2, 1]],
[[3., 0],
[1, 2]]]) # np.linalg.cholesky(y)
y = np.array([[[1., 2],
[2, 5]],
[[9., 3],
[3, 5]]]) # np.matmul(x, x.T)
with self.test_session() as sess:
y_actual = bijectors.CholeskyOuterProduct().forward(x=x)
x_actual = bijectors.CholeskyOuterProduct().inverse(y=y)
[y_actual_, x_actual_] = sess.run([y_actual, x_actual])
self.assertEqual([2, 2, 2], y_actual.get_shape())
self.assertEqual([2, 2, 2], x_actual.get_shape())
self.assertAllClose(y, y_actual_)
self.assertAllClose(x, x_actual_)
def testBatchDeferred(self):
x = np.array([[[1., 0],
[2, 1]],
[[3., 0],
[1, 2]]]) # np.linalg.cholesky(y)
y = np.array([[[1., 2],
[2, 5]],
[[9., 3],
[3, 5]]]) # np.matmul(x, x.T)
with self.test_session() as sess:
x_pl = array_ops.placeholder(dtypes.float32)
y_pl = array_ops.placeholder(dtypes.float32)
y_actual = bijectors.CholeskyOuterProduct().forward(x=x_pl)
x_actual = bijectors.CholeskyOuterProduct().inverse(y=y_pl)
[y_actual_, x_actual_] = sess.run([y_actual, x_actual],
feed_dict={x_pl: x, y_pl: y})
self.assertEqual(None, y_actual.get_shape())
self.assertEqual(None, x_actual.get_shape())
self.assertAllClose(y, y_actual_)
self.assertAllClose(x, x_actual_)
if __name__ == "__main__":
test.main()
| allenlavoie/tensorflow | tensorflow/contrib/distributions/python/kernel_tests/bijectors/cholesky_outer_product_test.py | Python | apache-2.0 | 5,365 | 0.006151 |
import textwrap
import yaml
from schema import SchemaError
def test_trivia_lists():
from redbot.cogs.trivia import InvalidListError, get_core_lists, get_list
list_names = get_core_lists()
assert list_names
problem_lists = []
for l in list_names:
try:
get_list(l)
except InvalidListError as exc:
e = exc.__cause__
if isinstance(e, SchemaError):
problem_lists.append((l.stem, f"SCHEMA error:\n{e!s}"))
else:
problem_lists.append((l.stem, f"YAML error:\n{e!s}"))
if problem_lists:
msg = ""
for name, error in problem_lists:
msg += f"- {name}:\n{textwrap.indent(error, ' ')}"
raise TypeError("The following lists contain errors:\n" + msg)
| palmtree5/Red-DiscordBot | tests/cogs/test_trivia.py | Python | gpl-3.0 | 799 | 0.001252 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
__all__ = [
'__version_info__',
'__version__',
'__author__',
'__author_email__',
]
__version_info__ = (0, 4, 3)
__version__ = '.'.join([unicode(i) for i in __version_info__])
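# e.g. (0, 4, 3) -> '0.4.3'; unicode() (Python 2) keeps the joined result a
# unicode string, consistent with the unicode_literals import above.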
__author__ = 'David Vuong'
__author_email__ = 'david@imageintelligence.com'
| ImageIntelligence/mimiron | mimiron/__init__.py | Python | mit | 334 | 0 |
#!/usr/bin/python2.7
import os
import sys
import zlib
import base64
import socket
import os.path
import argparse
from datetime import datetime
import debug
import module
import config as CFG
from poetsocket import *
__version__ = '0.4.4'
POSH_PROMPT = 'posh > '
FAKEOK = """HTTP/1.1 200 OK\r
Date: Tue, 19 Mar 2013 22:12:25 GMT\r
Server: Apache\r
X-Powered-By: PHP/5.3.10-1ubuntu3.2\r
Content-Length: 364\r
Content-Type: text/plain\r
\r
body{background-color:#f0f0f2;margin:0;padding:0;font-family:"Open Sans","Helvetica Neue",Helvetica,Arial,sans-serif}div{width:600px;margin:5em auto;padding:50px;background-color:#fff;border-radius:1em}a:link,a:visited{color:#38488f;text-decoration:none}@media (max-width:700px){body{background-color:#fff}div{width:auto;margin:0 auto;border-radius:0;padding:1em}}"""
class PoetSocketServer(PoetSocket):
def __init__(self, port):
self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.s.bind(('', port))
self.s.listen(1)
def accept(self):
return self.s.accept()
class PoetServer(object):
"""Core server functionality.
Implements control shell, and necessary helper functions.
Attributes:
s: socket instance for initial client connection
conn: socket instance for actual client communication
cmds: list of supported control shell commands
"""
def __init__(self, s):
self.s = s
self.conn = None
self.builtins = ['exit', 'help']
# exists so modules can stop server (used by selfdestruct)
self.continue_ = True
def start(self):
"""Poet server control shell."""
debug.info('Entering control shell')
self.conn = PoetSocket(self.s.accept()[0])
print 'Welcome to posh, the Poet Shell!'
print 'Running `help\' will give you a list of supported commands.'
while True:
try:
found = False
argv = raw_input(POSH_PROMPT).split()
#
# builtins
#
if argv == []:
continue
if argv[0] == 'exit':
break
elif argv[0] == 'help':
found = True
print 'Commands:\n {}'.format('\n '.join(sorted(self.builtins + module.server_commands.keys())))
#
# modules
#
# try to find command in registered modules
for cmd, func in module.server_commands.iteritems():
if argv[0] == cmd:
found = True
try:
func(self, argv)
except Exception as e:
self.info(str(e.args))
# see comment above for self.continue_ for why this is here
if not self.continue_:
return
if not found:
self.info('{}: command not found'.format(argv[0]))
except KeyboardInterrupt:
print
continue
except EOFError:
print
break
self.conn.send('fin')
debug.info('Exiting control shell')
def info(self, msg):
print 'posh : {}'.format(msg)
def generic(self, req, write_flag=False, write_file=None):
"""Abstraction layer for exchanging with client and writing to file.
Args:
req: command to send to client
write_flag: whether client response should be written
write_file: optional filename to use for file
"""
resp = self.conn.exchange(req)
# TODO: this hardcoding is bad, should be some generic way to see
# if response should be decompressed. maybe a list of all keywords
# which cause a compressed response to come back
if req == 'recon':
resp = zlib.decompress(resp)
print resp
if write_flag:
self.write(resp, req.split()[0], write_file)
def write(self, response, prefix, write_file=None):
"""Write to server archive.
Args:
response: data to write
prefix: directory to write file to (usually named after command
executed)
write_file: optional filename to use for file
"""
ts = datetime.now().strftime('%Y%m%d%H%M%S')
out_ts_dir = '{}/{}'.format(CFG.ARCHIVE_DIR, ts[:len('yyyymmdd')])
out_prefix_dir = '{}/{}'.format(out_ts_dir, prefix)
# create filename to write to
if write_file:
chunks = write_file.split('.')
# separate the file extension from the file name, default to .txt
ext = '.{}'.format('.'.join(chunks[1:])) if chunks[1:] else '.txt'
outfile = '{}/{}-{}{}'.format(out_prefix_dir, chunks[0], ts, ext)
else:
outfile = '{}/{}-{}.txt'.format(out_prefix_dir, prefix, ts)
# create directories if they don't exist
if not os.path.isdir(CFG.ARCHIVE_DIR):
os.mkdir(CFG.ARCHIVE_DIR)
if not os.path.isdir(out_ts_dir):
os.mkdir(out_ts_dir)
if not os.path.isdir(out_prefix_dir):
os.mkdir(out_prefix_dir)
# if file already exists, append unique digit to the end
if os.path.exists(outfile):
count = 1
orig_outfile = outfile
outfile = orig_outfile + '.{}'.format(count)
while os.path.exists(outfile):
outfile = orig_outfile + '.{}'.format(count)
count += 1
with open(outfile, 'w') as f:
f.write(response)
print 'posh : {} written to {}'.format(prefix, outfile)
def exec_preproc(self, inp):
"""Parse posh `exec' command line.
Args:
inp: raw `exec' command line
Returns:
Tuple suitable for expansion into as self.generic() parameters.
"""
tmp = inp.split()
write_file = None
write_flag = tmp[1] == '-o'
if write_flag:
if '"' not in tmp[2]:
write_file = tmp[2]
del tmp[2]
del tmp[1]
tmp = ' '.join(tmp)
return tmp, write_flag, write_file
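# Example (assumed posh input, for illustration): the line
# 'exec -o out.txt "ls -l"' yields ('exec "ls -l"', True, 'out.txt') --
# the -o flag and its filename are stripped out of the command string.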
def get_args():
""" Parse arguments and return dictionary. """
parser = argparse.ArgumentParser()
parser.add_argument('-p', '--port')
parser.add_argument('-v', '--version', action='store_true',
help='prints the Poet version number and exits')
return parser.parse_args()
def print_header():
""" Prints big ASCII logo and other info. """
print """
_
____ ____ ___ / /_
/ __ \/ __ \/ _ \/ __/
/ /_/ / /_/ / __/ /
/ .___/\____/\___/\__/ v{}
/_/
""".format(__version__)
def die(msg=None):
if msg:
debug.err(msg)
debug.err('Poet server terminated')
sys.exit(0)
def authenticate(ping):
"""Verify that the client is in fact connecting by checking the request
path and the auth token contained in the cookie.
Args:
ping: http request sent from client (string)
Returns:
None: client authenticated successfully
str: the reason authentication failed
"""
if ping.startswith('GET /style.css HTTP/1.1'):
if 'Cookie: c={};'.format(base64.b64encode(CFG.AUTH)) in ping:
return None
else:
return 'AUTH TOKEN'
else:
return 'REQUEST'
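# Example (hypothetical token): if CFG.AUTH == 'secret', then a request like
# 'GET /style.css HTTP/1.1\r\n...Cookie: c=c2VjcmV0;...' authenticates and
# returns None, since base64.b64encode('secret') == 'c2VjcmV0'.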
def drop_privs():
try:
new_uid = int(os.getenv('SUDO_UID'))
new_gid = int(os.getenv('SUDO_GID'))
except TypeError:
# they were running directly from a root user and didn't have
# sudo env variables
print """[!] WARNING: Couldn't drop privileges! To avoid this error, run from a non-root user.
You may also use sudo, from a non-root user. Continue? (y/n)""",
if raw_input().lower()[0] == 'y':
return
die()
debug.info('Dropping privileges to uid: {}, gid: {}'.format(new_uid,
new_gid))
# drop group before user, because otherwise you're not privileged enough
# to drop group
os.setgroups([])
os.setregid(new_gid, new_gid)
os.setreuid(new_uid, new_uid)
# check to make sure we can't re-escalate
try:
os.seteuid(0)
print '[!] WARNING: Failed to drop privileges! Continue? (y/n)',
if raw_input().lower()[0] != 'y':
die()
except OSError:
return
def main():
args = get_args()
if args.version:
print 'Poet version {}'.format(__version__)
sys.exit(0)
print_header()
PORT = int(args.port) if args.port else 443
try:
s = PoetSocketServer(PORT)
except socket.error as e:
if e.errno == 13:
die('You need to be root!')
if os.geteuid() == 0:
drop_privs()
debug.info('Poet server started on port: {}'.format(PORT))
module.load_modules()
while True:
try:
conn, addr = s.accept()
except KeyboardInterrupt:
die()
conntime = datetime.now().strftime(debug.DATE_FMT)
ping = conn.recv(SIZE)
if not ping:
die('Socket error: client disconnected without sending data')
auth_err = authenticate(ping)
if auth_err:
print '[!] ({}) Connected By: {} -> INVALID! ({})'.format(conntime, addr, auth_err)
conn.close()
else:
print '[+] ({}) Connected By: {} -> VALID'.format(conntime, addr)
conn.send(FAKEOK)
conn.close()
try:
PoetServer(s).start()
break
except Exception as e:
print e
die('Fatal error: {}'.format(e.message))
die()
if __name__ == '__main__':
main()
| khanhnnvn/poet | server.py | Python | mit | 10,080 | 0.001091 |
"""Dictionary Of Keys based matrix"""
from __future__ import division, print_function, absolute_import
__docformat__ = "restructuredtext en"
__all__ = ['dok_matrix', 'isspmatrix_dok']
import functools
import operator
import numpy as np
from scipy._lib.six import zip as izip, xrange, iteritems, itervalues
from .base import spmatrix, isspmatrix
from .sputils import (isdense, getdtype, isshape, isintlike, isscalarlike,
upcast, upcast_scalar, IndexMixin, get_index_dtype)
try:
from operator import isSequenceType as _is_sequence
except ImportError:
def _is_sequence(x):
return (hasattr(x, '__len__') or hasattr(x, '__next__')
or hasattr(x, 'next'))
class dok_matrix(spmatrix, IndexMixin, dict):
"""
Dictionary Of Keys based sparse matrix.
This is an efficient structure for constructing sparse
matrices incrementally.
This can be instantiated in several ways:
dok_matrix(D)
with a dense matrix, D
dok_matrix(S)
with a sparse matrix, S
dok_matrix((M,N), [dtype])
create the matrix with initial shape (M,N)
dtype is optional, defaulting to dtype='d'
Attributes
----------
dtype : dtype
Data type of the matrix
shape : 2-tuple
Shape of the matrix
ndim : int
Number of dimensions (this is always 2)
nnz
Number of nonzero elements
Notes
-----
Sparse matrices can be used in arithmetic operations: they support
addition, subtraction, multiplication, division, and matrix power.
Allows for efficient O(1) access of individual elements.
Duplicates are not allowed.
Can be efficiently converted to a coo_matrix once constructed.
Examples
--------
>>> import numpy as np
>>> from scipy.sparse import dok_matrix
>>> S = dok_matrix((5, 5), dtype=np.float32)
>>> for i in range(5):
... for j in range(5):
... S[i, j] = i + j # Update element
"""
format = 'dok'
def __init__(self, arg1, shape=None, dtype=None, copy=False):
dict.__init__(self)
spmatrix.__init__(self)
self.dtype = getdtype(dtype, default=float)
if isinstance(arg1, tuple) and isshape(arg1): # (M,N)
M, N = arg1
self.shape = (M, N)
elif isspmatrix(arg1): # Sparse ctor
if isspmatrix_dok(arg1) and copy:
arg1 = arg1.copy()
else:
arg1 = arg1.todok()
if dtype is not None:
arg1 = arg1.astype(dtype)
self.update(arg1)
self.shape = arg1.shape
self.dtype = arg1.dtype
else: # Dense ctor
try:
arg1 = np.asarray(arg1)
except Exception:
raise TypeError('invalid input format')
if len(arg1.shape) != 2:
raise TypeError('expected rank <=2 dense array or matrix')
from .coo import coo_matrix
d = coo_matrix(arg1, dtype=dtype).todok()
self.update(d)
self.shape = arg1.shape
self.dtype = d.dtype
def getnnz(self, axis=None):
if axis is not None:
raise NotImplementedError("getnnz over an axis is not implemented "
"for DOK format")
return dict.__len__(self)
def count_nonzero(self):
return sum(x != 0 for x in itervalues(self))
getnnz.__doc__ = spmatrix.getnnz.__doc__
count_nonzero.__doc__ = spmatrix.count_nonzero.__doc__
def __len__(self):
return dict.__len__(self)
def get(self, key, default=0.):
"""This overrides the dict.get method, providing type checking
but otherwise equivalent functionality.
"""
try:
i, j = key
assert isintlike(i) and isintlike(j)
except (AssertionError, TypeError, ValueError):
raise IndexError('index must be a pair of integers')
if (i < 0 or i >= self.shape[0] or j < 0 or j >= self.shape[1]):
raise IndexError('index out of bounds')
return dict.get(self, key, default)
def __getitem__(self, index):
"""If key=(i,j) is a pair of integers, return the corresponding
element. If either i or j is a slice or sequence, return a new sparse
matrix with just these elements.
"""
zero = self.dtype.type(0)
i, j = self._unpack_index(index)
i_intlike = isintlike(i)
j_intlike = isintlike(j)
if i_intlike and j_intlike:
# Scalar index case
i = int(i)
j = int(j)
if i < 0:
i += self.shape[0]
if i < 0 or i >= self.shape[0]:
raise IndexError('index out of bounds')
if j < 0:
j += self.shape[1]
if j < 0 or j >= self.shape[1]:
raise IndexError('index out of bounds')
return dict.get(self, (i,j), zero)
elif ((i_intlike or isinstance(i, slice)) and
(j_intlike or isinstance(j, slice))):
# Fast path for slicing very sparse matrices
i_slice = slice(i, i+1) if i_intlike else i
j_slice = slice(j, j+1) if j_intlike else j
i_indices = i_slice.indices(self.shape[0])
j_indices = j_slice.indices(self.shape[1])
i_seq = xrange(*i_indices)
j_seq = xrange(*j_indices)
newshape = (len(i_seq), len(j_seq))
newsize = _prod(newshape)
if len(self) < 2*newsize and newsize != 0:
# Switch to the fast path only when advantageous
# (count the iterations in the loops, adjust for complexity)
#
# We also don't handle newsize == 0 here (if
# i/j_intlike, it can mean index i or j was out of
# bounds)
return self._getitem_ranges(i_indices, j_indices, newshape)
i, j = self._index_to_arrays(i, j)
if i.size == 0:
return dok_matrix(i.shape, dtype=self.dtype)
min_i = i.min()
if min_i < -self.shape[0] or i.max() >= self.shape[0]:
raise IndexError('index (%d) out of range -%d to %d)' %
(i.min(), self.shape[0], self.shape[0]-1))
if min_i < 0:
i = i.copy()
i[i < 0] += self.shape[0]
min_j = j.min()
if min_j < -self.shape[1] or j.max() >= self.shape[1]:
raise IndexError('index (%d) out of range -%d to %d)' %
(j.min(), self.shape[1], self.shape[1]-1))
if min_j < 0:
j = j.copy()
j[j < 0] += self.shape[1]
newdok = dok_matrix(i.shape, dtype=self.dtype)
for a in xrange(i.shape[0]):
for b in xrange(i.shape[1]):
v = dict.get(self, (i[a,b], j[a,b]), zero)
if v != 0:
dict.__setitem__(newdok, (a, b), v)
return newdok
def _getitem_ranges(self, i_indices, j_indices, shape):
# performance golf: we don't want Numpy scalars here, they are slow
i_start, i_stop, i_stride = map(int, i_indices)
j_start, j_stop, j_stride = map(int, j_indices)
newdok = dok_matrix(shape, dtype=self.dtype)
for (ii, jj) in self.keys():
# ditto for numpy scalars
ii = int(ii)
jj = int(jj)
a, ra = divmod(ii - i_start, i_stride)
if a < 0 or a >= shape[0] or ra != 0:
continue
b, rb = divmod(jj - j_start, j_stride)
if b < 0 or b >= shape[1] or rb != 0:
continue
dict.__setitem__(newdok, (a, b),
dict.__getitem__(self, (ii, jj)))
return newdok
def __setitem__(self, index, x):
if isinstance(index, tuple) and len(index) == 2:
# Integer index fast path
i, j = index
if (isintlike(i) and isintlike(j) and 0 <= i < self.shape[0]
and 0 <= j < self.shape[1]):
v = np.asarray(x, dtype=self.dtype)
if v.ndim == 0 and v != 0:
dict.__setitem__(self, (int(i), int(j)), v[()])
return
i, j = self._unpack_index(index)
i, j = self._index_to_arrays(i, j)
if isspmatrix(x):
x = x.toarray()
# Make x and i into the same shape
x = np.asarray(x, dtype=self.dtype)
x, _ = np.broadcast_arrays(x, i)
if x.shape != i.shape:
raise ValueError("shape mismatch in assignment")
if np.size(x) == 0:
return
min_i = i.min()
if min_i < -self.shape[0] or i.max() >= self.shape[0]:
raise IndexError('index (%d) out of range -%d to %d)' %
(i.min(), self.shape[0], self.shape[0]-1))
if min_i < 0:
i = i.copy()
i[i < 0] += self.shape[0]
min_j = j.min()
if min_j < -self.shape[1] or j.max() >= self.shape[1]:
raise IndexError('index (%d) out of range -%d to %d)' %
(j.min(), self.shape[1], self.shape[1]-1))
if min_j < 0:
j = j.copy()
j[j < 0] += self.shape[1]
dict.update(self, izip(izip(i.flat, j.flat), x.flat))
if 0 in x:
zeroes = x == 0
for key in izip(i[zeroes].flat, j[zeroes].flat):
if dict.__getitem__(self, key) == 0:
# may have been superseded by later update
del self[key]
def __add__(self, other):
# First check if argument is a scalar
if isscalarlike(other):
res_dtype = upcast_scalar(self.dtype, other)
new = dok_matrix(self.shape, dtype=res_dtype)
# Add this scalar to every element.
M, N = self.shape
for i in xrange(M):
for j in xrange(N):
aij = self.get((i, j), 0) + other
if aij != 0:
new[i, j] = aij
# new.dtype.char = self.dtype.char
elif isinstance(other, dok_matrix):
if other.shape != self.shape:
raise ValueError("matrix dimensions are not equal")
# We could alternatively set the dimensions to the largest of
# the two matrices to be summed. Would this be a good idea?
res_dtype = upcast(self.dtype, other.dtype)
new = dok_matrix(self.shape, dtype=res_dtype)
new.update(self)
with np.errstate(over='ignore'):
for key in other.keys():
new[key] += other[key]
elif isspmatrix(other):
csc = self.tocsc()
new = csc + other
elif isdense(other):
new = self.todense() + other
else:
return NotImplemented
return new
def __radd__(self, other):
# First check if argument is a scalar
if isscalarlike(other):
new = dok_matrix(self.shape, dtype=self.dtype)
# Add this scalar to every element.
M, N = self.shape
for i in xrange(M):
for j in xrange(N):
aij = self.get((i, j), 0) + other
if aij != 0:
new[i, j] = aij
elif isinstance(other, dok_matrix):
if other.shape != self.shape:
raise ValueError("matrix dimensions are not equal")
new = dok_matrix(self.shape, dtype=self.dtype)
new.update(self)
for key in other:
new[key] += other[key]
elif isspmatrix(other):
csc = self.tocsc()
new = csc + other
elif isdense(other):
new = other + self.todense()
else:
return NotImplemented
return new
def __neg__(self):
new = dok_matrix(self.shape, dtype=self.dtype)
for key in self.keys():
new[key] = -self[key]
return new
def _mul_scalar(self, other):
res_dtype = upcast_scalar(self.dtype, other)
# Multiply this scalar by every element.
new = dok_matrix(self.shape, dtype=res_dtype)
for (key, val) in iteritems(self):
new[key] = val * other
return new
def _mul_vector(self, other):
# matrix * vector
result = np.zeros(self.shape[0], dtype=upcast(self.dtype,other.dtype))
for (i,j),v in iteritems(self):
result[i] += v * other[j]
return result
def _mul_multivector(self, other):
# matrix * multivector
M,N = self.shape
n_vecs = other.shape[1] # number of column vectors
result = np.zeros((M,n_vecs), dtype=upcast(self.dtype,other.dtype))
for (i,j),v in iteritems(self):
result[i,:] += v * other[j,:]
return result
def __imul__(self, other):
if isscalarlike(other):
# Multiply this scalar by every element.
for (key, val) in iteritems(self):
self[key] = val * other
# new.dtype.char = self.dtype.char
return self
else:
return NotImplemented
def __truediv__(self, other):
if isscalarlike(other):
res_dtype = upcast_scalar(self.dtype, other)
new = dok_matrix(self.shape, dtype=res_dtype)
# Multiply this scalar by every element.
for (key, val) in iteritems(self):
new[key] = val / other
# new.dtype.char = self.dtype.char
return new
else:
return self.tocsr() / other
def __itruediv__(self, other):
if isscalarlike(other):
# Multiply this scalar by every element.
for (key, val) in iteritems(self):
self[key] = val / other
return self
else:
return NotImplemented
# What should len(sparse) return? For consistency with dense matrices,
# perhaps it should be the number of rows? For now it returns the number
# of non-zeros.
def transpose(self, axes=None, copy=False):
if axes is not None:
raise ValueError(("Sparse matrices do not support "
"an 'axes' parameter because swapping "
"dimensions is the only logical permutation."))
M, N = self.shape
new = dok_matrix((N, M), dtype=self.dtype, copy=copy)
for key, value in iteritems(self):
new[key[1], key[0]] = value
return new
transpose.__doc__ = spmatrix.transpose.__doc__
def conjtransp(self):
""" Return the conjugate transpose
"""
M, N = self.shape
new = dok_matrix((N, M), dtype=self.dtype)
for key, value in iteritems(self):
new[key[1], key[0]] = np.conj(value)
return new
def copy(self):
new = dok_matrix(self.shape, dtype=self.dtype)
new.update(self)
return new
copy.__doc__ = spmatrix.copy.__doc__
def getrow(self, i):
"""Returns a copy of row i of the matrix as a (1 x n)
DOK matrix.
"""
out = self.__class__((1, self.shape[1]), dtype=self.dtype)
for j in range(self.shape[1]):
out[0, j] = self[i, j]
return out
def getcol(self, j):
"""Returns a copy of column j of the matrix as a (m x 1)
DOK matrix.
"""
out = self.__class__((self.shape[0], 1), dtype=self.dtype)
for i in range(self.shape[0]):
out[i, 0] = self[i, j]
return out
def tocoo(self, copy=False):
from .coo import coo_matrix
if self.nnz == 0:
return coo_matrix(self.shape, dtype=self.dtype)
idx_dtype = get_index_dtype(maxval=max(self.shape))
data = np.asarray(_list(self.values()), dtype=self.dtype)
indices = np.asarray(_list(self.keys()), dtype=idx_dtype).T
A = coo_matrix((data, indices), shape=self.shape, dtype=self.dtype)
A.has_canonical_format = True
return A
tocoo.__doc__ = spmatrix.tocoo.__doc__
def todok(self, copy=False):
if copy:
return self.copy()
else:
return self
todok.__doc__ = spmatrix.todok.__doc__
def tocsc(self, copy=False):
return self.tocoo(copy=False).tocsc(copy=copy)
tocsc.__doc__ = spmatrix.tocsc.__doc__
def resize(self, shape):
""" Resize the matrix in-place to dimensions given by 'shape'.
Any non-zero elements that lie outside the new shape are removed.
"""
if not isshape(shape):
raise TypeError("dimensions must be a 2-tuple of positive"
" integers")
newM, newN = shape
M, N = self.shape
if newM < M or newN < N:
# Remove all elements outside new dimensions
for (i, j) in list(self.keys()):
if i >= newM or j >= newN:
del self[i, j]
self._shape = shape
def _list(x):
"""Force x to a list."""
if not isinstance(x, list):
x = list(x)
return x
def isspmatrix_dok(x):
return isinstance(x, dok_matrix)
def _prod(x):
"""Product of a list of numbers; ~40x faster vs np.prod for Python tuples"""
if len(x) == 0:
return 1
return functools.reduce(operator.mul, x)
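# Illustrative resize() semantics (consistent with the docstring above):
#
# >>> m = dok_matrix((3, 3))
# >>> m[2, 2] = 7.0
# >>> m.resize((2, 2))   # (2, 2) now lies outside the new shape
# >>> m.nnz
# 0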
| yuanagain/seniorthesis | venv/lib/python2.7/site-packages/scipy/sparse/dok.py | Python | mit | 17,654 | 0.000906 |
# Copyright 2018 ACSONE SA/NV
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from . import test_accounting
from . import test_donation
| mozaik-association/mozaik | mozaik_account/tests/__init__.py | Python | agpl-3.0 | 153 | 0 |
#!/usr/bin/env python3
#
# Copyright 2021 The Cobalt Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Wrapper script to run arbitrary bash files from GN.
This script should be used only when absolutely necessary and never in a
cross-platform way (that is, it should only be used for an action on a
particular platform, not an platform-independent target).
"""
import logging
import subprocess
import sys
if __name__ == '__main__':
logging_format = '[%(levelname)s:%(filename)s:%(lineno)s] %(message)s'
logging.basicConfig(
level=logging.INFO, format=logging_format, datefmt='%H:%M:%S')
logging.warning('Calling a bash process during GN build. '
'Avoid doing this whenever possible.')
sys.exit(subprocess.call(sys.argv[1:]))
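# Invocation sketch (hypothetical command, not from the original source):
#
#   python3 starboard/build/run_bash.py bash -c "echo hello"
#
# logs the warning above and exits with the return code of the wrapped
# `bash -c "echo hello"` command.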
| youtube/cobalt | starboard/build/run_bash.py | Python | bsd-3-clause | 1,290 | 0.003101 |
"""Added a table for timed commands
Revision ID: 4db5dc4bc98
Revises: 514f4b9bc74
Create Date: 2015-12-23 00:00:59.156496
"""
# revision identifiers, used by Alembic.
revision = '4db5dc4bc98'
down_revision = '514f4b9bc74'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('tb_timer',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=256), nullable=False),
sa.Column('action', mysql.TEXT(), nullable=False),
sa.Column('interval_online', sa.Integer(), nullable=False),
sa.Column('interval_offline', sa.Integer(), nullable=False),
sa.Column('enabled', sa.Boolean(), nullable=False),
sa.PrimaryKeyConstraint('id')
)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_table('tb_timer')
### end Alembic commands ###
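# Standard Alembic CLI usage for this revision (assumes a configured
# alembic.ini pointing at the target database):
#
#   alembic upgrade 4db5dc4bc98     # apply: creates tb_timer
#   alembic downgrade 514f4b9bc74   # revert: drops tb_timer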
| gigglearrows/anniesbot | alembic/versions/4db5dc4bc98_added_a_table_for_timed_commands.py | Python | mit | 1,034 | 0.014507 |
# Copyright 2013 OpenStack Foundation
# Copyright 2013 Rackspace Hosting
# Copyright 2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from collections import deque
from proboscis import test
from proboscis import asserts
from proboscis import after_class
from proboscis import before_class
from trove.tests.config import CONFIG
from trove.tests.api.instances import instance_info
from trove.tests.api.instances import VOLUME_SUPPORT
from trove.tests.util.users import Requirements
from trove.tests.util import assert_contains
from trove.tests.util import create_dbaas_client
from trove.common.utils import poll_until
@test(groups=["dbaas.api.mgmt.malformed_json"])
class MalformedJson(object):
@before_class
def setUp(self):
self.reqs = Requirements(is_admin=False)
self.user = CONFIG.users.find_user(self.reqs)
self.dbaas = create_dbaas_client(self.user)
volume = None
if VOLUME_SUPPORT:
volume = {"size": 1}
self.instance = self.dbaas.instances.create(
name="qe_instance",
flavor_id=instance_info.dbaas_flavor_href,
volume=volume,
databases=[{"name": "firstdb", "character_set": "latin2",
"collate": "latin2_general_ci"}])
@after_class
def tearDown(self):
self.dbaas.instances.delete(self.instance)
@test
def test_bad_instance_data(self):
databases = "foo"
users = "bar"
try:
self.dbaas.instances.create("bad_instance", 3, 3,
databases=databases, users=users)
except Exception as e:
resp, body = self.dbaas.client.last_response
httpCode = resp.status
asserts.assert_equal(httpCode, 400,
"Create instance failed with code %s,"
" exception %s" % (httpCode, e))
databases = "u'foo'"
users = "u'bar'"
assert_contains(
e.message,
["Validation error:",
"instance['databases'] %s is not of type 'array'" % databases,
"instance['users'] %s is not of type 'array'" % users,
"instance['volume'] 3 is not of type 'object'"])
@test
def test_bad_database_data(self):
_bad_db_data = "{foo}"
try:
self.dbaas.databases.create(self.instance.id, _bad_db_data)
except Exception as e:
resp, body = self.dbaas.client.last_response
httpCode = resp.status
asserts.assert_equal(httpCode, 400,
"Create database failed with code %s, "
"exception %s" % (httpCode, e))
_bad_db_data = "u'{foo}'"
asserts.assert_equal(e.message,
"Validation error: "
"databases %s is not of type 'array'" %
_bad_db_data)
@test
def test_bad_user_data(self):
def format_path(values):
values = list(values)
msg = "%s%s" % (values[0],
''.join(['[%r]' % i for i in values[1:]]))
return msg
_user = []
_user_name = "F343jasdf"
_user.append({"name12": _user_name,
"password12": "password"})
try:
self.dbaas.users.create(self.instance.id, _user)
except Exception as e:
resp, body = self.dbaas.client.last_response
httpCode = resp.status
asserts.assert_equal(httpCode, 400,
"Create user failed with code %s, "
"exception %s" % (httpCode, e))
err_1 = format_path(deque(('users', 0)))
assert_contains(
e.message,
["Validation error:",
"%(err_1)s 'name' is a required property" % {'err_1': err_1},
"%(err_1)s 'password' is a required property"
% {'err_1': err_1}])
@test
def test_bad_resize_instance_data(self):
def _check_instance_status():
inst = self.dbaas.instances.get(self.instance)
if inst.status == "ACTIVE":
return True
else:
return False
poll_until(_check_instance_status)
try:
self.dbaas.instances.resize_instance(self.instance.id, "bad data")
except Exception as e:
resp, body = self.dbaas.client.last_response
httpCode = resp.status
asserts.assert_equal(httpCode, 400,
"Resize instance failed with code %s, "
"exception %s" % (httpCode, e))
@test
def test_bad_resize_vol_data(self):
def _check_instance_status():
inst = self.dbaas.instances.get(self.instance)
if inst.status == "ACTIVE":
return True
else:
return False
poll_until(_check_instance_status)
data = "bad data"
try:
self.dbaas.instances.resize_volume(self.instance.id, data)
except Exception as e:
resp, body = self.dbaas.client.last_response
httpCode = resp.status
asserts.assert_equal(httpCode, 400,
"Resize instance failed with code %s, "
"exception %s" % (httpCode, e))
data = "u'bad data'"
assert_contains(
e.message,
["Validation error:",
"resize['volume']['size'] %s is not valid under "
"any of the given schemas" % data,
"%s is not of type 'integer'" % data,
"%s does not match '[0-9]+'" % data])
@test
def test_bad_change_user_password(self):
password = ""
users = [{"name": password}]
def _check_instance_status():
inst = self.dbaas.instances.get(self.instance)
if inst.status == "ACTIVE":
return True
else:
return False
poll_until(_check_instance_status)
try:
self.dbaas.users.change_passwords(self.instance, users)
except Exception as e:
resp, body = self.dbaas.client.last_response
httpCode = resp.status
asserts.assert_equal(httpCode, 400,
"Change usr/passwd failed with code %s, "
"exception %s" % (httpCode, e))
password = "u''"
assert_contains(
e.message,
["Validation error: users[0] 'password' "
"is a required property",
"users[0]['name'] %s is too short" % password,
"users[0]['name'] %s does not match "
"'^.*[0-9a-zA-Z]+.*$'" % password])
@test
def test_bad_grant_user_access(self):
dbs = []
def _check_instance_status():
inst = self.dbaas.instances.get(self.instance)
if inst.status == "ACTIVE":
return True
else:
return False
poll_until(_check_instance_status)
try:
self.dbaas.users.grant(self.instance, self.user, dbs)
except Exception as e:
resp, body = self.dbaas.client.last_response
httpCode = resp.status
asserts.assert_equal(httpCode, 400,
"Grant user access failed with code %s, "
"exception %s" % (httpCode, e))
@test
def test_bad_revoke_user_access(self):
db = ""
def _check_instance_status():
inst = self.dbaas.instances.get(self.instance)
if inst.status == "ACTIVE":
return True
else:
return False
poll_until(_check_instance_status)
try:
self.dbaas.users.revoke(self.instance, self.user, db)
except Exception as e:
resp, body = self.dbaas.client.last_response
httpCode = resp.status
asserts.assert_equal(httpCode, 404,
"Revoke user access failed w/code %s, "
"exception %s" % (httpCode, e))
asserts.assert_equal(e.message, "The resource could not be found.")
@test
def test_bad_body_flavorid_create_instance(self):
flavorId = ["?"]
try:
self.dbaas.instances.create("test_instance",
flavorId,
2)
except Exception as e:
resp, body = self.dbaas.client.last_response
httpCode = resp.status
asserts.assert_equal(httpCode, 400,
"Create instance failed with code %s, "
"exception %s" % (httpCode, e))
flavorId = [u'?']
assert_contains(
e.message,
["Validation error:",
"instance['flavorRef'] %s is not valid "
"under any of the given schemas" % flavorId,
"%s is not of type 'string'" % flavorId,
"%s is not of type 'string'" % flavorId,
"%s is not of type 'integer'" % flavorId,
"instance['volume'] 2 is not of type 'object'"])
@test
def test_bad_body_datastore_create_instance(self):
datastore = "*"
datastore_version = "*"
try:
self.dbaas.instances.create("test_instance",
3, {"size": 2},
datastore=datastore,
datastore_version=datastore_version)
except Exception as e:
resp, body = self.dbaas.client.last_response
httpCode = resp.status
asserts.assert_equal(httpCode, 400,
"Create instance failed with code %s, "
"exception %s" % (httpCode, e))
assert_contains(
e.message,
["Validation error:",
"instance['datastore']['type']"
" u'%s' does not match"
" '^.*[0-9a-zA-Z]+.*$'" % datastore,
"instance['datastore']['version'] u'%s' "
"does not match '^.*[0-9a-zA-Z]+.*$'" % datastore_version])
@test
def test_bad_body_volsize_create_instance(self):
volsize = "h3ll0"
try:
self.dbaas.instances.create("test_instance",
"1",
volsize)
except Exception as e:
resp, body = self.dbaas.client.last_response
httpCode = resp.status
asserts.assert_equal(httpCode, 400,
"Create instance failed with code %s, "
"exception %s" % (httpCode, e))
volsize = "u'h3ll0'"
asserts.assert_equal(e.message,
"Validation error: "
"instance['volume'] %s is not of "
"type 'object'" % volsize)
| changsimon/trove | trove/tests/api/mgmt/malformed_json.py | Python | apache-2.0 | 12,085 | 0 |
# coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
import unittest
from abc import abstractmethod
from contextlib import contextmanager
from pants.base.scm_project_tree import ScmProjectTree
from pants.engine.fs import (Dir, DirectoryListing, Dirs, FileContent, Files, Link, Path, PathGlobs,
ReadLink, Stat, Stats)
from pants.engine.nodes import FilesystemNode
from pants.util.meta import AbstractClass
from pants_test.engine.scheduler_test_base import SchedulerTestBase
from pants_test.testutils.git_util import MIN_REQUIRED_GIT_VERSION, git_version, initialize_repo
class FSTestBase(SchedulerTestBase, AbstractClass):
_original_src = os.path.join(os.path.dirname(__file__), 'examples/fs_test')
@abstractmethod
@contextmanager
def mk_project_tree(self, build_root_src):
"""Construct a ProjectTree for the given src path."""
pass
def specs(self, ftype, relative_to, *filespecs):
return PathGlobs.create_from_specs(ftype, relative_to, filespecs)
def assert_walk(self, ftype, filespecs, files):
with self.mk_project_tree(self._original_src) as project_tree:
scheduler, storage = self.mk_scheduler(project_tree=project_tree)
result = self.execute(scheduler, storage, Stat, self.specs(ftype, '', *filespecs))[0]
self.assertEquals(set(files), set([p.path for p in result]))
def assert_content(self, filespecs, expected_content):
with self.mk_project_tree(self._original_src) as project_tree:
scheduler, storage = self.mk_scheduler(project_tree=project_tree)
result = self.execute(scheduler, storage, FileContent, self.specs(Files, '', *filespecs))[0]
def validate(e):
self.assertEquals(type(e), FileContent)
return True
actual_content = {f.path: f.content for f in result if validate(f)}
self.assertEquals(expected_content, actual_content)
def assert_fsnodes(self, ftype, filespecs, subject_product_pairs):
with self.mk_project_tree(self._original_src) as project_tree:
scheduler, storage = self.mk_scheduler(project_tree=project_tree)
request = self.execute_request(scheduler, storage, Stat, self.specs(ftype, '', *filespecs))
# Validate that FilesystemNodes for exactly the given subjects are reachable under this
# request.
fs_nodes = [n for n, _ in scheduler.product_graph.walk(roots=request.roots)
if type(n) is FilesystemNode]
self.assertEquals(set((n.subject, n.product) for n in fs_nodes), set(subject_product_pairs))
def test_walk_literal(self):
self.assert_walk(Files, ['4.txt'], ['4.txt'])
self.assert_walk(Files, ['a/b/1.txt', 'a/b/2'], ['a/b/1.txt', 'a/b/2'])
self.assert_walk(Files, ['c.ln/2'], ['a/b/2'])
self.assert_walk(Files, ['d.ln/b/1.txt'], ['a/b/1.txt'])
self.assert_walk(Files, ['a/3.txt'], ['a/3.txt'])
self.assert_walk(Files, ['z.txt'], [])
def test_walk_literal_directory(self):
self.assert_walk(Dirs, ['c.ln'], ['a/b'])
self.assert_walk(Dirs, ['a'], ['a'])
self.assert_walk(Dirs, ['a/b'], ['a/b'])
self.assert_walk(Dirs, ['z'], [])
self.assert_walk(Dirs, ['4.txt', 'a/3.txt'], [])
def test_walk_siblings(self):
self.assert_walk(Files, ['*.txt'], ['4.txt'])
self.assert_walk(Files, ['a/b/*.txt'], ['a/b/1.txt'])
self.assert_walk(Files, ['c.ln/*.txt'], ['a/b/1.txt'])
self.assert_walk(Files, ['a/b/*'], ['a/b/1.txt', 'a/b/2'])
self.assert_walk(Files, ['*/0.txt'], [])
def test_walk_recursive(self):
self.assert_walk(Files, ['**/*.txt.ln'], ['4.txt'])
self.assert_walk(Files, ['**/*.txt'], ['a/3.txt', 'a/b/1.txt'])
self.assert_walk(Files, ['*.txt', '**/*.txt'], ['a/3.txt', 'a/b/1.txt', '4.txt'])
self.assert_walk(Files, ['*', '**/*'], ['a/3.txt', 'a/b/1.txt', '4.txt', 'a/b/2'])
self.assert_walk(Files, ['**/3.t*t'], ['a/3.txt'])
self.assert_walk(Files, ['**/*.zzz'], [])
def test_walk_recursive_directory(self):
self.assert_walk(Dirs, ['*'], ['a', 'a/b'])
self.assert_walk(Dirs, ['*/*'], ['a/b'])
self.assert_walk(Dirs, ['**/*'], ['a/b'])
self.assert_walk(Dirs, ['*/*/*'], [])
def test_files_content_literal(self):
self.assert_content(['4.txt'], {'4.txt': 'four\n'})
self.assert_content(['a/4.txt.ln'], {'4.txt': 'four\n'})
def test_files_content_directory(self):
with self.assertRaises(Exception):
self.assert_content(['a/b/'], {'a/b/': 'nope\n'})
with self.assertRaises(Exception):
self.assert_content(['a/b'], {'a/b': 'nope\n'})
def test_nodes_file(self):
self.assert_fsnodes(Files, ['4.txt'], [
(Path('4.txt'), Stats),
])
def test_nodes_symlink_file(self):
self.assert_fsnodes(Files, ['c.ln/2'], [
(Link('c.ln'), ReadLink),
(Path('c.ln'), Stats),
(Path('a/b'), Stats),
(Path('a/b/2'), Stats),
])
self.assert_fsnodes(Files, ['d.ln/b/1.txt'], [
(Path('d.ln'), Stats),
(Link('d.ln'), ReadLink),
(Path('a'), Stats),
(Path('a/b'), Stats),
(Path('a/b/1.txt'), Stats),
])
def test_nodes_symlink_globbed_dir(self):
self.assert_fsnodes(Files, ['*/2'], [
# Glob the root.
(Dir(''), DirectoryListing),
# Stat each entry.
(Path('a'), Stats),
(Path('c.ln'), Stats),
(Path('d.ln'), Stats),
(Path('4.txt'), Stats),
# Read links to determine whether they're actually directories.
(Link('c.ln'), ReadLink),
(Link('d.ln'), ReadLink),
# Stat the detination of one link (the other was already stat'd during the initial list).
(Path('a/b'), Stats),
# Look up the literal in each path.
(Path('a/b/2'), Stats),
(Path('a/2'), Stats),
])
def test_nodes_symlink_globbed_file(self):
self.assert_fsnodes(Files, ['d.ln/b/*.txt'], [
# NB: Needs to stat every path on the way down to track whether
# it is traversing a symlink.
(Path('d.ln'), Stats),
(Link('d.ln'), ReadLink),
(Path('a'), Stats),
(Path('a/b'), Stats),
(Dir('a/b'), DirectoryListing),
(Path('a/b/2'), Stats),
(Path('a/b/1.txt'), Stats),
])
class PosixFSTest(unittest.TestCase, FSTestBase):
@contextmanager
def mk_project_tree(self, build_root_src):
yield self.mk_fs_tree(build_root_src)
@unittest.skipIf(git_version() < MIN_REQUIRED_GIT_VERSION,
'The GitTest requires git >= {}.'.format(MIN_REQUIRED_GIT_VERSION))
class GitFSTest(unittest.TestCase, FSTestBase):
@contextmanager
def mk_project_tree(self, build_root_src):
# Use mk_fs_tree only to feed the files for the git repo, not using its FileSystemProjectTree.
worktree = self.mk_fs_tree(build_root_src).build_root
with initialize_repo(worktree) as git_repo:
yield ScmProjectTree(worktree, git_repo, 'HEAD')
@unittest.skip('https://github.com/pantsbuild/pants/issues/3281')
def test_walk_recursive(self):
super(GitFSTest, self).test_walk_recursive()
@unittest.skip('https://github.com/pantsbuild/pants/issues/3281')
def test_files_content_literal(self):
super(GitFSTest, self).test_files_content_literal()
| ity/pants | tests/python/pants_test/engine/test_fs.py | Python | apache-2.0 | 7,436 | 0.007934 |
#
# Copyright (C) 2006 - 2007 Michael 'Mickey' Lauer
# Copyright (C) 2006 - 2007 Richard Purdie
#
# SPDX-License-Identifier: GPL-2.0-only
#
"""
Use this class to fork off a thread to receive event callbacks from the bitbake
server and queue them for the UI to process. This process must be used to avoid
client/server deadlocks.
"""
import socket, threading, pickle, logging
import collections.abc
from xmlrpc.server import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
import bb
logger = logging.getLogger(__name__)
class BBUIEventQueue:
def __init__(self, BBServer, clientinfo=("localhost, 0")):
self.eventQueue = []
self.eventQueueLock = threading.Lock()
self.eventQueueNotify = threading.Event()
self.BBServer = BBServer
self.clientinfo = clientinfo
server = UIXMLRPCServer(self.clientinfo)
self.host, self.port = server.socket.getsockname()
server.register_function( self.system_quit, "event.quit" )
server.register_function( self.send_event, "event.sendpickle" )
server.socket.settimeout(1)
self.EventHandle = None
# the event handler registration may fail here due to cooker being in invalid state
# this is a transient situation, and we should retry a couple of times before
# giving up
for count_tries in range(5):
ret = self.BBServer.registerEventHandler(self.host, self.port)
if isinstance(ret, collections.abc.Iterable):
self.EventHandle, error = ret
else:
self.EventHandle = ret
error = ""
if self.EventHandle != None:
break
errmsg = "Could not register UI event handler. Error: %s, host %s, "\
"port %d" % (error, self.host, self.port)
bb.warn("%s, retry" % errmsg)
import time
time.sleep(1)
else:
raise Exception(errmsg)
self.server = server
self.t = threading.Thread()
self.t.setDaemon(True)
self.t.run = self.startCallbackHandler
self.t.start()
def getEvent(self):
self.eventQueueLock.acquire()
if len(self.eventQueue) == 0:
self.eventQueueLock.release()
return None
item = self.eventQueue.pop(0)
if len(self.eventQueue) == 0:
self.eventQueueNotify.clear()
self.eventQueueLock.release()
return item
def waitEvent(self, delay):
self.eventQueueNotify.wait(delay)
return self.getEvent()
def queue_event(self, event):
self.eventQueueLock.acquire()
self.eventQueue.append(event)
self.eventQueueNotify.set()
self.eventQueueLock.release()
def send_event(self, event):
self.queue_event(pickle.loads(event))
def startCallbackHandler(self):
self.server.timeout = 1
bb.utils.set_process_name("UIEventQueue")
while not self.server.quit:
try:
self.server.handle_request()
except Exception as e:
import traceback
logger.error("BBUIEventQueue.startCallbackHandler: Exception while trying to handle request: %s\n%s" % (e, traceback.format_exc()))
self.server.server_close()
def system_quit( self ):
"""
Shut down the callback thread
"""
try:
self.BBServer.unregisterEventHandler(self.EventHandle)
except:
pass
self.server.quit = True
class UIXMLRPCServer (SimpleXMLRPCServer):
def __init__( self, interface ):
self.quit = False
SimpleXMLRPCServer.__init__( self,
interface,
requestHandler=SimpleXMLRPCRequestHandler,
logRequests=False, allow_none=True, use_builtin_types=True)
def get_request(self):
while not self.quit:
try:
sock, addr = self.socket.accept()
sock.settimeout(1)
return (sock, addr)
except socket.timeout:
pass
return (None, None)
def close_request(self, request):
if request is None:
return
SimpleXMLRPCServer.close_request(self, request)
def process_request(self, request, client_address):
if request is None:
return
SimpleXMLRPCServer.process_request(self, request, client_address)
| schleichdi2/OPENNFR-6.3-CORE | bitbake/lib/bb/ui/uievent.py | Python | gpl-2.0 | 4,475 | 0.00514 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""File used to unit test the pacifica archive interface."""
import unittest
import os
from stat import ST_MODE
from six import PY2
from pacifica.archiveinterface.archive_utils import bytes_type
from pacifica.archiveinterface.backends.posix.archive import PosixBackendArchive
import pacifica.archiveinterface.config as pa_config
from .common_setup_test import SetupTearDown
class TestPosixBackendArchive(unittest.TestCase, SetupTearDown):
"""Test the Posix backend archive."""
def test_posix_backend_create(self):
"""Test creating a posix backend."""
backend = PosixBackendArchive('/tmp')
self.assertTrue(isinstance(backend, PosixBackendArchive))
# easiest way to unit test is look at class variable
# pylint: disable=protected-access
self.assertEqual(backend._prefix, '/tmp')
# pylint: enable=protected-access
def test_posix_backend_open(self):
"""Test opening a file from posix backend."""
filepath = '1234'
mode = 'w'
backend = PosixBackendArchive('/tmp')
my_file = backend.open(filepath, mode)
self.assertTrue(isinstance(my_file, PosixBackendArchive))
# easiest way to unit test is look at class variable
# pylint: disable=protected-access
self.assertEqual(backend._file.__class__.__name__, 'ExtendedFile')
# pylint: enable=protected-access
my_file.close()
def test_posix_backend_stage(self):
"""Test staging a file from posix backend."""
filepath = '1234'
mode = 'w'
backend = PosixBackendArchive('/tmp')
my_file = backend.open(filepath, mode)
my_file.stage()
# pylint: disable=protected-access
self.assertTrue(my_file._file._staged)
# pylint: enable=protected-access
my_file.close()
def test_posix_backend_open_twice(self):
"""Test opening a file from posix backend twice."""
filepath = '1234'
mode = 'w'
backend = PosixBackendArchive('/tmp')
my_file = backend.open(filepath, mode)
my_file = backend.open(filepath, mode)
self.assertTrue(isinstance(my_file, PosixBackendArchive))
# easiest way to unit test is look at class variable
# pylint: disable=protected-access
self.assertEqual(backend._file.__class__.__name__, 'ExtendedFile')
# pylint: enable=protected-access
my_file.close()
def test_posix_backend_open_id2f(self):
"""Test opening a file from posix backend twice."""
backend = PosixBackendArchive('/tmp')
mode = 'w'
my_file = backend.open('/a/b/d', mode)
temp_cfg_file = pa_config.CONFIG_FILE
pa_config.CONFIG_FILE = os.path.join(os.path.dirname(__file__), 'test_configs', 'posix-id2filename.cfg')
backend = PosixBackendArchive('/tmp')
my_file = backend.open(12345, mode)
my_file.write('this is file 12345')
my_file.close()
# pylint: disable=protected-access
my_file.patch(123456789, '/tmp{}'.format(my_file._id2filename(12345)))
# pylint: enable=protected-access
my_file = backend.open(123456789, 'r')
text = my_file.read(-1)
pa_config.CONFIG_FILE = temp_cfg_file
self.assertTrue(isinstance(my_file, PosixBackendArchive))
self.assertEqual(bytes_type('this is file 12345'), text)
my_file.close()
def test_posix_backend_close(self):
"""Test closing a file from posix backend."""
filepath = '1234'
mode = 'w'
backend = PosixBackendArchive('/tmp/')
my_file = backend.open(filepath, mode)
        # the easiest way to unit test this is to inspect the class variable
# pylint: disable=protected-access
self.assertEqual(backend._file.__class__.__name__, 'ExtendedFile')
my_file.close()
self.assertEqual(backend._file, None)
# pylint: enable=protected-access
def test_posix_backend_write(self):
"""Test writing a file from posix backend."""
filepath = '1234'
mode = 'w'
backend = PosixBackendArchive('/tmp/')
my_file = backend.open(filepath, mode)
error = my_file.write('i am a test string')
if PY2:
self.assertEqual(error, None)
else:
self.assertEqual(error, 18)
my_file.close()
def test_posix_file_mod_time(self):
"""Test the correct setting of a file mod time."""
filepath = '1234'
mode = 'w'
backend = PosixBackendArchive('/tmp/')
my_file = backend.open(filepath, mode)
my_file.close()
my_file.set_mod_time(1000000)
my_file = backend.open(filepath, 'r')
status = my_file.status()
my_file.close()
self.assertEqual(status.mtime, 1000000)
def test_posix_file_permissions(self):
"""Test the correct setting of a file mod time."""
filepath = '12345'
mode = 'w'
backend = PosixBackendArchive('/tmp/')
my_file = backend.open(filepath, mode)
my_file.close()
my_file.set_file_permissions()
statinfo = oct(os.stat('/tmp/12345')[ST_MODE])[-3:]
self.assertEqual(statinfo, '444')
def test_posix_backend_read(self):
"""Test reading a file from posix backend."""
self.test_posix_backend_write()
filepath = '1234'
mode = 'r'
backend = PosixBackendArchive('/tmp/')
my_file = backend.open(filepath, mode)
buf = my_file.read(-1)
self.assertEqual(buf, bytes_type('i am a test string'))
my_file.close()
def test_patch(self):
"""Test patching file."""
old_path = '/tmp/1234'
backend = PosixBackendArchive('/tmp')
my_file = backend.open('1234', 'w')
my_file.close()
backend.patch('5678', '/tmp/1234')
        # patch() raises on failure, so there is nothing further to assert here
self.assertEqual(old_path, '/tmp/1234')
def test_seek(self):
"""Test patching file."""
backend = PosixBackendArchive('/tmp')
my_file = backend.open('1234', 'w')
my_file.write('something')
my_file.close()
my_file = backend.open('1234', 'r')
my_file.seek(4)
data = my_file.read(-1).decode('utf8')
self.assertEqual(data, 'thing')
| dmlb2000/pacifica-archiveinterface | tests/posix_test.py | Python | lgpl-3.0 | 6,379 | 0.000314 |
# coding: utf-8
"""
Each new term in the Fibonacci sequence is generated by adding the previous two
terms. By starting with 1 and 2, the first 10 terms will be:
1, 2, 3, 5, 8, 13, 21, 34, 55, 89, ...
By considering the terms in the Fibonacci sequence whose values do not exceed
four million, find the sum of the even-valued terms.
"""
import itertools
import time
def oneliner():
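    # NOTE: fib() below is recomputed recursively for every term with no
    # memoization, so this one-liner runs in exponential time; it is here
    # for style, not speed.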
fib = lambda x: fib(x-1)+fib(x-2) if x > 2 else 1
return sum(
fib(z) for z
in itertools.takewhile(
lambda n: fib(n) <= 4*10**6,
itertools.count()
)
if fib(z) % 2 == 0
)
def impl1():
oldfib = 1
fib = 1
even = []
while fib <= 4*10**6:
if fib % 2 == 0:
even.append(fib)
fib, oldfib = fib + oldfib, fib
return sum(even)
def impl2():
prev, fib = 1, 1
_sum = 0
while fib <= 4*10**6:
if fib % 2 == 0:
_sum += fib
fib, prev = fib + prev, fib
return _sum
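def impl3():
    # A sketch of a further optimisation (not one of the original solutions
    # above): every third Fibonacci number is even, and the even terms
    # 2, 8, 34, 144, ... satisfy E(k) = 4*E(k-1) + E(k-2), so the even terms
    # can be generated directly without any parity tests. It can be timed
    # alongside the others via `print timeit(impl3)`.
    even, nxt = 2, 8
    _sum = 0
    while even <= 4*10**6:
        _sum += even
        even, nxt = nxt, 4*nxt + even
    return _sum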
if __name__ == "__main__":
def timeit(function):
t1 = time.time()
output = function()
t2 = time.time()
return output, t2-t1
print timeit(impl1)
print timeit(impl2)
print timeit(oneliner)
| artcz/euler | problems/02/2.py | Python | mit | 1,241 | 0.001612 |
# -*- coding: utf-8 -*-
"""The QCOW image file-like object."""
import pyqcow
from dfvfs import dependencies
from dfvfs.file_io import file_object_io
from dfvfs.lib import errors
from dfvfs.resolver import resolver
dependencies.CheckModuleVersion(u'pyqcow')
class QcowFile(file_object_io.FileObjectIO):
"""Class that implements a file-like object using pyqcow."""
def _OpenFileObject(self, path_spec):
"""Opens the file-like object defined by path specification.
Args:
path_spec: the path specification (instance of path.PathSpec).
Returns:
A file-like object.
Raises:
PathSpecError: if the path specification is incorrect.
"""
if not path_spec.HasParent():
raise errors.PathSpecError(
u'Unsupported path specification without parent.')
file_object = resolver.Resolver.OpenFileObject(
path_spec.parent, resolver_context=self._resolver_context)
qcow_file = pyqcow.file()
qcow_file.open_file_object(file_object)
return qcow_file
def get_size(self):
"""Returns the size of the file-like object.
Raises:
IOError: if the file-like object has not been opened.
"""
if not self._is_open:
raise IOError(u'Not opened.')
return self._file_object.get_media_size()
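# A minimal usage sketch, kept as comments because the path specification
# class names and constructor arguments are assumptions about the wider
# dfvfs API rather than something this module defines:
#
#   from dfvfs.path import os_path_spec, qcow_path_spec
#
#   os_spec = os_path_spec.OSPathSpec(location=u'/tmp/image.qcow2')
#   qcow_spec = qcow_path_spec.QcowPathSpec(parent=os_spec)
#   qcow_file = resolver.Resolver.OpenFileObject(qcow_spec)
#   media_size = qcow_file.get_size()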
| jorik041/dfvfs | dfvfs/file_io/qcow_file_io.py | Python | apache-2.0 | 1,285 | 0.003891 |
#!/home/jt/code/armyguys/venv/bin/python3.4
# $Id: rst2xetex.py 7038 2011-05-19 09:12:02Z milde $
# Author: Guenter Milde
# Copyright: This module has been placed in the public domain.
"""
A minimal front end to the Docutils Publisher, producing XeLaTeX source code.
"""
try:
import locale
locale.setlocale(locale.LC_ALL, '')
except:
pass
from docutils.core import publish_cmdline
description = ('Generates XeLaTeX documents from standalone reStructuredText '
'sources. '
'Reads from <source> (default is stdin) and writes to '
'<destination> (default is stdout). See '
'<http://docutils.sourceforge.net/docs/user/latex.html> for '
'the full reference.')
publish_cmdline(writer_name='xetex', description=description)
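# Example invocation from a shell (file names are illustrative):
#   rst2xetex.py document.rst document.tex
# or, using the stdin/stdout defaults described above:
#   rst2xetex.py < document.rst > document.tex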
| jtpaasch/armyguys | venv/bin/rst2xetex.py | Python | mit | 811 | 0.001233 |
# Copyright 2011 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""The volumes snapshots api."""
from oslo_utils import strutils
import webob
from webob import exc
from cinder.api import common
from cinder.api.openstack import wsgi
from cinder.api import xmlutil
from cinder import exception
from cinder.i18n import _, _LI
from cinder.openstack.common import log as logging
from cinder import utils
from cinder import volume
LOG = logging.getLogger(__name__)
def _translate_snapshot_detail_view(context, snapshot):
"""Maps keys for snapshots details view."""
d = _translate_snapshot_summary_view(context, snapshot)
# NOTE(gagupta): No additional data / lookups at the moment
return d
def _translate_snapshot_summary_view(context, snapshot):
"""Maps keys for snapshots summary view."""
d = {}
d['id'] = snapshot['id']
d['created_at'] = snapshot['created_at']
d['display_name'] = snapshot['display_name']
d['display_description'] = snapshot['display_description']
d['volume_id'] = snapshot['volume_id']
d['status'] = snapshot['status']
d['size'] = snapshot['volume_size']
if snapshot.get('metadata') and isinstance(snapshot.get('metadata'),
dict):
d['metadata'] = snapshot['metadata']
else:
d['metadata'] = {}
return d
def make_snapshot(elem):
elem.set('id')
elem.set('status')
elem.set('size')
elem.set('created_at')
elem.set('display_name')
elem.set('display_description')
elem.set('volume_id')
elem.append(common.MetadataTemplate())
class SnapshotTemplate(xmlutil.TemplateBuilder):
def construct(self):
root = xmlutil.TemplateElement('snapshot', selector='snapshot')
make_snapshot(root)
return xmlutil.MasterTemplate(root, 1)
class SnapshotsTemplate(xmlutil.TemplateBuilder):
def construct(self):
root = xmlutil.TemplateElement('snapshots')
elem = xmlutil.SubTemplateElement(root, 'snapshot',
selector='snapshots')
make_snapshot(elem)
return xmlutil.MasterTemplate(root, 1)
class SnapshotsController(wsgi.Controller):
"""The Snapshots API controller for the OpenStack API."""
def __init__(self, ext_mgr=None):
self.volume_api = volume.API()
self.ext_mgr = ext_mgr
super(SnapshotsController, self).__init__()
@wsgi.serializers(xml=SnapshotTemplate)
def show(self, req, id):
"""Return data about the given snapshot."""
context = req.environ['cinder.context']
try:
snapshot = self.volume_api.get_snapshot(context, id)
req.cache_db_snapshot(snapshot)
except exception.NotFound:
raise exc.HTTPNotFound()
return {'snapshot': _translate_snapshot_detail_view(context, snapshot)}
def delete(self, req, id):
"""Delete a snapshot."""
context = req.environ['cinder.context']
LOG.info(_LI("Delete snapshot with id: %s"), id, context=context)
try:
snapshot = self.volume_api.get_snapshot(context, id)
self.volume_api.delete_snapshot(context, snapshot)
except exception.NotFound:
raise exc.HTTPNotFound()
return webob.Response(status_int=202)
@wsgi.serializers(xml=SnapshotsTemplate)
def index(self, req):
"""Returns a summary list of snapshots."""
return self._items(req, entity_maker=_translate_snapshot_summary_view)
@wsgi.serializers(xml=SnapshotsTemplate)
def detail(self, req):
"""Returns a detailed list of snapshots."""
return self._items(req, entity_maker=_translate_snapshot_detail_view)
def _items(self, req, entity_maker):
"""Returns a list of snapshots, transformed through entity_maker."""
context = req.environ['cinder.context']
        # pop out limit and offset; they are not search_opts
search_opts = req.GET.copy()
search_opts.pop('limit', None)
search_opts.pop('offset', None)
        # filter out invalid options
allowed_search_options = ('status', 'volume_id', 'display_name')
utils.remove_invalid_filter_options(context, search_opts,
allowed_search_options)
snapshots = self.volume_api.get_all_snapshots(context,
search_opts=search_opts)
limited_list = common.limited(snapshots, req)
req.cache_db_snapshots(limited_list)
res = [entity_maker(context, snapshot) for snapshot in limited_list]
return {'snapshots': res}
@wsgi.serializers(xml=SnapshotTemplate)
def create(self, req, body):
"""Creates a new snapshot."""
kwargs = {}
context = req.environ['cinder.context']
if not self.is_valid_body(body, 'snapshot'):
raise exc.HTTPUnprocessableEntity()
snapshot = body['snapshot']
kwargs['metadata'] = snapshot.get('metadata', None)
try:
volume_id = snapshot['volume_id']
except KeyError:
msg = _("'volume_id' must be specified")
raise exc.HTTPBadRequest(explanation=msg)
try:
volume = self.volume_api.get(context, volume_id)
except exception.NotFound:
raise exc.HTTPNotFound()
force = snapshot.get('force', False)
msg = _("Create snapshot from volume %s")
LOG.info(msg, volume_id, context=context)
if not utils.is_valid_boolstr(force):
msg = _("Invalid value '%s' for force. ") % force
raise exception.InvalidParameterValue(err=msg)
if strutils.bool_from_string(force):
new_snapshot = self.volume_api.create_snapshot_force(
context,
volume,
snapshot.get('display_name'),
snapshot.get('display_description'),
**kwargs)
else:
new_snapshot = self.volume_api.create_snapshot(
context,
volume,
snapshot.get('display_name'),
snapshot.get('display_description'),
**kwargs)
req.cache_db_snapshot(new_snapshot)
retval = _translate_snapshot_detail_view(context, new_snapshot)
return {'snapshot': retval}
@wsgi.serializers(xml=SnapshotTemplate)
def update(self, req, id, body):
"""Update a snapshot."""
context = req.environ['cinder.context']
if not body:
raise exc.HTTPUnprocessableEntity()
if 'snapshot' not in body:
raise exc.HTTPUnprocessableEntity()
snapshot = body['snapshot']
update_dict = {}
valid_update_keys = (
'display_name',
'display_description',
)
for key in valid_update_keys:
if key in snapshot:
update_dict[key] = snapshot[key]
try:
snapshot = self.volume_api.get_snapshot(context, id)
self.volume_api.update_snapshot(context, snapshot, update_dict)
except exception.NotFound:
raise exc.HTTPNotFound()
snapshot.update(update_dict)
req.cache_db_snapshot(snapshot)
return {'snapshot': _translate_snapshot_detail_view(context, snapshot)}
def create_resource(ext_mgr):
return wsgi.Resource(SnapshotsController(ext_mgr))
| Akrog/cinder | cinder/api/v1/snapshots.py | Python | apache-2.0 | 8,045 | 0 |
import xml.sax
import unittest
import test_utils
import xmlreader
import os
path = os.path.dirname(os.path.abspath(__file__))
class XmlReaderTestCase(unittest.TestCase):
def test_XmlDumpAllRevs(self):
pages = [r for r in xmlreader.XmlDump(path + "/data/article-pear.xml", allrevisions=True).parse()]
self.assertEquals(4, len(pages))
self.assertEquals(u"Automated conversion", pages[0].comment)
self.assertEquals(u"Pear", pages[0].title)
self.assertEquals(u"24278", pages[0].id)
self.assertTrue(pages[0].text.startswith('Pears are [[tree]]s of'))
self.assertEquals(u"Quercusrobur", pages[1].username)
self.assertEquals(u"Pear", pages[0].title)
def test_XmlDumpFirstRev(self):
pages = [r for r in xmlreader.XmlDump(path + "/data/article-pear.xml").parse()]
self.assertEquals(1, len(pages))
self.assertEquals(u"Automated conversion", pages[0].comment)
self.assertEquals(u"Pear", pages[0].title)
self.assertEquals(u"24278", pages[0].id)
self.assertTrue(pages[0].text.startswith('Pears are [[tree]]s of'))
self.assertTrue(not pages[0].isredirect)
def test_XmlDumpRedirect(self):
pages = [r for r in xmlreader.XmlDump(path + "/data/article-pyrus.xml").parse()]
self.assertTrue(pages[0].isredirect)
def test_MediaWikiXmlHandler(self):
handler = xmlreader.MediaWikiXmlHandler()
pages = []
def pageDone(page):
pages.append(page)
handler.setCallback(pageDone)
xml.sax.parse(path + "/data/article-pear.xml", handler)
self.assertEquals(u"Pear", pages[0].title)
self.assertEquals(4, len(pages))
self.assertNotEquals("", pages[0].comment)
if __name__ == '__main__':
unittest.main()
| races1986/SafeLanguage | CEM/tests/test_xmlreader.py | Python | epl-1.0 | 1,809 | 0.00387 |
# coding=utf-8
# Module stencil_24
# generated from Stencil 18pt
name = "Stencil 24"
start_char = '!'
end_char = chr(127)
char_height = 24
space_width = 12
gap_width = 3
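# Glyph data layout (inferred from the tables below): `bitmaps` stores each
# glyph row-major with ceil(width / 8) bytes per row and `char_height` rows
# per glyph, most significant bit = leftmost pixel; `descriptors` maps each
# character in start_char..end_char to a (pixel width, byte offset) pair.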
bitmaps = (
# @0 '!' (5 pixels wide)
0x00, #
0x00, #
0x00, #
0x00, #
0x70, # OOO
0xF8, # OOOOO
0xF8, # OOOOO
0xF8, # OOOOO
0xF8, # OOOOO
0x70, # OOO
0x70, # OOO
0x70, # OOO
0x30, # OO
0x20, # O
0x20, # O
0x70, # OOO
0xF8, # OOOOO
0xF8, # OOOOO
0xF8, # OOOOO
0x70, # OOO
0x00, #
0x00, #
0x00, #
0x00, #
# @24 '"' (8 pixels wide)
0x00, #
0x00, #
0x00, #
0x00, #
0xEF, # OOO OOOO
0xE7, # OOO OOO
0xE7, # OOO OOO
0xE7, # OOO OOO
0xE6, # OOO OO
0x66, # OO OO
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
# @48 '#' (14 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x06, 0x30, # OO OO
0x06, 0x30, # OO OO
0x06, 0x30, # OO OO
0x0E, 0x70, # OOO OOO
0x3F, 0x7C, # OOOOOO OOOOO
0x7F, 0x7C, # OOOOOOO OOOOO
0x0C, 0x60, # OO OO
0x0C, 0x60, # OO OO
0x18, 0xC0, # OO OO
0x18, 0xC0, # OO OO
0xFE, 0xF8, # OOOOOOO OOOOO
0xFE, 0xF0, # OOOOOOO OOOO
0x39, 0x80, # OOO OO
0x31, 0x80, # OO OO
0x31, 0x80, # OO OO
0x31, 0x80, # OO OO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @96 '$' (11 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x04, 0x00, # O
0x04, 0x00, # O
0x0F, 0x00, # OOOO
0x3D, 0x80, # OOOO OO
0x35, 0xC0, # OO O OOO
0x75, 0xC0, # OOO O OOO
0x79, 0xC0, # OOOO OOO
0x7E, 0x00, # OOOOOO
0x7F, 0x80, # OOOOOOOO
0x3F, 0xC0, # OOOOOOOO
0x3F, 0xC0, # OOOOOOOO
0x0F, 0xE0, # OOOOOOO
0x63, 0xE0, # OO OOOOO
0xF5, 0xE0, # OOOO O OOOO
0xF4, 0xE0, # OOOO O OOO
0xE4, 0xC0, # OOO O OO
0x75, 0xC0, # OOO O OOO
0x1F, 0x00, # OOOOO
0x04, 0x00, # O
0x04, 0x00, # O
0x00, 0x00, #
0x00, 0x00, #
# @144 '%' (17 pixels wide)
0x00, 0x00, 0x00, #
0x00, 0x00, 0x00, #
0x00, 0x00, 0x00, #
0x00, 0x00, 0x00, #
0x36, 0x08, 0x00, # OO OO O
0x63, 0x18, 0x00, # OO OO OO
0xE3, 0x90, 0x00, # OOO OOO O
0xE3, 0xB0, 0x00, # OOO OOO OO
0xE3, 0xA0, 0x00, # OOO OOO O
0x63, 0x60, 0x00, # OO OO OO
0x36, 0x40, 0x00, # OO OO O
0x00, 0xC0, 0x00, # OO
0x00, 0x80, 0x00, # O
0x01, 0x9E, 0x00, # OO OOOO
0x01, 0x33, 0x00, # O OO OO
0x03, 0x73, 0x80, # OO OOO OOO
0x02, 0x73, 0x80, # O OOO OOO
0x06, 0x73, 0x80, # OO OOO OOO
0x04, 0x33, 0x00, # O OO OO
0x0C, 0x1E, 0x00, # OO OOOO
0x00, 0x00, 0x00, #
0x00, 0x00, 0x00, #
0x00, 0x00, 0x00, #
0x00, 0x00, 0x00, #
# @216 '&' (15 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x07, 0xA0, # OOOO O
0x0F, 0x10, # OOOO O
0x1F, 0x10, # OOOOO O
0x1F, 0x10, # OOOOO O
0x1F, 0xA0, # OOOOOO O
0x1F, 0x80, # OOOOOO
0x0F, 0xCC, # OOOOOO OO
0x3F, 0xCC, # OOOOOOOO OO
0x77, 0xEC, # OOO OOOOOO OO
0x77, 0xE8, # OOO OOOOOO O
0xFB, 0xF0, # OOOOO OOOOOO
0xF9, 0xF8, # OOOOO OOOOOO
0xFD, 0xF8, # OOOOOO OOOOOO
0x7C, 0xFC, # OOOOO OOOOOO
0x7E, 0xFC, # OOOOOO OOOOOO
0x1F, 0x7E, # OOOOO OOOOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @264 ''' (3 pixels wide)
0x00, #
0x00, #
0x00, #
0x00, #
0xE0, # OOO
0xE0, # OOO
0xE0, # OOO
0xE0, # OOO
0xE0, # OOO
0x60, # OO
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
# @288 '(' (6 pixels wide)
0x00, #
0x00, #
0x00, #
0x00, #
0x0C, # OO
0x1C, # OOO
0x1C, # OOO
0x38, # OOO
0x78, # OOOO
0x78, # OOOO
0x78, # OOOO
0xF8, # OOOOO
0xF8, # OOOOO
0xF8, # OOOOO
0xF8, # OOOOO
0xF8, # OOOOO
0xF8, # OOOOO
0x78, # OOOO
0x78, # OOOO
0x78, # OOOO
0x38, # OOO
0x1C, # OOO
0x1C, # OOO
0x0C, # OO
# @312 ')' (6 pixels wide)
0x00, #
0x00, #
0x00, #
0x00, #
0xC0, # OO
0xE0, # OOO
0xE0, # OOO
0x70, # OOO
0x78, # OOOO
0x78, # OOOO
0x78, # OOOO
0x7C, # OOOOO
0x7C, # OOOOO
0x7C, # OOOOO
0x7C, # OOOOO
0x7C, # OOOOO
0x7C, # OOOOO
0x78, # OOOO
0x78, # OOOO
0x78, # OOOO
0x70, # OOO
0xE0, # OOO
0xE0, # OOO
0xC0, # OO
# @336 '*' (9 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x1E, 0x00, # OOOO
0x5F, 0x80, # O OOOOOO
0xEF, 0x80, # OOO OOOOO
0xFB, 0x80, # OOOOO OOO
0x0C, 0x00, # OO
0x37, 0x00, # OO OOO
0x77, 0x00, # OOO OOO
0x36, 0x00, # OO OO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @384 '+' (12 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x07, 0x00, # OOO
0x07, 0x00, # OOO
0x07, 0x00, # OOO
0x07, 0x00, # OOO
0x07, 0x00, # OOO
0xFF, 0xF0, # OOOOOOOOOOOO
0xFF, 0xF0, # OOOOOOOOOOOO
0xFF, 0xF0, # OOOOOOOOOOOO
0x07, 0x00, # OOO
0x07, 0x00, # OOO
0x07, 0x00, # OOO
0x07, 0x00, # OOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @432 ',' (4 pixels wide)
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x60, # OO
0xF0, # OOOO
0xF0, # OOOO
0xF0, # OOOO
0x70, # OOO
0x20, # O
0x60, # OO
0xC0, # OO
# @456 '-' (6 pixels wide)
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0xFC, # OOOOOO
0xFC, # OOOOOO
0xFC, # OOOOOO
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
# @480 '.' (5 pixels wide)
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x70, # OOO
0xF8, # OOOOO
0xF8, # OOOOO
0xF8, # OOOOO
0x70, # OOO
0x00, #
0x00, #
0x00, #
0x00, #
# @504 '/' (10 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x01, 0xC0, # OOO
0x01, 0xC0, # OOO
0x03, 0x80, # OOO
0x03, 0x80, # OOO
0x03, 0x80, # OOO
0x07, 0x00, # OOO
0x07, 0x00, # OOO
0x06, 0x00, # OO
0x0E, 0x00, # OOO
0x0E, 0x00, # OOO
0x0C, 0x00, # OO
0x1C, 0x00, # OOO
0x1C, 0x00, # OOO
0x38, 0x00, # OOO
0x38, 0x00, # OOO
0x38, 0x00, # OOO
0x70, 0x00, # OOO
0x70, 0x00, # OOO
0xF0, 0x00, # OOOO
0x00, 0x00, #
# @552 '0' (12 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x1B, 0x00, # OO OO
0x39, 0xC0, # OOO OOO
0x79, 0xC0, # OOOO OOO
0x79, 0xE0, # OOOO OOOO
0xF9, 0xE0, # OOOOO OOOO
0xF9, 0xF0, # OOOOO OOOOO
0xF9, 0xF0, # OOOOO OOOOO
0xF9, 0xF0, # OOOOO OOOOO
0xF9, 0xF0, # OOOOO OOOOO
0xF9, 0xF0, # OOOOO OOOOO
0xF9, 0xF0, # OOOOO OOOOO
0xF9, 0xE0, # OOOOO OOOO
0x79, 0xE0, # OOOO OOOO
0x79, 0xC0, # OOOO OOO
0x39, 0xC0, # OOO OOO
0x1B, 0x00, # OO OO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @600 '1' (6 pixels wide)
0x00, #
0x00, #
0x00, #
0x00, #
0xF8, # OOOOO
0x78, # OOOO
0x78, # OOOO
0x78, # OOOO
0x78, # OOOO
0x78, # OOOO
0x78, # OOOO
0x78, # OOOO
0x78, # OOOO
0x78, # OOOO
0x78, # OOOO
0x78, # OOOO
0x78, # OOOO
0x78, # OOOO
0x78, # OOOO
0xFC, # OOOOOO
0x00, #
0x00, #
0x00, #
0x00, #
# @624 '2' (11 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x37, 0x00, # OO OOO
0x63, 0xC0, # OO OOOO
0xF3, 0xE0, # OOOO OOOOO
0xFB, 0xE0, # OOOOO OOOOO
0xFB, 0xE0, # OOOOO OOOOO
0x73, 0xE0, # OOO OOOOO
0x03, 0xE0, # OOOOO
0x07, 0xC0, # OOOOO
0x0F, 0x80, # OOOOO
0x1C, 0x00, # OOO
0x30, 0x00, # OO
0x40, 0x20, # O O
0x3F, 0xE0, # OOOOOOOOO
0x7F, 0xE0, # OOOOOOOOOO
0xFF, 0xC0, # OOOOOOOOOO
0x8F, 0x80, # O OOOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @672 '3' (12 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x33, 0x80, # OO OOO
0x61, 0xC0, # OO OOO
0xF1, 0xE0, # OOOO OOOO
0xF1, 0xE0, # OOOO OOOO
0xF1, 0xE0, # OOOO OOOO
0x61, 0xE0, # OO OOOO
0x01, 0xC0, # OOO
0x07, 0x80, # OOOO
0x01, 0xC0, # OOO
0x01, 0xE0, # OOOO
0x61, 0xF0, # OO OOOOO
0xF1, 0xF0, # OOOO OOOOO
0xF1, 0xF0, # OOOO OOOOO
0xF1, 0xF0, # OOOO OOOOO
0x61, 0xE0, # OO OOOO
0x33, 0x80, # OO OOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @720 '4' (11 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x01, 0x80, # OO
0x03, 0x80, # OOO
0x07, 0x80, # OOOO
0x07, 0x80, # OOOO
0x07, 0x80, # OOOO
0x0F, 0x80, # OOOOO
0x1F, 0x80, # OOOOOO
0x37, 0x80, # OO OOOO
0x27, 0x80, # O OOOO
0x47, 0x80, # O OOOO
0xC7, 0x80, # OO OOOO
0xFF, 0xE0, # OOOOOOOOOOO
0x07, 0x80, # OOOO
0x07, 0x80, # OOOO
0x07, 0x80, # OOOO
0x0F, 0xC0, # OOOOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @768 '5' (11 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x7F, 0xC0, # OOOOOOOOO
0x7F, 0x80, # OOOOOOOO
0xFF, 0x00, # OOOOOOOO
0xFE, 0x00, # OOOOOOO
0x80, 0x00, # O
0xC0, 0x00, # OO
0xF7, 0x80, # OOOO OOOO
0xF7, 0xC0, # OOOO OOOOO
0xC3, 0xE0, # OO OOOOO
0x03, 0xE0, # OOOOO
0x63, 0xE0, # OO OOOOO
0xF3, 0xE0, # OOOO OOOOO
0xF3, 0xE0, # OOOO OOOOO
0xF3, 0xC0, # OOOO OOOO
0x63, 0x80, # OO OOO
0x37, 0x00, # OO OOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @816 '6' (12 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x0D, 0xC0, # OO OOO
0x38, 0x60, # OOO OO
0x38, 0xF0, # OOO OOOO
0x79, 0xF0, # OOOO OOOOO
0x78, 0xE0, # OOOO OOO
0xF8, 0x00, # OOOOO
0xF9, 0xC0, # OOOOO OOO
0xFB, 0xE0, # OOOOO OOOOO
0xFD, 0xF0, # OOOOOO OOOOO
0xF9, 0xF0, # OOOOO OOOOO
0xF9, 0xF0, # OOOOO OOOOO
0x79, 0xF0, # OOOO OOOOO
0x79, 0xF0, # OOOO OOOOO
0x79, 0xE0, # OOOO OOOO
0x39, 0xE0, # OOO OOOO
0x0D, 0x80, # OO OO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @864 '7' (10 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0xBE, 0x40, # O OOOOO O
0xFF, 0xC0, # OOOOOOOOOO
0xFF, 0x80, # OOOOOOOOO
0xFF, 0x00, # OOOOOOOO
0x80, 0x00, # O
0x03, 0x00, # OO
0x03, 0x00, # OO
0x06, 0x00, # OO
0x0E, 0x00, # OOO
0x1E, 0x00, # OOOO
0x1E, 0x00, # OOOO
0x3E, 0x00, # OOOOO
0x3E, 0x00, # OOOOO
0x3E, 0x00, # OOOOO
0x3E, 0x00, # OOOOO
0x1C, 0x00, # OOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @912 '8' (12 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x3B, 0x80, # OOO OOO
0x79, 0xC0, # OOOO OOO
0xF9, 0xE0, # OOOOO OOOO
0xF9, 0xE0, # OOOOO OOOO
0xF9, 0xE0, # OOOOO OOOO
0xF9, 0xE0, # OOOOO OOOO
0x79, 0xC0, # OOOO OOO
0x1B, 0x00, # OO OO
0x3B, 0xC0, # OOO OOOO
0x79, 0xE0, # OOOO OOOO
0xF9, 0xF0, # OOOOO OOOOO
0xF9, 0xF0, # OOOOO OOOOO
0xF9, 0xF0, # OOOOO OOOOO
0xF9, 0xF0, # OOOOO OOOOO
0x79, 0xE0, # OOOO OOOO
0x3F, 0xC0, # OOOOOOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @960 '9' (12 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x3B, 0x00, # OOO OO
0x79, 0xC0, # OOOO OOO
0x79, 0xE0, # OOOO OOOO
0xF9, 0xE0, # OOOOO OOOO
0xF9, 0xE0, # OOOOO OOOO
0xF9, 0xF0, # OOOOO OOOOO
0xF9, 0xF0, # OOOOO OOOOO
0x7D, 0xF0, # OOOOO OOOOO
0x3D, 0xF0, # OOOO OOOOO
0x01, 0xF0, # OOOOO
0x71, 0xF0, # OOO OOOOO
0xF9, 0xE0, # OOOOO OOOO
0xF9, 0xE0, # OOOOO OOOO
0xF1, 0xE0, # OOOO OOOO
0x71, 0xC0, # OOO OOO
0x3B, 0x00, # OOO OO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @1008 ':' (5 pixels wide)
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x70, # OOO
0xF8, # OOOOO
0xF8, # OOOOO
0xF8, # OOOOO
0x70, # OOO
0x00, #
0x00, #
0x70, # OOO
0xF8, # OOOOO
0xF8, # OOOOO
0xF8, # OOOOO
0x70, # OOO
0x00, #
0x00, #
0x00, #
0x00, #
# @1032 ';' (5 pixels wide)
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x70, # OOO
0xF8, # OOOOO
0xF8, # OOOOO
0xF8, # OOOOO
0x70, # OOO
0x00, #
0x00, #
0x00, #
0x60, # OO
0xF0, # OOOO
0xF0, # OOOO
0xF0, # OOOO
0x70, # OOO
0x20, # O
0x60, # OO
0xC0, # OO
# @1056 '<' (10 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x40, # O
0x01, 0xC0, # OOO
0x07, 0x00, # OOO
0x0C, 0x00, # OO
0x38, 0x00, # OOO
0xF8, 0x00, # OOOOO
0xFE, 0x00, # OOOOOOO
0x3F, 0x00, # OOOOOO
0x1F, 0xC0, # OOOOOOO
0x07, 0xC0, # OOOOO
0x03, 0xC0, # OOOO
0x00, 0xC0, # OO
0x00, 0x40, # O
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @1104 '=' (12 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0xFF, 0xF0, # OOOOOOOOOOOO
0xFF, 0xF0, # OOOOOOOOOOOO
0xFF, 0xF0, # OOOOOOOOOOOO
0x00, 0x00, #
0x00, 0x00, #
0xFF, 0xF0, # OOOOOOOOOOOO
0xFF, 0xF0, # OOOOOOOOOOOO
0xFF, 0xF0, # OOOOOOOOOOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @1152 '>' (10 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x80, 0x00, # O
0xE0, 0x00, # OOO
0xF0, 0x00, # OOOO
0xFC, 0x00, # OOOOOO
0x7E, 0x00, # OOOOOO
0x3F, 0x80, # OOOOOOO
0x0F, 0xC0, # OOOOOO
0x07, 0x80, # OOOO
0x06, 0x00, # OO
0x1C, 0x00, # OOO
0x30, 0x00, # OO
0xE0, 0x00, # OOO
0x80, 0x00, # O
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @1200 '?' (9 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x76, 0x00, # OOO OO
0xF7, 0x00, # OOOO OOO
0xF7, 0x80, # OOOO OOOO
0x67, 0x80, # OO OOOO
0x07, 0x80, # OOOO
0x1F, 0x80, # OOOOOO
0x3F, 0x80, # OOOOOOO
0x3F, 0x00, # OOOOOO
0x3E, 0x00, # OOOOO
0x30, 0x00, # OO
0x20, 0x00, # O
0x38, 0x00, # OOO
0x7C, 0x00, # OOOOO
0x7C, 0x00, # OOOOO
0x7C, 0x00, # OOOOO
0x38, 0x00, # OOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @1248 '@' (16 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x07, 0xE0, # OOOOOO
0x1C, 0x18, # OOO OO
0x30, 0x04, # OO O
0x60, 0x02, # OO O
0x41, 0xB9, # O OO OOO O
0xC3, 0x31, # OO OO OO O
0x83, 0x31, # O OO OO O
0x87, 0x71, # O OOO OOO O
0x87, 0x71, # O OOO OOO O
0x87, 0x72, # O OOO OOO O
0x87, 0x76, # O OOO OOO OO
0x43, 0x7C, # O OO OOOOO
0x60, 0x00, # OO
0x30, 0x00, # OO
0x18, 0x08, # OO O
0x07, 0xFC, # OOOOOOOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @1296 'A' (15 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x03, 0xE0, # OOOOO
0x07, 0xE0, # OOOOOO
0x03, 0xE0, # OOOOO
0x0B, 0xE0, # O OOOOO
0x0B, 0xE0, # O OOOOO
0x0B, 0xF0, # O OOOOOO
0x09, 0xF0, # O OOOOO
0x09, 0xF0, # O OOOOO
0x11, 0xF0, # O OOOOO
0x11, 0xF8, # O OOOOOO
0x11, 0xF8, # O OOOOOO
0x17, 0xF8, # O OOOOOOOO
0x20, 0xF8, # O OOOOO
0x20, 0xFC, # O OOOOOO
0x70, 0xFC, # OOO OOOOOO
0xF9, 0xFE, # OOOOO OOOOOOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @1344 'B' (14 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0xFD, 0xF0, # OOOOOO OOOOO
0x7C, 0xF8, # OOOOO OOOOO
0x7C, 0x7C, # OOOOO OOOOO
0x7C, 0x7C, # OOOOO OOOOO
0x7C, 0x7C, # OOOOO OOOOO
0x7C, 0x7C, # OOOOO OOOOO
0x7C, 0x78, # OOOOO OOOO
0x7D, 0xE0, # OOOOO OOOO
0x7C, 0xF0, # OOOOO OOOO
0x7C, 0x78, # OOOOO OOOO
0x7C, 0x7C, # OOOOO OOOOO
0x7C, 0x7C, # OOOOO OOOOO
0x7C, 0x7C, # OOOOO OOOOO
0x7C, 0x7C, # OOOOO OOOOO
0x7C, 0xF8, # OOOOO OOOOO
0xFD, 0xF0, # OOOOOO OOOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @1392 'C' (13 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x06, 0xF8, # OO OOOOO
0x1C, 0x78, # OOO OOOO
0x3C, 0x38, # OOOO OOO
0x7C, 0x18, # OOOOO OO
0x7C, 0x18, # OOOOO OO
0xFC, 0x08, # OOOOOO O
0xFC, 0x00, # OOOOOO
0xFC, 0x00, # OOOOOO
0xFC, 0x00, # OOOOOO
0xFC, 0x00, # OOOOOO
0xFC, 0x00, # OOOOOO
0xFC, 0x08, # OOOOOO O
0x7C, 0x18, # OOOOO OO
0x7C, 0x10, # OOOOO O
0x3C, 0x70, # OOOO OOO
0x0E, 0xE0, # OOO OOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @1440 'D' (14 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0xFC, 0xC0, # OOOOOO OO
0x7C, 0xF0, # OOOOO OOOO
0x7C, 0x78, # OOOOO OOOO
0x7C, 0x78, # OOOOO OOOO
0x7C, 0x78, # OOOOO OOOO
0x7C, 0x7C, # OOOOO OOOOO
0x7C, 0x7C, # OOOOO OOOOO
0x7C, 0x7C, # OOOOO OOOOO
0x7C, 0x7C, # OOOOO OOOOO
0x7C, 0x7C, # OOOOO OOOOO
0x7C, 0x7C, # OOOOO OOOOO
0x7C, 0x78, # OOOOO OOOO
0x7C, 0x78, # OOOOO OOOO
0x7C, 0x70, # OOOOO OOO
0x7C, 0xF0, # OOOOO OOOO
0xFC, 0xC0, # OOOOOO OO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @1488 'E' (12 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0xFD, 0xF0, # OOOOOO OOOOO
0x7C, 0xF0, # OOOOO OOOO
0x7C, 0x30, # OOOOO OO
0x7C, 0x30, # OOOOO OO
0x7C, 0x50, # OOOOO O O
0x7C, 0xC0, # OOOOO OO
0x7C, 0xC0, # OOOOO OO
0x7D, 0xC0, # OOOOO OOO
0x7D, 0xC0, # OOOOO OOO
0x7C, 0xC0, # OOOOO OO
0x7C, 0x40, # OOOOO O
0x7C, 0x10, # OOOOO O
0x7C, 0x30, # OOOOO OO
0x7C, 0x30, # OOOOO OO
0x7C, 0xF0, # OOOOO OOOO
0xFD, 0xF0, # OOOOOO OOOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @1536 'F' (12 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0xFD, 0xF0, # OOOOOO OOOOO
0x7C, 0xF0, # OOOOO OOOO
0x7C, 0x30, # OOOOO OO
0x7C, 0x30, # OOOOO OO
0x7C, 0x50, # OOOOO O O
0x7C, 0xC0, # OOOOO OO
0x7C, 0xC0, # OOOOO OO
0x7D, 0xC0, # OOOOO OOO
0x7D, 0xC0, # OOOOO OOO
0x7C, 0xC0, # OOOOO OO
0x7C, 0x40, # OOOOO O
0x7C, 0x00, # OOOOO
0x7C, 0x00, # OOOOO
0x7C, 0x00, # OOOOO
0x7C, 0x00, # OOOOO
0xFE, 0x00, # OOOOOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @1584 'G' (14 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x0E, 0xE4, # OOO OOO O
0x1C, 0x3C, # OOO OOOO
0x3C, 0x1C, # OOOO OOO
0x7C, 0x0C, # OOOOO OO
0x7C, 0x0C, # OOOOO OO
0xFC, 0x04, # OOOOOO O
0xFC, 0x00, # OOOOOO
0xFC, 0x00, # OOOOOO
0xFC, 0xFC, # OOOOOO OOOOOO
0xFC, 0x78, # OOOOOO OOOO
0xFC, 0x78, # OOOOOO OOOO
0x7C, 0x78, # OOOOO OOOO
0x7C, 0x78, # OOOOO OOOO
0x3C, 0x78, # OOOO OOOO
0x1C, 0x78, # OOO OOOO
0x0E, 0xE0, # OOO OOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @1632 'H' (16 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0xFE, 0x7F, # OOOOOOO OOOOOOO
0x7C, 0x3E, # OOOOO OOOOO
0x7C, 0x3E, # OOOOO OOOOO
0x7C, 0x3E, # OOOOO OOOOO
0x7C, 0x3E, # OOOOO OOOOO
0x7C, 0x3E, # OOOOO OOOOO
0x7C, 0x3E, # OOOOO OOOOO
0x7C, 0xFE, # OOOOO OOOOOOO
0x7C, 0x3E, # OOOOO OOOOO
0x7C, 0x3E, # OOOOO OOOOO
0x7C, 0x3E, # OOOOO OOOOO
0x7C, 0x3E, # OOOOO OOOOO
0x7C, 0x3E, # OOOOO OOOOO
0x7C, 0x3E, # OOOOO OOOOO
0x7C, 0x3E, # OOOOO OOOOO
0xFE, 0x7F, # OOOOOOO OOOOOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @1680 'I' (7 pixels wide)
0x00, #
0x00, #
0x00, #
0x00, #
0xFE, # OOOOOOO
0x7C, # OOOOO
0x7C, # OOOOO
0x7C, # OOOOO
0x7C, # OOOOO
0x7C, # OOOOO
0x7C, # OOOOO
0x7C, # OOOOO
0x7C, # OOOOO
0x7C, # OOOOO
0x7C, # OOOOO
0x7C, # OOOOO
0x7C, # OOOOO
0x7C, # OOOOO
0x7C, # OOOOO
0xFE, # OOOOOOO
0x00, #
0x00, #
0x00, #
0x00, #
# @1704 'J' (12 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x07, 0xF0, # OOOOOOO
0x03, 0xE0, # OOOOO
0x03, 0xE0, # OOOOO
0x03, 0xE0, # OOOOO
0x03, 0xE0, # OOOOO
0x03, 0xE0, # OOOOO
0x03, 0xE0, # OOOOO
0x03, 0xE0, # OOOOO
0x03, 0xE0, # OOOOO
0x03, 0xE0, # OOOOO
0x73, 0xE0, # OOO OOOOO
0xFB, 0xE0, # OOOOO OOOOO
0xFB, 0xE0, # OOOOO OOOOO
0xFB, 0xE0, # OOOOO OOOOO
0x63, 0xC0, # OO OOOO
0x37, 0x80, # OO OOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @1752 'K' (16 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0xFE, 0x7E, # OOOOOOO OOOOOO
0x7C, 0x3C, # OOOOO OOOO
0x7C, 0x18, # OOOOO OO
0x7C, 0x10, # OOOOO O
0x7C, 0x20, # OOOOO O
0x7C, 0x70, # OOOOO OOO
0x7C, 0xF0, # OOOOO OOOO
0x7D, 0xF0, # OOOOO OOOOO
0x7D, 0xF8, # OOOOO OOOOOO
0x7D, 0xF8, # OOOOO OOOOOO
0x7C, 0xFC, # OOOOO OOOOOO
0x7C, 0xFC, # OOOOO OOOOOO
0x7C, 0xFC, # OOOOO OOOOOO
0x7C, 0x7E, # OOOOO OOOOOO
0x7C, 0x7E, # OOOOO OOOOOO
0xFE, 0xFF, # OOOOOOO OOOOOOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @1800 'L' (12 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0xFE, 0x00, # OOOOOOO
0x7C, 0x00, # OOOOO
0x7C, 0x00, # OOOOO
0x7C, 0x00, # OOOOO
0x7C, 0x00, # OOOOO
0x7C, 0x00, # OOOOO
0x7C, 0x00, # OOOOO
0x7C, 0x00, # OOOOO
0x7C, 0x00, # OOOOO
0x7C, 0x00, # OOOOO
0x7C, 0x10, # OOOOO O
0x7C, 0x10, # OOOOO O
0x7C, 0x30, # OOOOO OO
0x7C, 0x30, # OOOOO OO
0x7C, 0xF0, # OOOOO OOOO
0xFC, 0xF0, # OOOOOO OOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @1848 'M' (18 pixels wide)
0x00, 0x00, 0x00, #
0x00, 0x00, 0x00, #
0x00, 0x00, 0x00, #
0x00, 0x00, 0x00, #
0xFE, 0x0F, 0xC0, # OOOOOOO OOOOOO
0x7E, 0x0F, 0x80, # OOOOOO OOOOO
0x3F, 0x0F, 0x80, # OOOOOO OOOOO
0x3F, 0x4F, 0x80, # OOOOOO O OOOOO
0x1F, 0x4F, 0x80, # OOOOO O OOOOO
0x1F, 0xCF, 0x80, # OOOOOOO OOOOO
0x1F, 0xCF, 0x80, # OOOOOOO OOOOO
0x1F, 0xCF, 0x80, # OOOOOOO OOOOO
0x1F, 0x8F, 0x80, # OOOOOO OOOOO
0x1F, 0x8F, 0x80, # OOOOOO OOOOO
0x17, 0x8F, 0x80, # O OOOO OOOOO
0x17, 0x8F, 0x80, # O OOOO OOOOO
0x17, 0x0F, 0x80, # O OOO OOOOO
0x13, 0x0F, 0x80, # O OO OOOOO
0x3B, 0x0F, 0x80, # OOO OO OOOOO
0x7E, 0x1F, 0xC0, # OOOOOO OOOOOOO
0x00, 0x00, 0x00, #
0x00, 0x00, 0x00, #
0x00, 0x00, 0x00, #
0x00, 0x00, 0x00, #
# @1920 'N' (15 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0xFE, 0x3E, # OOOOOOO OOOOO
0x7F, 0x1C, # OOOOOOO OOO
0x3F, 0x08, # OOOOOO O
0x3F, 0x88, # OOOOOOO O
0x5F, 0x88, # O OOOOOO O
0x2F, 0xC8, # O OOOOOO O
0x2F, 0xE8, # O OOOOOOO O
0x27, 0xE8, # O OOOOOO O
0x27, 0xF8, # O OOOOOOOO
0x23, 0xF0, # O OOOOOO
0x21, 0xF8, # O OOOOOO
0x21, 0xF8, # O OOOOOO
0x20, 0xF8, # O OOOOO
0x20, 0xF8, # O OOOOO
0x70, 0x78, # OOO OOOO
0xF8, 0x38, # OOOOO OOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @1968 'O' (15 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x0E, 0xE0, # OOO OOO
0x1C, 0x70, # OOO OOO
0x3C, 0x78, # OOOO OOOO
0x7C, 0x7C, # OOOOO OOOOO
0x7C, 0x7C, # OOOOO OOOOO
0xFC, 0x7E, # OOOOOO OOOOOO
0xFC, 0x7E, # OOOOOO OOOOOO
0xFC, 0x7E, # OOOOOO OOOOOO
0xFC, 0x7E, # OOOOOO OOOOOO
0xFC, 0x7E, # OOOOOO OOOOOO
0xFC, 0x7E, # OOOOOO OOOOOO
0x7C, 0x7C, # OOOOO OOOOO
0x7C, 0x7C, # OOOOO OOOOO
0x3C, 0x78, # OOOO OOOO
0x1C, 0x70, # OOO OOO
0x0E, 0xE0, # OOO OOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @2016 'P' (14 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0xFC, 0xE0, # OOOOOO OOO
0x7C, 0xF8, # OOOOO OOOOO
0x7C, 0x78, # OOOOO OOOO
0x7C, 0x7C, # OOOOO OOOOO
0x7C, 0x7C, # OOOOO OOOOO
0x7C, 0x7C, # OOOOO OOOOO
0x7C, 0x7C, # OOOOO OOOOO
0x7C, 0x78, # OOOOO OOOO
0x7D, 0xF0, # OOOOO OOOOO
0x7D, 0xC0, # OOOOO OOO
0x7C, 0x00, # OOOOO
0x7C, 0x00, # OOOOO
0x7C, 0x00, # OOOOO
0x7C, 0x00, # OOOOO
0x7C, 0x00, # OOOOO
0xFE, 0x00, # OOOOOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @2064 'Q' (14 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x1D, 0xC0, # OOO OOO
0x3C, 0xE0, # OOOO OOO
0x7C, 0xF0, # OOOOO OOOO
0x7C, 0xF8, # OOOOO OOOOO
0xFC, 0xF8, # OOOOOO OOOOO
0xFC, 0xFC, # OOOOOO OOOOOO
0xFC, 0xFC, # OOOOOO OOOOOO
0xFC, 0xFC, # OOOOOO OOOOOO
0xFC, 0xFC, # OOOOOO OOOOOO
0xFC, 0xFC, # OOOOOO OOOOOO
0xFC, 0xFC, # OOOOOO OOOOOO
0xFC, 0xF8, # OOOOOO OOOOO
0x7C, 0xF8, # OOOOO OOOOO
0x7F, 0xF0, # OOOOOOOOOOO
0x3D, 0xE0, # OOOO OOOO
0x1D, 0xFC, # OOO OOOOOOO
0x00, 0x78, # OOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @2112 'R' (16 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0xFC, 0xF0, # OOOOOO OOOO
0x7C, 0xF8, # OOOOO OOOOO
0x7C, 0x7C, # OOOOO OOOOO
0x7C, 0x7C, # OOOOO OOOOO
0x7C, 0x7C, # OOOOO OOOOO
0x7C, 0x7C, # OOOOO OOOOO
0x7C, 0x78, # OOOOO OOOO
0x7C, 0xF0, # OOOOO OOOO
0x7D, 0xF0, # OOOOO OOOOO
0x7C, 0x7C, # OOOOO OOOOO
0x7C, 0x7C, # OOOOO OOOOO
0x7C, 0x7C, # OOOOO OOOOO
0x7C, 0x7C, # OOOOO OOOOO
0x7C, 0x7F, # OOOOO OOOOOOO
0x7C, 0x7F, # OOOOO OOOOOOO
0xFE, 0x3C, # OOOOOOO OOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @2160 'S' (12 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x3B, 0xE0, # OOO OOOOO
0x71, 0xE0, # OOO OOOO
0x70, 0x60, # OOO OO
0xF0, 0x60, # OOOO OO
0xF8, 0x20, # OOOOO O
0xFF, 0x00, # OOOOOOOO
0xFF, 0xC0, # OOOOOOOOOO
0x7F, 0xE0, # OOOOOOOOOO
0x7F, 0xE0, # OOOOOOOOOO
0x1F, 0xF0, # OOOOOOOOO
0x87, 0xF0, # O OOOOOOO
0xC1, 0xF0, # OO OOOOO
0xC0, 0xF0, # OO OOOO
0xE0, 0xE0, # OOO OOO
0xF1, 0xE0, # OOOO OOOO
0x9B, 0x80, # O OO OOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @2208 'T' (14 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0xE7, 0xDC, # OOO OOOOO OOO
0xE7, 0xDC, # OOO OOOOO OOO
0xC7, 0xCC, # OO OOOOO OO
0xC7, 0xCC, # OO OOOOO OO
0x87, 0xC4, # O OOOOO O
0x87, 0xC4, # O OOOOO O
0x07, 0xC0, # OOOOO
0x07, 0xC0, # OOOOO
0x07, 0xC0, # OOOOO
0x07, 0xC0, # OOOOO
0x07, 0xC0, # OOOOO
0x07, 0xC0, # OOOOO
0x07, 0xC0, # OOOOO
0x07, 0xC0, # OOOOO
0x07, 0xC0, # OOOOO
0x0F, 0xE0, # OOOOOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @2256 'U' (15 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0xFE, 0x3E, # OOOOOOO OOOOO
0x7C, 0x1C, # OOOOO OOO
0x7C, 0x08, # OOOOO O
0x7C, 0x08, # OOOOO O
0x7C, 0x08, # OOOOO O
0x7C, 0x08, # OOOOO O
0x7C, 0x08, # OOOOO O
0x7C, 0x08, # OOOOO O
0x7C, 0x08, # OOOOO O
0x7C, 0x08, # OOOOO O
0x7C, 0x08, # OOOOO O
0x7C, 0x08, # OOOOO O
0x7C, 0x08, # OOOOO O
0x7C, 0x18, # OOOOO OO
0x3E, 0x30, # OOOOO OO
0x1E, 0x60, # OOOO OO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @2304 'V' (14 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0xFF, 0x3C, # OOOOOOOO OOOO
0x7E, 0x18, # OOOOOO OO
0x7E, 0x10, # OOOOOO O
0x3E, 0x10, # OOOOO O
0x3F, 0x30, # OOOOOO OO
0x3F, 0x20, # OOOOOO O
0x1F, 0x20, # OOOOO O
0x1F, 0x20, # OOOOO O
0x1F, 0xA0, # OOOOOO O
0x1F, 0xC0, # OOOOOOO
0x0F, 0x80, # OOOOO
0x0F, 0xC0, # OOOOOO
0x0F, 0xC0, # OOOOOO
0x07, 0x80, # OOOO
0x07, 0x80, # OOOO
0x07, 0x80, # OOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @2352 'W' (19 pixels wide)
0x00, 0x00, 0x00, #
0x00, 0x00, 0x00, #
0x00, 0x00, 0x00, #
0x00, 0x00, 0x00, #
0xFF, 0x39, 0xE0, # OOOOOOOO OOO OOOO
0x7E, 0x38, 0xC0, # OOOOOO OOO OO
0x7E, 0x38, 0x80, # OOOOOO OOO O
0x3E, 0x7C, 0x80, # OOOOO OOOOO O
0x3F, 0x7C, 0x80, # OOOOOO OOOOO O
0x3F, 0x7D, 0x80, # OOOOOO OOOOO OO
0x3F, 0x7D, 0x00, # OOOOOO OOOOO O
0x1F, 0x7F, 0x00, # OOOOO OOOOOOO
0x1F, 0xBF, 0x00, # OOOOOO OOOOOO
0x1F, 0xBE, 0x00, # OOOOOO OOOOO
0x0F, 0xBE, 0x00, # OOOOO OOOOO
0x0F, 0xBE, 0x00, # OOOOO OOOOO
0x0F, 0x9E, 0x00, # OOOOO OOOO
0x0F, 0x1E, 0x00, # OOOO OOOO
0x07, 0x1E, 0x00, # OOO OOOO
0x07, 0x1C, 0x00, # OOO OOO
0x00, 0x00, 0x00, #
0x00, 0x00, 0x00, #
0x00, 0x00, 0x00, #
0x00, 0x00, 0x00, #
# @2424 'X' (14 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0xFF, 0x7C, # OOOOOOOO OOOOO
0x7E, 0x38, # OOOOOO OOO
0x7E, 0x30, # OOOOOO OO
0x3F, 0x60, # OOOOOO OO
0x3F, 0x40, # OOOOOO O
0x1F, 0x80, # OOOOOO
0x1F, 0x80, # OOOOOO
0x0F, 0xC0, # OOOOOO
0x0F, 0xC0, # OOOOOO
0x07, 0xE0, # OOOOOO
0x0F, 0xE0, # OOOOOOO
0x0B, 0xF0, # O OOOOOO
0x1B, 0xF0, # OO OOOOOO
0x31, 0xF8, # OO OOOOOO
0x71, 0xF8, # OOO OOOOOO
0xF9, 0xFC, # OOOOO OOOOOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @2472 'Y' (14 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0xFE, 0x7C, # OOOOOOO OOOOO
0x7C, 0x38, # OOOOO OOO
0x7C, 0x10, # OOOOO O
0x3E, 0x10, # OOOOO O
0x3E, 0x20, # OOOOO O
0x3F, 0x20, # OOOOOO O
0x1F, 0x40, # OOOOO O
0x1F, 0x80, # OOOOOO
0x0F, 0x80, # OOOOO
0x0F, 0x80, # OOOOO
0x0F, 0x80, # OOOOO
0x0F, 0x80, # OOOOO
0x0F, 0x80, # OOOOO
0x0F, 0x80, # OOOOO
0x0F, 0x80, # OOOOO
0x1F, 0xC0, # OOOOOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @2520 'Z' (12 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x7D, 0xF0, # OOOOO OOOOO
0x7B, 0xF0, # OOOO OOOOOO
0x63, 0xE0, # OO OOOOO
0x67, 0xE0, # OO OOOOOO
0x47, 0xE0, # O OOOOOO
0x0F, 0xC0, # OOOOOO
0x0F, 0xC0, # OOOOOO
0x1F, 0x80, # OOOOOO
0x1F, 0x80, # OOOOOO
0x3F, 0x00, # OOOOOO
0x3F, 0x00, # OOOOOO
0x3E, 0x10, # OOOOO O
0x7E, 0x30, # OOOOOO OO
0x7C, 0x30, # OOOOO OO
0xFC, 0xF0, # OOOOOO OOOO
0xFD, 0xF0, # OOOOOO OOOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @2568 '[' (6 pixels wide)
0x00, #
0x00, #
0x00, #
0x00, #
0xFC, # OOOOOO
0xF8, # OOOOO
0xF0, # OOOO
0xF0, # OOOO
0xF0, # OOOO
0xF0, # OOOO
0xF0, # OOOO
0xF0, # OOOO
0xF0, # OOOO
0xF0, # OOOO
0xF0, # OOOO
0xF0, # OOOO
0xF0, # OOOO
0xF0, # OOOO
0xF0, # OOOO
0xF0, # OOOO
0xF0, # OOOO
0xF8, # OOOOO
0xFC, # OOOOOO
0x00, #
# @2592 '\' (10 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0xE0, 0x00, # OOO
0xE0, 0x00, # OOO
0x70, 0x00, # OOO
0x70, 0x00, # OOO
0x70, 0x00, # OOO
0x38, 0x00, # OOO
0x38, 0x00, # OOO
0x38, 0x00, # OOO
0x1C, 0x00, # OOO
0x1C, 0x00, # OOO
0x0C, 0x00, # OO
0x0E, 0x00, # OOO
0x0E, 0x00, # OOO
0x07, 0x00, # OOO
0x07, 0x00, # OOO
0x07, 0x00, # OOO
0x03, 0x80, # OOO
0x03, 0x80, # OOO
0x03, 0xC0, # OOOO
0x00, 0x00, #
# @2640 ']' (6 pixels wide)
0x00, #
0x00, #
0x00, #
0x00, #
0xFC, # OOOOOO
0x7C, # OOOOO
0x3C, # OOOO
0x3C, # OOOO
0x3C, # OOOO
0x3C, # OOOO
0x3C, # OOOO
0x3C, # OOOO
0x3C, # OOOO
0x3C, # OOOO
0x3C, # OOOO
0x3C, # OOOO
0x3C, # OOOO
0x3C, # OOOO
0x3C, # OOOO
0x3C, # OOOO
0x3C, # OOOO
0x7C, # OOOOO
0xFC, # OOOOOO
0x00, #
# @2664 '^' (14 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x02, 0x00, # O
0x06, 0x00, # OO
0x07, 0x00, # OOO
0x0F, 0x80, # OOOOO
0x0F, 0xC0, # OOOOOO
0x1B, 0xC0, # OO OOOO
0x31, 0xE0, # OO OOOO
0x21, 0xF0, # O OOOOO
0x60, 0xF0, # OO OOOO
0xC0, 0x7C, # OO OOOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @2712 '_' (12 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0xFF, 0xF0, # OOOOOOOOOOOO
0xFF, 0xF0, # OOOOOOOOOOOO
# @2760 '`' (6 pixels wide)
0xF0, # OOOO
0x38, # OOO
0x1C, # OOO
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
# @2784 'a' (15 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x03, 0xE0, # OOOOO
0x07, 0xE0, # OOOOOO
0x03, 0xE0, # OOOOO
0x0B, 0xE0, # O OOOOO
0x0B, 0xE0, # O OOOOO
0x0B, 0xF0, # O OOOOOO
0x09, 0xF0, # O OOOOO
0x09, 0xF0, # O OOOOO
0x11, 0xF0, # O OOOOO
0x11, 0xF8, # O OOOOOO
0x11, 0xF8, # O OOOOOO
0x17, 0xF8, # O OOOOOOOO
0x20, 0xF8, # O OOOOO
0x20, 0xFC, # O OOOOOO
0x70, 0xFC, # OOO OOOOOO
0xF9, 0xFE, # OOOOO OOOOOOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @2832 'b' (14 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0xFD, 0xF0, # OOOOOO OOOOO
0x7C, 0xF8, # OOOOO OOOOO
0x7C, 0x7C, # OOOOO OOOOO
0x7C, 0x7C, # OOOOO OOOOO
0x7C, 0x7C, # OOOOO OOOOO
0x7C, 0x7C, # OOOOO OOOOO
0x7C, 0x78, # OOOOO OOOO
0x7D, 0xE0, # OOOOO OOOO
0x7C, 0xF0, # OOOOO OOOO
0x7C, 0x78, # OOOOO OOOO
0x7C, 0x7C, # OOOOO OOOOO
0x7C, 0x7C, # OOOOO OOOOO
0x7C, 0x7C, # OOOOO OOOOO
0x7C, 0x7C, # OOOOO OOOOO
0x7C, 0xF8, # OOOOO OOOOO
0xFD, 0xF0, # OOOOOO OOOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @2880 'c' (13 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x06, 0xF8, # OO OOOOO
0x1C, 0x78, # OOO OOOO
0x3C, 0x38, # OOOO OOO
0x7C, 0x18, # OOOOO OO
0x7C, 0x18, # OOOOO OO
0xFC, 0x08, # OOOOOO O
0xFC, 0x00, # OOOOOO
0xFC, 0x00, # OOOOOO
0xFC, 0x00, # OOOOOO
0xFC, 0x00, # OOOOOO
0xFC, 0x00, # OOOOOO
0xFC, 0x08, # OOOOOO O
0x7C, 0x18, # OOOOO OO
0x7C, 0x10, # OOOOO O
0x3C, 0x70, # OOOO OOO
0x0E, 0xE0, # OOO OOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @2928 'd' (14 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0xFC, 0xC0, # OOOOOO OO
0x7C, 0xF0, # OOOOO OOOO
0x7C, 0x78, # OOOOO OOOO
0x7C, 0x78, # OOOOO OOOO
0x7C, 0x78, # OOOOO OOOO
0x7C, 0x7C, # OOOOO OOOOO
0x7C, 0x7C, # OOOOO OOOOO
0x7C, 0x7C, # OOOOO OOOOO
0x7C, 0x7C, # OOOOO OOOOO
0x7C, 0x7C, # OOOOO OOOOO
0x7C, 0x7C, # OOOOO OOOOO
0x7C, 0x78, # OOOOO OOOO
0x7C, 0x78, # OOOOO OOOO
0x7C, 0x70, # OOOOO OOO
0x7C, 0xF0, # OOOOO OOOO
0xFC, 0xC0, # OOOOOO OO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @2976 'e' (12 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0xFD, 0xF0, # OOOOOO OOOOO
0x7C, 0xF0, # OOOOO OOOO
0x7C, 0x30, # OOOOO OO
0x7C, 0x30, # OOOOO OO
0x7C, 0x50, # OOOOO O O
0x7C, 0xC0, # OOOOO OO
0x7C, 0xC0, # OOOOO OO
0x7D, 0xC0, # OOOOO OOO
0x7D, 0xC0, # OOOOO OOO
0x7C, 0xC0, # OOOOO OO
0x7C, 0x40, # OOOOO O
0x7C, 0x10, # OOOOO O
0x7C, 0x30, # OOOOO OO
0x7C, 0x30, # OOOOO OO
0x7C, 0xF0, # OOOOO OOOO
0xFD, 0xF0, # OOOOOO OOOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @3024 'f' (12 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0xFD, 0xF0, # OOOOOO OOOOO
0x7C, 0xF0, # OOOOO OOOO
0x7C, 0x30, # OOOOO OO
0x7C, 0x30, # OOOOO OO
0x7C, 0x50, # OOOOO O O
0x7C, 0xC0, # OOOOO OO
0x7C, 0xC0, # OOOOO OO
0x7D, 0xC0, # OOOOO OOO
0x7D, 0xC0, # OOOOO OOO
0x7C, 0xC0, # OOOOO OO
0x7C, 0x40, # OOOOO O
0x7C, 0x00, # OOOOO
0x7C, 0x00, # OOOOO
0x7C, 0x00, # OOOOO
0x7C, 0x00, # OOOOO
0xFE, 0x00, # OOOOOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @3072 'g' (14 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x0E, 0xE4, # OOO OOO O
0x1C, 0x3C, # OOO OOOO
0x3C, 0x1C, # OOOO OOO
0x7C, 0x0C, # OOOOO OO
0x7C, 0x0C, # OOOOO OO
0xFC, 0x04, # OOOOOO O
0xFC, 0x00, # OOOOOO
0xFC, 0x00, # OOOOOO
0xFC, 0xFC, # OOOOOO OOOOOO
0xFC, 0x78, # OOOOOO OOOO
0xFC, 0x78, # OOOOOO OOOO
0x7C, 0x78, # OOOOO OOOO
0x7C, 0x78, # OOOOO OOOO
0x3C, 0x78, # OOOO OOOO
0x1C, 0x78, # OOO OOOO
0x0E, 0xE0, # OOO OOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @3120 'h' (16 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0xFE, 0x7F, # OOOOOOO OOOOOOO
0x7C, 0x3E, # OOOOO OOOOO
0x7C, 0x3E, # OOOOO OOOOO
0x7C, 0x3E, # OOOOO OOOOO
0x7C, 0x3E, # OOOOO OOOOO
0x7C, 0x3E, # OOOOO OOOOO
0x7C, 0x3E, # OOOOO OOOOO
0x7C, 0xFE, # OOOOO OOOOOOO
0x7C, 0x3E, # OOOOO OOOOO
0x7C, 0x3E, # OOOOO OOOOO
0x7C, 0x3E, # OOOOO OOOOO
0x7C, 0x3E, # OOOOO OOOOO
0x7C, 0x3E, # OOOOO OOOOO
0x7C, 0x3E, # OOOOO OOOOO
0x7C, 0x3E, # OOOOO OOOOO
0xFE, 0x7F, # OOOOOOO OOOOOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @3168 'i' (7 pixels wide)
0x00, #
0x00, #
0x00, #
0x00, #
0xFE, # OOOOOOO
0x7C, # OOOOO
0x7C, # OOOOO
0x7C, # OOOOO
0x7C, # OOOOO
0x7C, # OOOOO
0x7C, # OOOOO
0x7C, # OOOOO
0x7C, # OOOOO
0x7C, # OOOOO
0x7C, # OOOOO
0x7C, # OOOOO
0x7C, # OOOOO
0x7C, # OOOOO
0x7C, # OOOOO
0xFE, # OOOOOOO
0x00, #
0x00, #
0x00, #
0x00, #
# @3192 'j' (12 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x07, 0xF0, # OOOOOOO
0x03, 0xE0, # OOOOO
0x03, 0xE0, # OOOOO
0x03, 0xE0, # OOOOO
0x03, 0xE0, # OOOOO
0x03, 0xE0, # OOOOO
0x03, 0xE0, # OOOOO
0x03, 0xE0, # OOOOO
0x03, 0xE0, # OOOOO
0x03, 0xE0, # OOOOO
0x73, 0xE0, # OOO OOOOO
0xFB, 0xE0, # OOOOO OOOOO
0xFB, 0xE0, # OOOOO OOOOO
0xFB, 0xE0, # OOOOO OOOOO
0x63, 0xC0, # OO OOOO
0x37, 0x80, # OO OOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @3240 'k' (16 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0xFE, 0x7E, # OOOOOOO OOOOOO
0x7C, 0x3C, # OOOOO OOOO
0x7C, 0x18, # OOOOO OO
0x7C, 0x10, # OOOOO O
0x7C, 0x20, # OOOOO O
0x7C, 0x70, # OOOOO OOO
0x7C, 0xF0, # OOOOO OOOO
0x7D, 0xF0, # OOOOO OOOOO
0x7D, 0xF8, # OOOOO OOOOOO
0x7D, 0xF8, # OOOOO OOOOOO
0x7C, 0xFC, # OOOOO OOOOOO
0x7C, 0xFC, # OOOOO OOOOOO
0x7C, 0xFC, # OOOOO OOOOOO
0x7C, 0x7E, # OOOOO OOOOOO
0x7C, 0x7E, # OOOOO OOOOOO
0xFE, 0xFF, # OOOOOOO OOOOOOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @3288 'l' (12 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0xFE, 0x00, # OOOOOOO
0x7C, 0x00, # OOOOO
0x7C, 0x00, # OOOOO
0x7C, 0x00, # OOOOO
0x7C, 0x00, # OOOOO
0x7C, 0x00, # OOOOO
0x7C, 0x00, # OOOOO
0x7C, 0x00, # OOOOO
0x7C, 0x00, # OOOOO
0x7C, 0x00, # OOOOO
0x7C, 0x10, # OOOOO O
0x7C, 0x10, # OOOOO O
0x7C, 0x30, # OOOOO OO
0x7C, 0x30, # OOOOO OO
0x7C, 0xF0, # OOOOO OOOO
0xFC, 0xF0, # OOOOOO OOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @3336 'm' (18 pixels wide)
0x00, 0x00, 0x00, #
0x00, 0x00, 0x00, #
0x00, 0x00, 0x00, #
0x00, 0x00, 0x00, #
0xFE, 0x0F, 0xC0, # OOOOOOO OOOOOO
0x7E, 0x0F, 0x80, # OOOOOO OOOOO
0x3F, 0x0F, 0x80, # OOOOOO OOOOO
0x3F, 0x4F, 0x80, # OOOOOO O OOOOO
0x1F, 0x4F, 0x80, # OOOOO O OOOOO
0x1F, 0xCF, 0x80, # OOOOOOO OOOOO
0x1F, 0xCF, 0x80, # OOOOOOO OOOOO
0x1F, 0xCF, 0x80, # OOOOOOO OOOOO
0x1F, 0x8F, 0x80, # OOOOOO OOOOO
0x1F, 0x8F, 0x80, # OOOOOO OOOOO
0x17, 0x8F, 0x80, # O OOOO OOOOO
0x17, 0x8F, 0x80, # O OOOO OOOOO
0x17, 0x0F, 0x80, # O OOO OOOOO
0x13, 0x0F, 0x80, # O OO OOOOO
0x3B, 0x0F, 0x80, # OOO OO OOOOO
0x7E, 0x1F, 0xC0, # OOOOOO OOOOOOO
0x00, 0x00, 0x00, #
0x00, 0x00, 0x00, #
0x00, 0x00, 0x00, #
0x00, 0x00, 0x00, #
# @3408 'n' (15 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0xFE, 0x3E, # OOOOOOO OOOOO
0x7F, 0x1C, # OOOOOOO OOO
0x3F, 0x08, # OOOOOO O
0x3F, 0x88, # OOOOOOO O
0x5F, 0x88, # O OOOOOO O
0x2F, 0xC8, # O OOOOOO O
0x2F, 0xE8, # O OOOOOOO O
0x27, 0xE8, # O OOOOOO O
0x27, 0xF8, # O OOOOOOOO
0x23, 0xF0, # O OOOOOO
0x21, 0xF8, # O OOOOOO
0x21, 0xF8, # O OOOOOO
0x20, 0xF8, # O OOOOO
0x20, 0xF8, # O OOOOO
0x70, 0x78, # OOO OOOO
0xF8, 0x38, # OOOOO OOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @3456 'o' (15 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x0E, 0xE0, # OOO OOO
0x1C, 0x70, # OOO OOO
0x3C, 0x78, # OOOO OOOO
0x7C, 0x7C, # OOOOO OOOOO
0x7C, 0x7C, # OOOOO OOOOO
0xFC, 0x7E, # OOOOOO OOOOOO
0xFC, 0x7E, # OOOOOO OOOOOO
0xFC, 0x7E, # OOOOOO OOOOOO
0xFC, 0x7E, # OOOOOO OOOOOO
0xFC, 0x7E, # OOOOOO OOOOOO
0xFC, 0x7E, # OOOOOO OOOOOO
0x7C, 0x7C, # OOOOO OOOOO
0x7C, 0x7C, # OOOOO OOOOO
0x3C, 0x78, # OOOO OOOO
0x1C, 0x70, # OOO OOO
0x0E, 0xE0, # OOO OOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @3504 'p' (14 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0xFC, 0xE0, # OOOOOO OOO
0x7C, 0xF8, # OOOOO OOOOO
0x7C, 0x78, # OOOOO OOOO
0x7C, 0x7C, # OOOOO OOOOO
0x7C, 0x7C, # OOOOO OOOOO
0x7C, 0x7C, # OOOOO OOOOO
0x7C, 0x7C, # OOOOO OOOOO
0x7C, 0x78, # OOOOO OOOO
0x7D, 0xF0, # OOOOO OOOOO
0x7D, 0xC0, # OOOOO OOO
0x7C, 0x00, # OOOOO
0x7C, 0x00, # OOOOO
0x7C, 0x00, # OOOOO
0x7C, 0x00, # OOOOO
0x7C, 0x00, # OOOOO
0xFE, 0x00, # OOOOOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @3552 'q' (14 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x1D, 0xC0, # OOO OOO
0x3C, 0xE0, # OOOO OOO
0x7C, 0xF0, # OOOOO OOOO
0x7C, 0xF8, # OOOOO OOOOO
0xFC, 0xF8, # OOOOOO OOOOO
0xFC, 0xFC, # OOOOOO OOOOOO
0xFC, 0xFC, # OOOOOO OOOOOO
0xFC, 0xFC, # OOOOOO OOOOOO
0xFC, 0xFC, # OOOOOO OOOOOO
0xFC, 0xFC, # OOOOOO OOOOOO
0xFC, 0xFC, # OOOOOO OOOOOO
0xFC, 0xF8, # OOOOOO OOOOO
0x7C, 0xF8, # OOOOO OOOOO
0x7F, 0xF0, # OOOOOOOOOOO
0x3D, 0xE0, # OOOO OOOO
0x1D, 0xFC, # OOO OOOOOOO
0x00, 0x78, # OOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @3600 'r' (16 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0xFC, 0xF0, # OOOOOO OOOO
0x7C, 0xF8, # OOOOO OOOOO
0x7C, 0x7C, # OOOOO OOOOO
0x7C, 0x7C, # OOOOO OOOOO
0x7C, 0x7C, # OOOOO OOOOO
0x7C, 0x7C, # OOOOO OOOOO
0x7C, 0x78, # OOOOO OOOO
0x7C, 0xF0, # OOOOO OOOO
0x7D, 0xF0, # OOOOO OOOOO
0x7C, 0x7C, # OOOOO OOOOO
0x7C, 0x7C, # OOOOO OOOOO
0x7C, 0x7C, # OOOOO OOOOO
0x7C, 0x7C, # OOOOO OOOOO
0x7C, 0x7F, # OOOOO OOOOOOO
0x7C, 0x7F, # OOOOO OOOOOOO
0xFE, 0x3C, # OOOOOOO OOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @3648 's' (12 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x3B, 0xE0, # OOO OOOOO
0x71, 0xE0, # OOO OOOO
0x70, 0x60, # OOO OO
0xF0, 0x60, # OOOO OO
0xF8, 0x20, # OOOOO O
0xFF, 0x00, # OOOOOOOO
0xFF, 0xC0, # OOOOOOOOOO
0x7F, 0xE0, # OOOOOOOOOO
0x7F, 0xE0, # OOOOOOOOOO
0x1F, 0xF0, # OOOOOOOOO
0x87, 0xF0, # O OOOOOOO
0xC1, 0xF0, # OO OOOOO
0xC0, 0xF0, # OO OOOO
0xE0, 0xE0, # OOO OOO
0xF1, 0xE0, # OOOO OOOO
0x9B, 0x80, # O OO OOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @3696 't' (14 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0xE7, 0xDC, # OOO OOOOO OOO
0xE7, 0xDC, # OOO OOOOO OOO
0xC7, 0xCC, # OO OOOOO OO
0xC7, 0xCC, # OO OOOOO OO
0x87, 0xC4, # O OOOOO O
0x87, 0xC4, # O OOOOO O
0x07, 0xC0, # OOOOO
0x07, 0xC0, # OOOOO
0x07, 0xC0, # OOOOO
0x07, 0xC0, # OOOOO
0x07, 0xC0, # OOOOO
0x07, 0xC0, # OOOOO
0x07, 0xC0, # OOOOO
0x07, 0xC0, # OOOOO
0x07, 0xC0, # OOOOO
0x0F, 0xE0, # OOOOOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @3744 'u' (15 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0xFE, 0x3E, # OOOOOOO OOOOO
0x7C, 0x1C, # OOOOO OOO
0x7C, 0x08, # OOOOO O
0x7C, 0x08, # OOOOO O
0x7C, 0x08, # OOOOO O
0x7C, 0x08, # OOOOO O
0x7C, 0x08, # OOOOO O
0x7C, 0x08, # OOOOO O
0x7C, 0x08, # OOOOO O
0x7C, 0x08, # OOOOO O
0x7C, 0x08, # OOOOO O
0x7C, 0x08, # OOOOO O
0x7C, 0x08, # OOOOO O
0x7C, 0x18, # OOOOO OO
0x3E, 0x30, # OOOOO OO
0x1E, 0x60, # OOOO OO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @3792 'v' (14 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0xFF, 0x3C, # OOOOOOOO OOOO
0x7E, 0x18, # OOOOOO OO
0x7E, 0x10, # OOOOOO O
0x3E, 0x10, # OOOOO O
0x3F, 0x30, # OOOOOO OO
0x3F, 0x20, # OOOOOO O
0x1F, 0x20, # OOOOO O
0x1F, 0x20, # OOOOO O
0x1F, 0xA0, # OOOOOO O
0x1F, 0xC0, # OOOOOOO
0x0F, 0x80, # OOOOO
0x0F, 0xC0, # OOOOOO
0x0F, 0xC0, # OOOOOO
0x07, 0x80, # OOOO
0x07, 0x80, # OOOO
0x07, 0x80, # OOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @3840 'w' (19 pixels wide)
0x00, 0x00, 0x00, #
0x00, 0x00, 0x00, #
0x00, 0x00, 0x00, #
0x00, 0x00, 0x00, #
0xFF, 0x39, 0xE0, # OOOOOOOO OOO OOOO
0x7E, 0x38, 0xC0, # OOOOOO OOO OO
0x7E, 0x38, 0x80, # OOOOOO OOO O
0x3E, 0x7C, 0x80, # OOOOO OOOOO O
0x3F, 0x7C, 0x80, # OOOOOO OOOOO O
0x3F, 0x7D, 0x80, # OOOOOO OOOOO OO
0x3F, 0x7D, 0x00, # OOOOOO OOOOO O
0x1F, 0x7F, 0x00, # OOOOO OOOOOOO
0x1F, 0xBF, 0x00, # OOOOOO OOOOOO
0x1F, 0xBE, 0x00, # OOOOOO OOOOO
0x0F, 0xBE, 0x00, # OOOOO OOOOO
0x0F, 0xBE, 0x00, # OOOOO OOOOO
0x0F, 0x9E, 0x00, # OOOOO OOOO
0x0F, 0x1E, 0x00, # OOOO OOOO
0x07, 0x1E, 0x00, # OOO OOOO
0x07, 0x1C, 0x00, # OOO OOO
0x00, 0x00, 0x00, #
0x00, 0x00, 0x00, #
0x00, 0x00, 0x00, #
0x00, 0x00, 0x00, #
# @3912 'x' (14 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0xFF, 0x7C, # OOOOOOOO OOOOO
0x7E, 0x38, # OOOOOO OOO
0x7E, 0x30, # OOOOOO OO
0x3F, 0x60, # OOOOOO OO
0x3F, 0x40, # OOOOOO O
0x1F, 0x80, # OOOOOO
0x1F, 0x80, # OOOOOO
0x0F, 0xC0, # OOOOOO
0x0F, 0xC0, # OOOOOO
0x07, 0xE0, # OOOOOO
0x0F, 0xE0, # OOOOOOO
0x0B, 0xF0, # O OOOOOO
0x1B, 0xF0, # OO OOOOOO
0x31, 0xF8, # OO OOOOOO
0x71, 0xF8, # OOO OOOOOO
0xF9, 0xFC, # OOOOO OOOOOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @3960 'y' (14 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0xFE, 0x7C, # OOOOOOO OOOOO
0x7C, 0x38, # OOOOO OOO
0x7C, 0x10, # OOOOO O
0x3E, 0x10, # OOOOO O
0x3E, 0x20, # OOOOO O
0x3F, 0x20, # OOOOOO O
0x1F, 0x40, # OOOOO O
0x1F, 0x80, # OOOOOO
0x0F, 0x80, # OOOOO
0x0F, 0x80, # OOOOO
0x0F, 0x80, # OOOOO
0x0F, 0x80, # OOOOO
0x0F, 0x80, # OOOOO
0x0F, 0x80, # OOOOO
0x0F, 0x80, # OOOOO
0x1F, 0xC0, # OOOOOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @4008 'z' (12 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x7D, 0xF0, # OOOOO OOOOO
0x7B, 0xF0, # OOOO OOOOOO
0x63, 0xE0, # OO OOOOO
0x67, 0xE0, # OO OOOOOO
0x47, 0xE0, # O OOOOOO
0x0F, 0xC0, # OOOOOO
0x0F, 0xC0, # OOOOOO
0x1F, 0x80, # OOOOOO
0x1F, 0x80, # OOOOOO
0x3F, 0x00, # OOOOOO
0x3F, 0x00, # OOOOOO
0x3E, 0x10, # OOOOO O
0x7E, 0x30, # OOOOOO OO
0x7C, 0x30, # OOOOO OO
0xFC, 0xF0, # OOOOOO OOOO
0xFD, 0xF0, # OOOOOO OOOOO
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @4056 '{' (8 pixels wide)
0x00, #
0x00, #
0x00, #
0x00, #
0x1F, # OOOOO
0x3E, # OOOOO
0x3C, # OOOO
0x3C, # OOOO
0x3C, # OOOO
0x3C, # OOOO
0x3C, # OOOO
0x3C, # OOOO
0x3C, # OOOO
0xF0, # OOOO
0x7C, # OOOOO
0x3C, # OOOO
0x3C, # OOOO
0x3C, # OOOO
0x3C, # OOOO
0x3C, # OOOO
0x3C, # OOOO
0x3E, # OOOOO
0x1F, # OOOOO
0x00, #
# @4080 '|' (3 pixels wide)
0x00, #
0x00, #
0x00, #
0x00, #
0xE0, # OOO
0xE0, # OOO
0xE0, # OOO
0xE0, # OOO
0xE0, # OOO
0xE0, # OOO
0xE0, # OOO
0xE0, # OOO
0xE0, # OOO
0xE0, # OOO
0xE0, # OOO
0xE0, # OOO
0xE0, # OOO
0xE0, # OOO
0xE0, # OOO
0xE0, # OOO
0xE0, # OOO
0xE0, # OOO
0xE0, # OOO
0xE0, # OOO
# @4104 '}' (8 pixels wide)
0x00, #
0x00, #
0x00, #
0x00, #
0xF8, # OOOOO
0x7C, # OOOOO
0x3C, # OOOO
0x3C, # OOOO
0x3C, # OOOO
0x3C, # OOOO
0x3C, # OOOO
0x3C, # OOOO
0x3C, # OOOO
0x0F, # OOOO
0x1E, # OOOO
0x3C, # OOOO
0x3C, # OOOO
0x3C, # OOOO
0x3C, # OOOO
0x3C, # OOOO
0x3C, # OOOO
0x7C, # OOOOO
0xF8, # OOOOO
0x00, #
# @4128 '~' (9 pixels wide)
0x00, 0x00, #
0x00, 0x00, #
0x38, 0x80, # OOO O
0x7F, 0x80, # OOOOOOOO
0x8F, 0x00, # O OOOO
0x80, 0x00, # O
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
0x00, 0x00, #
# @4176 '°' (6 pixels wide)
0x00, #
0x00, #
0x00, #
0x00, #
0x30, # OO
0x78, # OOOO
0xCC, # OO OO
0xCC, # OO OO
0xCC, # OO OO
0xFC, # OOOOOO
0x78, # OOOO
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
0x00, #
)
descriptors = (
(5,0),# !
(8,24),# "
(14,48),# #
(11,96),# $
(17,144),# %
(15,216),# &
(3,264),# '
(6,288),# (
(6,312),# )
(9,336),# *
(12,384),# +
(4,432),# ,
(6,456),# -
(5,480),# .
(10,504),# /
(12,552),# 0
(6,600),# 1
(11,624),# 2
(12,672),# 3
(11,720),# 4
(11,768),# 5
(12,816),# 6
(10,864),# 7
(12,912),# 8
(12,960),# 9
(5,1008),# :
(5,1032),# ;
(10,1056),# <
(12,1104),# =
(10,1152),# >
(9,1200),# ?
(16,1248),# @
(15,1296),# A
(14,1344),# B
(13,1392),# C
(14,1440),# D
(12,1488),# E
(12,1536),# F
(14,1584),# G
(16,1632),# H
(7,1680),# I
(12,1704),# J
(16,1752),# K
(12,1800),# L
(18,1848),# M
(15,1920),# N
(15,1968),# O
(14,2016),# P
(14,2064),# Q
(16,2112),# R
(12,2160),# S
(14,2208),# T
(15,2256),# U
(14,2304),# V
(19,2352),# W
(14,2424),# X
(14,2472),# Y
(12,2520),# Z
(6,2568),# [
(10,2592),# \
(6,2640),# ]
(14,2664),# ^
(12,2712),# _
(6,2760),# `
(15,2784),# a
(14,2832),# b
(13,2880),# c
(14,2928),# d
(12,2976),# e
(12,3024),# f
(14,3072),# g
(16,3120),# h
(7,3168),# i
(12,3192),# j
(16,3240),# k
(12,3288),# l
(18,3336),# m
(15,3408),# n
(15,3456),# o
(14,3504),# p
(14,3552),# q
(16,3600),# r
(12,3648),# s
(14,3696),# t
(15,3744),# u
(14,3792),# v
(19,3840),# w
(14,3912),# x
(14,3960),# y
(12,4008),# z
(8,4056),# {
(3,4080),# |
(8,4104),# }
(9,4128),# ~
(6,4176),# °
)
kerning = (
(5,5,4,5,5,5,5,5,5,5,4,5,4,5,3,5,4,5,5,4,5,4,5,5,5,5,5,4,5,5,5,4,4,4,4,4,4,4,4,4,4,5,4,4,4,4,4,4,5,4,5,5,4,4,4,4,4,5,5,5,4,4,0,0,4,4,4,4,4,4,4,4,4,5,4,4,4,4,4,4,5,4,5,5,4,4,4,4,4,5,3,5,4,5,5,),
(8,8,6,7,8,5,8,6,8,8,3,4,2,3,2,7,8,8,8,3,8,7,8,8,8,7,7,5,7,8,8,7,4,8,7,8,8,8,7,8,8,3,8,8,8,8,7,8,7,8,8,8,8,8,8,8,8,7,8,8,8,5,0,2,4,8,7,8,8,8,7,8,8,3,8,8,8,8,7,8,7,8,8,8,8,8,8,8,8,7,6,8,8,8,8,),
(14,14,13,13,14,13,14,13,13,12,11,10,11,11,9,14,13,14,14,13,14,14,14,14,14,14,14,12,14,14,12,14,10,13,14,13,13,13,14,13,13,12,13,13,12,13,14,13,14,13,14,14,13,12,12,12,12,13,14,13,12,12,2,8,10,13,14,13,13,13,14,13,13,12,13,13,12,13,14,13,14,13,14,14,13,12,12,12,12,13,12,14,12,12,14,),
(11,10,11,11,10,11,10,11,10,10,11,10,11,11,8,11,10,10,11,11,11,11,10,11,11,11,10,10,11,11,10,11,9,10,11,10,10,10,11,10,10,11,10,10,8,9,11,10,11,10,11,10,10,9,9,9,9,10,11,9,9,10,0,5,9,10,11,10,10,10,11,10,10,11,10,10,8,9,11,10,11,10,11,10,10,9,9,9,9,10,11,11,9,9,10,),
(17,13,17,17,12,17,14,17,16,12,15,17,15,17,15,17,16,16,17,17,17,16,15,17,17,17,17,12,17,17,16,16,15,16,17,16,16,16,16,16,16,17,16,16,14,15,16,16,17,16,17,13,16,13,13,15,13,16,17,13,15,9,5,11,15,16,17,16,16,16,16,16,16,17,16,16,14,15,16,16,17,16,17,13,16,13,13,15,13,16,15,17,15,13,12,),
(14,12,13,14,12,13,12,14,14,13,14,15,14,14,13,14,15,15,14,13,14,14,12,14,14,14,15,14,14,14,13,14,15,15,14,15,15,15,14,15,15,14,15,15,14,15,14,15,14,15,15,12,13,11,12,15,12,15,15,12,13,14,3,9,15,15,14,15,15,15,14,15,15,14,15,15,14,15,14,15,14,15,15,12,13,11,12,15,12,15,13,15,13,12,13,),
(3,3,2,2,3,0,3,2,3,3,0,0,0,0,0,3,3,3,3,0,3,3,3,3,3,3,3,1,3,3,3,3,0,3,3,3,3,3,3,3,3,0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,1,0,0,0,3,3,3,3,3,3,3,3,0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,1,3,3,3,3,),
(6,6,5,5,6,5,6,5,6,6,5,6,5,5,6,5,6,6,6,5,6,5,6,6,5,5,6,5,5,6,6,5,5,6,5,6,6,6,5,6,6,5,6,6,6,6,5,6,5,6,5,6,6,6,6,6,6,5,6,6,6,5,6,0,5,6,5,6,6,6,5,6,6,5,6,6,6,6,5,6,5,6,5,6,6,6,6,6,6,5,5,6,6,6,6,),
(6,5,6,6,5,6,5,6,5,4,6,5,6,6,3,6,5,5,6,6,6,6,5,6,6,6,6,6,6,6,5,6,5,5,6,5,5,5,6,5,5,6,5,5,4,5,6,5,6,5,6,5,5,3,3,5,3,5,6,4,4,6,3,0,5,5,6,5,5,5,6,5,5,6,5,5,4,5,6,5,6,5,6,5,5,3,3,5,3,5,6,6,4,3,5,),
(9,9,7,8,9,6,9,7,9,9,7,5,7,4,3,8,8,9,9,5,9,8,9,9,9,8,8,8,8,9,9,8,5,8,8,8,8,8,8,8,8,3,8,8,8,8,8,8,8,8,9,9,8,8,8,8,8,8,9,9,8,6,0,3,5,8,8,8,8,8,8,8,8,3,8,8,8,8,8,8,8,8,9,9,8,8,8,8,8,8,7,9,8,9,9,),
(11,8,9,10,8,11,9,12,11,10,12,8,12,8,8,12,11,9,8,11,12,12,9,11,11,12,12,12,12,8,10,12,9,11,12,11,11,11,12,11,11,8,11,11,9,10,12,11,12,11,11,8,11,9,9,8,9,10,12,10,10,12,0,6,9,11,12,11,11,11,12,11,11,8,11,11,9,10,12,11,12,11,11,8,11,9,9,8,9,10,12,12,10,3,8,),
(4,0,2,4,0,3,1,3,3,0,0,4,0,4,3,3,4,4,4,0,4,3,2,4,4,4,4,0,0,4,3,2,4,4,3,4,4,4,2,4,4,4,4,4,3,4,2,4,3,4,4,0,3,0,0,4,1,4,4,0,3,0,3,0,4,4,3,4,4,4,2,4,4,4,4,4,3,4,2,4,3,4,4,0,3,0,0,4,1,4,2,4,3,0,0,),
(5,0,3,4,0,5,3,6,5,4,6,2,6,1,2,6,5,3,1,5,6,6,3,5,5,6,6,6,6,2,4,6,3,5,6,5,5,5,6,5,5,0,5,5,3,4,6,5,6,5,5,1,5,3,3,2,3,4,6,4,4,6,0,0,3,5,6,5,5,5,6,5,5,0,5,5,3,4,6,5,6,5,5,1,5,3,3,2,3,4,6,6,4,0,0,),
(5,0,4,5,0,5,2,5,4,0,0,5,0,5,3,4,4,5,5,4,5,4,3,5,5,5,5,0,4,5,4,4,4,4,4,4,4,4,4,4,4,5,4,4,3,4,4,4,4,4,5,0,4,1,1,4,1,5,5,1,3,0,0,0,4,4,4,4,4,4,4,4,4,5,4,4,3,4,4,4,4,4,5,0,4,1,1,4,1,5,3,5,3,0,0,),
(10,10,7,8,9,6,10,8,10,9,7,6,7,6,4,9,10,9,9,6,9,8,10,9,9,8,8,8,9,10,10,8,5,10,8,10,10,10,8,10,10,6,10,10,10,10,8,10,9,10,9,10,10,10,10,10,10,9,10,10,10,7,4,4,5,10,8,10,10,10,8,10,10,6,10,10,10,10,8,10,9,10,9,10,10,10,10,10,10,9,8,10,10,10,9,),
(11,11,12,11,11,12,11,12,11,11,12,10,12,11,8,12,11,11,11,12,12,12,11,12,12,12,12,12,12,11,10,12,9,11,12,11,11,11,12,11,11,11,11,11,9,10,12,11,12,11,12,12,11,10,10,9,10,10,12,10,10,12,0,6,9,11,12,11,11,11,12,11,11,11,11,11,9,10,12,11,12,11,12,12,11,10,10,9,10,10,12,12,10,10,12,),
(5,5,5,5,5,5,5,5,5,5,5,6,5,5,4,5,6,6,5,5,5,5,5,5,5,5,6,5,5,5,5,5,6,6,5,6,6,6,5,6,6,5,6,6,5,6,5,6,5,6,6,5,5,5,5,6,5,6,6,5,5,5,0,0,6,6,5,6,6,6,5,6,6,5,6,6,5,6,5,6,5,6,6,5,5,5,5,6,5,6,5,6,5,5,5,),
(11,11,11,11,11,11,11,11,11,11,10,11,10,11,9,11,10,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,9,10,11,10,10,10,11,10,10,11,10,10,9,10,11,10,11,10,11,11,10,10,10,10,10,10,11,10,9,9,0,5,9,10,11,10,10,10,11,10,10,11,10,10,9,10,11,10,11,10,11,11,10,10,10,10,10,10,9,11,9,10,11,),
(12,11,12,12,11,12,11,12,11,11,11,12,11,12,10,12,11,11,12,12,12,12,11,12,12,12,12,10,12,12,11,12,10,11,12,11,11,11,12,11,11,12,11,11,9,10,12,11,12,11,12,11,11,10,10,10,10,11,12,10,10,10,0,6,10,11,12,11,11,11,12,11,11,12,11,11,9,10,12,11,12,11,12,11,11,10,10,10,10,11,11,12,10,10,11,),
(10,9,11,11,9,11,9,11,10,9,9,10,9,10,8,11,10,10,11,11,11,10,9,11,11,10,10,9,11,9,9,10,10,10,11,10,10,10,10,10,10,11,10,10,9,10,10,10,11,10,11,9,10,9,9,10,9,10,11,9,9,9,0,5,10,10,11,10,10,10,10,10,10,11,10,10,9,10,10,10,11,10,11,9,10,9,9,10,9,10,9,11,9,9,9,),
(11,10,11,11,8,11,10,11,10,8,11,10,11,11,8,11,10,10,11,11,11,11,10,11,11,11,10,10,11,11,10,11,9,10,11,10,10,10,11,10,10,11,10,10,10,10,11,10,11,10,11,10,10,10,10,10,10,10,11,10,10,11,0,5,9,10,11,10,10,10,11,10,10,11,10,10,10,10,11,10,11,10,11,10,10,10,10,10,10,10,11,11,10,10,8,),
(12,12,12,12,12,12,12,12,12,12,12,11,12,12,9,12,11,12,12,12,12,12,12,12,12,12,11,11,12,12,12,12,10,11,12,11,11,11,12,11,11,12,11,11,10,10,12,11,12,11,12,12,11,11,11,11,11,11,12,11,10,12,0,6,10,11,12,11,11,11,12,11,11,12,11,11,10,10,12,11,12,11,12,12,11,11,11,11,11,11,12,12,10,11,12,),
(10,10,7,8,9,7,10,7,10,9,7,7,7,7,5,8,10,9,9,7,9,8,10,9,9,8,8,8,8,10,10,8,6,10,8,10,10,10,8,10,10,7,10,10,10,10,8,10,8,10,9,10,10,10,10,10,10,9,10,10,10,7,0,4,6,10,8,10,10,10,8,10,10,7,10,10,10,10,8,10,8,10,9,10,10,10,10,10,10,9,8,10,10,10,9,),
(12,11,12,12,11,12,11,12,11,11,11,12,11,12,10,12,11,11,12,12,12,12,11,12,12,12,12,10,12,12,11,12,10,11,12,11,11,11,12,11,11,12,11,11,9,10,12,11,12,11,12,11,11,10,10,10,10,11,12,10,10,10,0,6,10,11,12,11,11,11,12,11,11,12,11,11,9,10,12,11,12,11,12,11,11,10,10,10,10,11,11,12,10,10,11,),
(11,11,12,11,11,12,11,12,11,11,12,11,12,11,9,12,11,11,11,12,12,12,11,12,12,12,12,12,12,11,11,12,9,11,12,11,11,11,12,11,11,11,11,11,9,10,12,11,12,11,12,12,11,10,10,10,10,10,12,10,10,12,0,6,9,11,12,11,11,11,12,11,11,11,11,11,9,10,12,11,12,11,12,12,11,10,10,10,10,10,12,12,10,10,12,),
(5,4,4,5,4,5,4,5,4,4,5,5,5,5,3,5,4,5,5,4,5,5,4,5,5,5,5,5,5,5,4,5,4,4,5,4,4,4,5,4,4,5,4,4,3,4,5,4,5,4,5,5,4,3,3,4,3,5,5,3,3,4,0,0,4,4,5,4,4,4,5,4,4,5,4,4,3,4,5,4,5,4,5,5,4,3,3,4,3,5,3,5,3,0,5,),
(4,4,4,4,4,3,4,5,4,4,5,4,5,4,3,5,4,4,4,3,5,5,4,5,5,5,5,5,5,4,3,5,4,4,5,4,4,4,5,4,4,4,4,4,3,4,5,4,5,4,5,5,4,3,3,4,3,4,5,3,3,4,3,0,4,4,5,4,4,4,5,4,4,4,4,4,3,4,5,4,5,4,5,5,4,3,3,4,3,4,3,5,3,0,5,),
(10,10,10,10,10,10,10,10,10,10,10,10,10,10,8,10,9,10,10,10,10,10,10,10,10,10,10,7,10,10,10,10,8,9,10,9,9,9,10,9,9,10,9,9,9,9,10,9,10,9,10,10,9,9,9,9,9,9,10,10,9,8,0,4,8,9,10,9,9,9,10,9,9,10,9,9,9,9,10,9,10,9,10,10,9,9,9,9,9,9,10,10,9,10,10,),
(12,12,12,12,12,12,12,12,11,11,12,8,12,11,9,12,11,12,12,12,12,12,12,12,12,12,12,12,12,12,10,12,9,11,12,11,11,11,12,11,11,12,11,11,9,11,12,11,12,11,12,12,11,10,10,10,10,11,12,11,10,10,0,6,9,11,12,11,11,11,12,11,11,12,11,11,9,11,12,11,12,11,12,12,11,10,10,10,10,11,12,12,10,3,12,),
(9,6,6,8,7,8,7,10,9,8,10,6,10,5,5,10,9,6,6,8,10,10,6,8,9,10,10,10,9,7,8,10,6,9,10,9,9,9,10,9,9,5,9,9,7,8,10,9,10,9,9,7,9,7,7,6,7,7,10,8,8,9,0,4,6,9,10,9,9,9,10,9,9,5,9,9,7,8,10,9,10,9,9,7,9,7,7,6,7,7,8,10,8,1,8,),
(9,9,8,8,9,6,9,8,9,9,8,6,8,6,4,9,8,9,9,6,9,9,9,9,9,9,9,9,9,9,9,9,5,8,9,8,8,8,9,8,8,6,8,8,7,8,9,8,9,8,9,9,8,8,8,8,8,8,9,8,7,7,0,3,5,8,9,8,8,8,9,8,8,6,8,8,7,8,9,8,9,8,9,9,8,8,8,8,8,8,7,9,7,8,9,),
(16,16,15,15,16,15,16,16,15,15,16,14,16,13,12,16,15,16,16,15,16,16,16,16,16,16,16,16,16,16,14,16,14,15,16,15,15,15,16,15,15,14,15,15,13,15,16,15,16,15,16,16,15,14,14,14,14,15,16,15,14,16,4,10,14,15,16,15,15,15,16,15,15,14,15,15,13,15,16,15,16,15,16,16,15,14,14,14,14,15,15,16,14,13,16,),
(14,11,13,14,11,13,12,14,14,11,13,15,13,14,13,13,15,15,14,13,14,13,12,14,14,14,15,12,13,14,13,13,15,15,13,15,15,15,13,15,15,14,15,15,14,15,13,15,13,15,15,12,13,11,11,15,12,15,15,11,13,12,3,9,15,15,13,15,15,15,13,15,15,14,15,15,14,15,13,15,13,15,15,12,13,11,11,15,12,15,13,15,13,11,12,),
(14,14,14,14,14,14,14,14,14,14,13,14,13,14,12,14,13,14,14,14,14,14,14,14,14,14,14,13,14,14,14,14,12,13,14,13,13,13,14,13,13,14,13,13,12,13,14,13,14,13,14,14,13,13,13,13,13,13,14,13,12,12,2,8,12,13,14,13,13,13,14,13,13,14,13,13,12,13,14,13,14,13,14,14,13,13,13,13,13,13,13,14,12,13,14,),
(13,13,13,13,13,13,13,13,13,13,8,12,7,13,10,13,13,13,13,13,13,13,13,13,13,13,13,11,13,13,13,13,11,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,12,13,13,13,11,1,7,11,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,12,11,13,13,13,13,),
(13,13,14,13,13,14,13,14,13,13,14,12,14,13,10,14,13,13,13,14,14,14,13,14,14,14,14,14,14,13,13,14,11,13,14,13,13,13,14,13,13,13,13,13,11,12,14,13,14,13,14,14,13,12,12,12,12,12,14,12,12,14,2,8,11,13,14,13,13,13,14,13,13,13,13,13,11,12,14,13,14,13,14,14,13,12,12,12,12,12,14,14,12,12,14,),
(12,12,12,12,12,12,12,12,12,12,10,12,10,12,10,12,12,12,12,12,12,11,12,12,12,12,12,10,12,12,12,11,12,12,12,12,12,12,11,12,12,12,12,12,12,12,11,12,12,12,12,12,12,12,12,12,12,12,12,12,12,10,0,6,12,12,12,12,12,12,11,12,12,12,12,12,12,12,11,12,12,12,12,12,12,12,12,12,12,12,10,12,12,12,12,),
(12,12,10,11,12,10,12,11,12,12,10,8,10,7,6,12,12,12,12,10,12,11,12,12,12,11,11,10,12,12,12,11,8,12,11,12,12,12,11,12,12,9,12,12,12,12,11,12,12,12,12,12,12,12,12,12,12,11,12,12,12,10,0,6,8,12,11,12,12,12,11,12,12,9,12,12,12,12,11,12,12,12,12,12,12,12,12,12,12,11,10,12,12,12,12,),
(14,14,13,13,14,13,14,14,14,14,14,13,14,13,11,14,14,14,14,13,14,14,14,14,14,14,14,12,14,14,14,14,12,14,14,14,14,14,14,14,14,13,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,13,14,14,14,14,2,8,12,14,14,14,14,14,14,14,14,13,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,13,13,14,14,14,14,),
(15,16,15,15,15,15,16,15,16,15,15,16,15,15,14,15,16,16,15,15,15,15,16,15,15,15,16,15,15,15,15,15,16,16,15,16,16,16,15,16,16,15,16,16,16,16,15,16,15,16,16,16,16,16,16,16,16,16,16,16,16,15,4,10,16,16,15,16,16,16,15,16,16,15,16,16,16,16,15,16,15,16,16,16,16,16,16,16,16,16,15,16,16,16,15,),
(6,7,6,6,6,6,7,6,7,6,6,7,6,6,5,6,7,7,6,6,6,6,7,6,6,6,7,6,6,6,6,6,7,7,6,7,7,7,6,7,7,6,7,7,7,7,6,7,6,7,7,7,7,7,7,7,7,7,7,7,7,6,0,1,7,7,6,7,7,7,6,7,7,6,7,7,7,7,6,7,6,7,7,7,7,7,7,7,7,7,6,7,7,7,6,),
(11,12,11,11,11,11,12,11,12,11,11,11,11,11,9,11,12,11,11,11,11,11,12,11,11,11,11,11,11,11,11,11,9,12,11,12,12,12,11,12,12,11,12,12,12,12,11,12,11,12,11,12,12,12,12,12,12,11,12,12,12,11,0,6,9,12,11,12,12,12,11,12,12,11,12,12,12,12,11,12,11,12,11,12,12,12,12,12,12,11,11,12,12,12,11,),
(15,15,14,15,13,14,15,15,15,13,13,16,13,15,14,14,16,16,15,14,15,14,15,15,15,15,16,12,14,15,14,14,16,16,14,16,16,16,14,16,16,15,16,16,15,16,14,16,14,16,16,15,15,15,15,16,15,16,16,15,15,13,4,10,16,16,14,16,16,16,14,16,16,15,16,16,15,16,14,16,14,16,16,15,15,15,15,16,15,16,14,16,15,15,13,),
(12,7,12,12,8,12,9,12,11,6,7,12,6,12,10,12,12,12,12,12,12,12,10,12,12,12,12,7,12,12,11,12,12,12,12,12,12,12,12,12,12,12,12,12,11,12,12,12,12,12,12,8,11,8,8,12,9,12,12,8,10,6,0,6,12,12,12,12,12,12,12,12,12,12,12,12,11,12,12,12,12,12,12,8,11,8,8,12,9,12,11,12,10,7,6,),
(17,18,17,17,17,17,18,17,18,17,17,18,17,17,16,17,18,18,17,17,17,17,18,17,17,17,18,17,17,17,17,17,18,18,17,18,18,18,17,18,18,17,18,18,18,18,17,18,17,18,18,18,18,18,18,18,18,18,18,18,18,17,6,12,18,18,17,18,18,18,17,18,18,17,18,18,18,18,17,18,17,18,18,18,18,18,18,18,18,18,17,18,18,18,17,),
(14,15,13,13,13,13,15,13,15,13,13,13,13,13,11,13,15,13,13,13,14,13,15,13,13,13,13,13,13,14,14,13,13,15,13,15,15,15,13,15,15,13,15,15,15,15,13,15,13,15,13,15,15,15,15,15,15,14,15,15,15,13,3,9,13,15,13,15,15,15,13,15,15,13,15,15,15,15,13,15,13,15,13,15,15,15,15,15,15,14,12,15,15,15,13,),
(14,14,15,14,14,15,14,15,14,14,15,13,15,14,11,15,14,14,14,15,15,15,14,15,15,15,15,15,15,14,13,15,12,14,15,14,14,14,15,14,14,14,14,14,12,13,15,14,15,14,15,15,14,13,13,12,13,13,15,13,13,15,3,9,12,14,15,14,14,14,15,14,14,14,14,14,12,13,15,14,15,14,15,15,14,13,13,12,13,13,15,15,13,12,15,),
(14,14,13,13,14,11,14,13,13,14,13,10,13,9,9,14,13,14,14,11,14,14,14,14,14,14,14,14,14,14,13,14,10,13,14,13,13,13,14,13,13,8,13,13,12,13,14,13,14,13,14,14,13,12,12,12,12,13,14,13,12,12,2,8,10,13,14,13,13,13,14,13,13,8,13,13,12,13,14,13,14,13,14,14,13,12,12,12,12,13,12,14,12,13,14,),
(13,13,14,13,13,14,13,14,13,13,14,14,14,13,12,14,14,14,13,14,14,14,13,14,14,14,14,14,14,13,12,14,14,14,14,14,14,14,14,14,14,13,14,14,13,14,14,14,14,14,14,14,13,12,12,14,12,14,14,12,12,14,2,8,14,14,14,14,14,14,14,14,14,13,14,14,13,14,14,14,14,14,14,14,13,12,12,14,12,14,14,14,12,11,14,),
(16,14,14,16,14,15,14,15,15,14,14,16,14,16,14,15,15,16,16,14,16,15,14,16,16,16,16,13,14,16,15,14,15,15,15,15,15,15,14,15,15,16,15,15,14,15,14,15,15,15,16,14,15,13,13,15,13,16,16,13,14,12,4,10,15,15,15,15,15,15,14,15,15,16,15,15,14,15,14,15,15,15,16,14,15,13,13,15,13,16,14,16,14,13,14,),
(12,11,12,12,11,12,11,12,11,11,12,11,12,12,9,12,11,11,12,12,12,12,11,12,12,12,11,11,12,12,11,12,10,11,12,11,11,11,12,11,11,12,11,11,11,11,12,11,12,11,12,11,11,11,11,11,11,11,12,11,11,11,0,6,10,11,12,11,11,11,12,11,11,12,11,11,11,11,12,11,12,11,12,11,11,11,11,11,11,11,12,12,11,11,11,),
(14,14,13,13,14,11,14,13,14,14,10,11,10,10,9,14,14,14,14,10,14,14,14,14,14,14,14,12,14,14,14,14,11,14,14,14,14,14,14,14,14,10,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,13,14,14,14,12,2,8,11,14,14,14,14,14,14,14,14,10,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,13,12,14,14,14,14,),
(14,15,13,13,13,13,15,13,15,13,13,13,13,13,11,13,15,13,13,13,14,13,15,13,13,13,13,13,13,14,14,13,11,15,13,15,15,15,13,15,15,13,15,15,15,15,13,15,13,15,13,15,15,15,15,15,15,14,15,15,15,13,3,9,11,15,13,15,15,15,13,15,15,13,15,15,15,15,13,15,13,15,13,15,15,15,15,15,15,14,13,15,15,15,13,),
(13,14,10,11,12,10,14,11,14,12,11,10,11,10,7,12,14,12,12,10,13,11,14,12,12,11,11,11,12,13,13,11,9,14,11,14,14,14,11,14,14,10,14,14,14,14,11,14,12,14,12,14,14,14,14,14,14,13,14,14,14,11,2,8,9,14,11,14,14,14,11,14,14,10,14,14,14,14,11,14,12,14,12,14,14,14,14,14,14,13,11,14,14,14,12,),
(18,19,16,16,17,15,19,16,19,17,16,15,16,15,13,17,19,17,17,15,18,17,19,17,17,17,17,16,17,18,18,17,14,19,17,19,19,19,17,19,19,15,19,19,19,19,17,19,17,19,17,19,19,19,19,19,19,18,19,19,19,16,7,13,14,19,17,19,19,19,17,19,19,15,19,19,19,19,17,19,17,19,17,19,19,19,19,19,19,18,16,19,19,19,17,),
(13,14,12,13,12,12,14,13,14,12,11,14,11,13,12,12,14,14,13,12,13,12,14,13,13,13,14,10,12,13,13,11,14,14,12,14,14,14,11,14,14,13,14,14,14,14,11,14,12,14,14,14,14,14,14,14,14,14,14,14,14,10,2,8,14,14,12,14,14,14,11,14,14,13,14,14,14,14,11,14,12,14,14,14,14,14,14,14,14,14,12,14,14,14,12,),
(13,14,10,11,12,9,14,10,14,12,9,10,9,9,8,11,14,12,12,9,13,11,14,12,12,11,11,10,11,13,13,11,10,14,11,14,14,14,11,14,14,9,14,14,14,14,11,14,11,14,12,14,14,14,14,14,14,13,14,14,14,9,2,8,10,14,11,14,14,14,11,14,14,9,14,14,14,14,11,14,11,14,12,14,14,14,14,14,14,13,11,14,14,14,12,),
(12,12,12,12,11,12,12,12,12,11,9,12,9,12,10,12,12,12,12,12,12,11,12,12,12,12,12,10,12,12,12,11,12,12,12,12,12,12,11,12,12,12,12,12,12,12,11,12,12,12,12,12,12,12,12,12,12,12,12,12,12,9,0,6,12,12,12,12,12,12,11,12,12,12,12,12,12,12,11,12,12,12,12,12,12,12,12,12,12,12,10,12,12,12,11,),
(5,6,4,4,4,4,6,4,6,4,4,5,4,4,6,4,6,4,4,4,5,4,6,4,4,4,5,4,4,5,5,4,4,6,4,6,6,6,4,6,6,4,6,6,6,6,4,6,4,6,4,6,6,6,6,6,6,5,6,6,6,4,6,0,4,6,4,6,6,6,4,6,6,4,6,6,6,6,4,6,4,6,4,6,6,6,6,6,6,5,4,6,6,6,4,),
(8,4,7,8,4,7,7,7,10,4,6,9,6,8,10,7,8,8,8,7,8,7,6,8,8,8,9,5,7,8,7,6,8,8,7,8,8,8,6,8,8,8,8,8,7,8,6,8,7,8,8,5,7,3,4,8,5,8,10,4,10,6,10,4,8,8,7,8,8,8,6,8,8,8,8,8,7,8,6,8,7,8,8,5,7,3,4,8,5,8,7,10,10,3,5,),
(6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,0,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,),
(12,10,11,12,10,13,11,14,13,11,14,10,14,9,10,14,13,10,10,12,14,14,10,12,12,13,13,12,12,10,12,14,11,13,14,13,13,13,14,13,13,8,13,13,11,12,14,13,14,13,13,11,13,11,11,10,10,11,14,11,12,14,2,8,11,13,14,13,13,13,14,13,13,8,13,13,11,12,14,13,14,13,13,11,13,11,11,10,10,11,12,14,12,8,11,),
(7,4,0,1,0,0,9,9,12,3,0,12,6,7,12,0,6,1,0,1,1,0,2,0,0,7,12,2,0,2,3,0,0,0,0,0,0,0,0,0,5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12,6,12,0,12,6,0,0,0,0,0,0,0,0,5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9,12,12,3,6,),
(1,0,0,1,0,0,3,0,0,0,0,2,0,1,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,4,0,),
(14,11,13,14,11,13,12,14,14,11,13,15,13,14,13,13,15,15,14,13,14,13,12,14,14,14,15,12,13,14,13,13,15,15,13,15,15,15,13,15,15,14,15,15,14,15,13,15,13,15,15,12,13,11,11,15,12,15,15,11,13,12,3,9,15,15,13,15,15,15,13,15,15,14,15,15,14,15,13,15,13,15,15,12,13,11,11,15,12,15,13,15,13,11,12,),
(14,14,14,14,14,14,14,14,14,14,13,14,13,14,12,14,13,14,14,14,14,14,14,14,14,14,14,13,14,14,14,14,12,13,14,13,13,13,14,13,13,14,13,13,12,13,14,13,14,13,14,14,13,13,13,13,13,13,14,13,12,12,2,8,12,13,14,13,13,13,14,13,13,14,13,13,12,13,14,13,14,13,14,14,13,13,13,13,13,13,13,14,12,13,14,),
(13,13,13,13,13,13,13,13,13,13,8,12,7,13,10,13,13,13,13,13,13,13,13,13,13,13,13,11,13,13,13,13,11,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,12,13,13,13,11,1,7,11,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,12,11,13,13,13,13,),
(13,13,14,13,13,14,13,14,13,13,14,12,14,13,10,14,13,13,13,14,14,14,13,14,14,14,14,14,14,13,13,14,11,13,14,13,13,13,14,13,13,13,13,13,11,12,14,13,14,13,14,14,13,12,12,12,12,12,14,12,12,14,2,8,11,13,14,13,13,13,14,13,13,13,13,13,11,12,14,13,14,13,14,14,13,12,12,12,12,12,14,14,12,12,14,),
(12,12,12,12,12,12,12,12,12,12,10,12,10,12,10,12,12,12,12,12,12,11,12,12,12,12,12,10,12,12,12,11,12,12,12,12,12,12,11,12,12,12,12,12,12,12,11,12,12,12,12,12,12,12,12,12,12,12,12,12,12,10,0,6,12,12,12,12,12,12,11,12,12,12,12,12,12,12,11,12,12,12,12,12,12,12,12,12,12,12,10,12,12,12,12,),
(12,12,10,11,12,10,12,11,12,12,10,8,10,7,6,12,12,12,12,10,12,11,12,12,12,11,11,10,12,12,12,11,8,12,11,12,12,12,11,12,12,9,12,12,12,12,11,12,12,12,12,12,12,12,12,12,12,11,12,12,12,10,0,6,8,12,11,12,12,12,11,12,12,9,12,12,12,12,11,12,12,12,12,12,12,12,12,12,12,11,10,12,12,12,12,),
(14,14,13,13,14,13,14,14,14,14,14,13,14,13,11,14,14,14,14,13,14,14,14,14,14,14,14,12,14,14,14,14,12,14,14,14,14,14,14,14,14,13,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,13,14,14,14,14,2,8,12,14,14,14,14,14,14,14,14,13,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,13,13,14,14,14,14,),
(15,16,15,15,15,15,16,15,16,15,15,16,15,15,14,15,16,16,15,15,15,15,16,15,15,15,16,15,15,15,15,15,16,16,15,16,16,16,15,16,16,15,16,16,16,16,15,16,15,16,16,16,16,16,16,16,16,16,16,16,16,15,4,10,16,16,15,16,16,16,15,16,16,15,16,16,16,16,15,16,15,16,16,16,16,16,16,16,16,16,15,16,16,16,15,),
(6,7,6,6,6,6,7,6,7,6,6,7,6,6,5,6,7,7,6,6,6,6,7,6,6,6,7,6,6,6,6,6,7,7,6,7,7,7,6,7,7,6,7,7,7,7,6,7,6,7,7,7,7,7,7,7,7,7,7,7,7,6,0,1,7,7,6,7,7,7,6,7,7,6,7,7,7,7,6,7,6,7,7,7,7,7,7,7,7,7,6,7,7,7,6,),
(11,12,11,11,11,11,12,11,12,11,11,11,11,11,9,11,12,11,11,11,11,11,12,11,11,11,11,11,11,11,11,11,9,12,11,12,12,12,11,12,12,11,12,12,12,12,11,12,11,12,11,12,12,12,12,12,12,11,12,12,12,11,0,6,9,12,11,12,12,12,11,12,12,11,12,12,12,12,11,12,11,12,11,12,12,12,12,12,12,11,11,12,12,12,11,),
(15,15,14,15,13,14,15,15,15,13,13,16,13,15,14,14,16,16,15,14,15,14,15,15,15,15,16,12,14,15,14,14,16,16,14,16,16,16,14,16,16,15,16,16,15,16,14,16,14,16,16,15,15,15,15,16,15,16,16,15,15,13,4,10,16,16,14,16,16,16,14,16,16,15,16,16,15,16,14,16,14,16,16,15,15,15,15,16,15,16,14,16,15,15,13,),
(12,7,12,12,8,12,9,12,11,6,7,12,6,12,10,12,12,12,12,12,12,12,10,12,12,12,12,7,12,12,11,12,12,12,12,12,12,12,12,12,12,12,12,12,11,12,12,12,12,12,12,8,11,8,8,12,9,12,12,8,10,6,0,6,12,12,12,12,12,12,12,12,12,12,12,12,11,12,12,12,12,12,12,8,11,8,8,12,9,12,11,12,10,7,6,),
(17,18,17,17,17,17,18,17,18,17,17,18,17,17,16,17,18,18,17,17,17,17,18,17,17,17,18,17,17,17,17,17,18,18,17,18,18,18,17,18,18,17,18,18,18,18,17,18,17,18,18,18,18,18,18,18,18,18,18,18,18,17,6,12,18,18,17,18,18,18,17,18,18,17,18,18,18,18,17,18,17,18,18,18,18,18,18,18,18,18,17,18,18,18,17,),
(14,15,13,13,13,13,15,13,15,13,13,13,13,13,11,13,15,13,13,13,14,13,15,13,13,13,13,13,13,14,14,13,13,15,13,15,15,15,13,15,15,13,15,15,15,15,13,15,13,15,13,15,15,15,15,15,15,14,15,15,15,13,3,9,13,15,13,15,15,15,13,15,15,13,15,15,15,15,13,15,13,15,13,15,15,15,15,15,15,14,12,15,15,15,13,),
(14,14,15,14,14,15,14,15,14,14,15,13,15,14,11,15,14,14,14,15,15,15,14,15,15,15,15,15,15,14,13,15,12,14,15,14,14,14,15,14,14,14,14,14,12,13,15,14,15,14,15,15,14,13,13,12,13,13,15,13,13,15,3,9,12,14,15,14,14,14,15,14,14,14,14,14,12,13,15,14,15,14,15,15,14,13,13,12,13,13,15,15,13,12,15,),
(14,14,13,13,14,11,14,13,13,14,13,10,13,9,9,14,13,14,14,11,14,14,14,14,14,14,14,14,14,14,13,14,10,13,14,13,13,13,14,13,13,8,13,13,12,13,14,13,14,13,14,14,13,12,12,12,12,13,14,13,12,12,2,8,10,13,14,13,13,13,14,13,13,8,13,13,12,13,14,13,14,13,14,14,13,12,12,12,12,13,12,14,12,13,14,),
(13,13,14,13,13,14,13,14,13,13,14,14,14,13,12,14,14,14,13,14,14,14,13,14,14,14,14,14,14,13,12,14,14,14,14,14,14,14,14,14,14,13,14,14,13,14,14,14,14,14,14,14,13,12,12,14,12,14,14,12,12,14,2,8,14,14,14,14,14,14,14,14,14,13,14,14,13,14,14,14,14,14,14,14,13,12,12,14,12,14,14,14,12,11,14,),
(16,14,14,16,14,15,14,15,15,14,14,16,14,16,14,15,15,16,16,14,16,15,14,16,16,16,16,13,14,16,15,14,15,15,15,15,15,15,14,15,15,16,15,15,14,15,14,15,15,15,16,14,15,13,13,15,13,16,16,13,14,12,4,10,15,15,15,15,15,15,14,15,15,16,15,15,14,15,14,15,15,15,16,14,15,13,13,15,13,16,14,16,14,13,14,),
(12,11,12,12,11,12,11,12,11,11,12,11,12,12,9,12,11,11,12,12,12,12,11,12,12,12,11,11,12,12,11,12,10,11,12,11,11,11,12,11,11,12,11,11,11,11,12,11,12,11,12,11,11,11,11,11,11,11,12,11,11,11,0,6,10,11,12,11,11,11,12,11,11,12,11,11,11,11,12,11,12,11,12,11,11,11,11,11,11,11,12,12,11,11,11,),
(14,14,13,13,14,11,14,13,14,14,10,11,10,10,9,14,14,14,14,10,14,14,14,14,14,14,14,12,14,14,14,14,11,14,14,14,14,14,14,14,14,10,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,13,14,14,14,12,2,8,11,14,14,14,14,14,14,14,14,10,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,13,12,14,14,14,14,),
(14,15,13,13,13,13,15,13,15,13,13,13,13,13,11,13,15,13,13,13,14,13,15,13,13,13,13,13,13,14,14,13,11,15,13,15,15,15,13,15,15,13,15,15,15,15,13,15,13,15,13,15,15,15,15,15,15,14,15,15,15,13,3,9,11,15,13,15,15,15,13,15,15,13,15,15,15,15,13,15,13,15,13,15,15,15,15,15,15,14,13,15,15,15,13,),
(13,14,10,11,12,10,14,11,14,12,11,10,11,10,7,12,14,12,12,10,13,11,14,12,12,11,11,11,12,13,13,11,9,14,11,14,14,14,11,14,14,10,14,14,14,14,11,14,12,14,12,14,14,14,14,14,14,13,14,14,14,11,2,8,9,14,11,14,14,14,11,14,14,10,14,14,14,14,11,14,12,14,12,14,14,14,14,14,14,13,11,14,14,14,12,),
(18,19,16,16,17,15,19,16,19,17,16,15,16,15,13,17,19,17,17,15,18,17,19,17,17,17,17,16,17,18,18,17,14,19,17,19,19,19,17,19,19,15,19,19,19,19,17,19,17,19,17,19,19,19,19,19,19,18,19,19,19,16,7,13,14,19,17,19,19,19,17,19,19,15,19,19,19,19,17,19,17,19,17,19,19,19,19,19,19,18,16,19,19,19,17,),
(13,14,12,13,12,12,14,13,14,12,11,14,11,13,12,12,14,14,13,12,13,12,14,13,13,13,14,10,12,13,13,11,14,14,12,14,14,14,11,14,14,13,14,14,14,14,11,14,12,14,14,14,14,14,14,14,14,14,14,14,14,10,2,8,14,14,12,14,14,14,11,14,14,13,14,14,14,14,11,14,12,14,14,14,14,14,14,14,14,14,12,14,14,14,12,),
(13,14,10,11,12,9,14,10,14,12,9,10,9,9,8,11,14,12,12,9,13,11,14,12,12,11,11,10,11,13,13,11,10,14,11,14,14,14,11,14,14,9,14,14,14,14,11,14,11,14,12,14,14,14,14,14,14,13,14,14,14,9,2,8,10,14,11,14,14,14,11,14,14,9,14,14,14,14,11,14,11,14,12,14,14,14,14,14,14,13,11,14,14,14,12,),
(12,12,12,12,11,12,12,12,12,11,9,12,9,12,10,12,12,12,12,12,12,11,12,12,12,12,12,10,12,12,12,11,12,12,12,12,12,12,11,12,12,12,12,12,12,12,11,12,12,12,12,12,12,12,12,12,12,12,12,12,12,9,0,6,12,12,12,12,12,12,11,12,12,12,12,12,12,12,11,12,12,12,12,12,12,12,12,12,12,12,10,12,12,12,11,),
(7,8,6,6,6,6,8,6,8,6,6,7,6,6,8,6,8,6,6,6,7,6,8,6,6,6,7,6,6,7,7,6,6,8,6,8,8,8,6,8,8,6,8,8,8,8,6,8,6,8,6,8,8,8,8,8,8,7,8,8,8,6,8,2,6,8,6,8,8,8,6,8,8,6,8,8,8,8,6,8,6,8,6,8,8,8,8,8,8,7,5,8,8,8,6,),
(3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,),
(6,6,7,6,6,7,6,8,7,6,8,6,8,6,5,8,7,6,6,7,6,8,6,7,6,6,6,6,8,6,6,8,6,7,8,7,7,7,8,7,7,6,7,7,5,6,8,7,8,7,7,6,7,5,5,6,5,6,8,6,6,6,5,2,6,7,8,7,7,7,8,7,7,6,7,7,5,6,8,7,8,7,7,6,7,5,5,6,5,6,8,8,5,6,6,),
(7,8,3,4,6,3,8,4,8,5,0,5,3,4,1,5,8,6,6,1,7,4,8,6,6,4,4,0,0,1,7,3,2,8,3,8,8,8,4,8,8,3,8,8,8,8,4,8,5,8,6,8,8,8,8,8,8,7,8,8,8,3,0,6,2,8,3,8,8,8,4,8,8,3,8,8,8,8,4,8,5,8,6,8,8,8,8,8,8,7,5,8,8,8,6,),
(6,6,5,5,6,3,6,5,6,6,1,2,0,1,1,6,5,6,6,2,6,6,6,6,6,6,6,5,6,6,6,6,2,5,6,5,5,5,6,5,5,0,5,5,4,5,6,5,6,5,6,6,5,5,5,5,5,5,6,5,4,4,0,0,2,5,6,5,5,5,6,5,5,0,5,5,4,5,6,5,6,5,6,6,5,5,5,5,5,5,4,6,4,5,6,),
)
# End of font
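# Hypothetical decode sketch for the tables above: `descriptors` maps each
# printable-ASCII glyph (starting at '!') to (width_px, byte_offset) into the
# large bitmap tuple defined earlier in this file (pass that tuple in as
# `bitmaps`). Every glyph is 24 rows tall and each row occupies
# ceil(width / 8) bytes, most-significant bit first.
def render_char(ch, bitmaps, height=24):
    width, offset = descriptors[ord(ch) - ord('!')]  # ASCII '!'..'~' only
    bytes_per_row = (width + 7) // 8
    rows = []
    for row in range(height):
        bits = 0
        for b in range(bytes_per_row):
            bits = (bits << 8) | bitmaps[offset + row * bytes_per_row + b]
        top = bytes_per_row * 8 - 1
        rows.append(''.join('O' if bits & (1 << (top - x)) else ' '
                            for x in range(width)))
    return '\n'.join(rows)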
| HudsonWerks/OLED-SSD1306 | ssd1306/fonts/stencil_24.py | Python | lgpl-3.0 | 98,068 | 0.156068 |
#!/usr/bin/env python
import boto
from boto.s3.key import Key
OrdinaryCallingFormat = boto.config.get('s3', 'calling_format', 'boto.s3.connection.OrdinaryCallingFormat')
s3 = boto.connect_s3(host='localhost', port=10001, calling_format=OrdinaryCallingFormat, is_secure=False)
b = s3.get_bucket('mocking')
k_cool = Key(b)
k_cool.key = 'cool.html'
k_cool.set_contents_from_string('this is some really cool html')
k_green = Key(b)
k_green.key = 'green.html'
k_green.set_contents_from_string('this is some really good music html')
k_horse = Key(b)
k_horse.key = 'seminoles.html'
k_horse.set_contents_from_string('this is some really seminoles html')
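# Optional read-back sketch (plain boto key iteration, nothing mock-specific)
# to confirm the mock server stored what we pushed:
for key in b.list():
    print('%s -> %s' % (key.name, key.get_contents_as_string()))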
| SivagnanamCiena/mock-s3 | tests/push.py | Python | mit | 652 | 0.003067 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013 OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
from openerp.osv import fields, osv
from openerp.tools.translate import _
class hr_grant_badge_wizard(osv.TransientModel):
_name = 'gamification.badge.user.wizard'
_inherit = ['gamification.badge.user.wizard']
_columns = {
'employee_id': fields.many2one("hr.employee", string='Employee', required=True),
'user_id': fields.related("employee_id", "user_id",
type="many2one", relation="res.users",
store=True, string='User')
}
def action_grant_badge(self, cr, uid, ids, context=None):
"""Wizard action for sending a badge to a chosen employee"""
if context is None:
context = {}
badge_user_obj = self.pool.get('gamification.badge.user')
for wiz in self.browse(cr, uid, ids, context=context):
if not wiz.user_id:
raise osv.except_osv(_('Warning!'), _('You can send badges only to employees linked to a user.'))
if uid == wiz.user_id.id:
                raise osv.except_osv(_('Warning!'), _('You cannot send a badge to yourself.'))
values = {
'user_id': wiz.user_id.id,
'sender_id': uid,
'badge_id': wiz.badge_id.id,
'employee_id': wiz.employee_id.id,
'comment': wiz.comment,
}
badge_user = badge_user_obj.create(cr, uid, values, context=context)
result = badge_user_obj._send_badge(cr, uid, [badge_user], context=context)
return result
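# Hypothetical old-API usage sketch (cr/uid/context are assumed to come from
# the caller, as everywhere else in this module):
#
#     wiz_obj = self.pool.get('gamification.badge.user.wizard')
#     wiz_id = wiz_obj.create(cr, uid, {'employee_id': employee_id,
#                                       'badge_id': badge_id,
#                                       'comment': 'Great work!'}, context=context)
#     wiz_obj.action_grant_badge(cr, uid, [wiz_id], context=context)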
| ncliam/serverpos | openerp/addons/hr_gamification/wizard/grant_badge.py | Python | agpl-3.0 | 2,525 | 0.002376 |
# -*- coding: utf-8 -*-
'''
fantastic Add-on
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re,urllib,urlparse
from resources.lib.modules import cleantitle
from resources.lib.modules import client
from resources.lib.modules import debrid
from resources.lib.modules import source_utils
class source:
def __init__(self):
self.priority = 1
self.language = ['en']
self.domains = ['rlsbb.online', 'rlsbb.co']
self.base_link = 'http://rlsbb.co/'
self.search_link = '/search/%s/feed/rss2/'
self.search_link2 = '/?s=%s&submit=Find'
def movie(self, imdb, title, localtitle, aliases, year):
try:
url = {'imdb': imdb, 'title': title, 'year': year}
url = urllib.urlencode(url)
return url
except:
return
def tvshow(self, imdb, tvdb, tvshowtitle, localtvshowtitle, aliases, year):
try:
url = {'imdb': imdb, 'tvdb': tvdb, 'tvshowtitle': tvshowtitle, 'year': year}
url = urllib.urlencode(url)
return url
except:
return
def episode(self, url, imdb, tvdb, title, premiered, season, episode):
try:
            if url is None: return
url = urlparse.parse_qs(url)
url = dict([(i, url[i][0]) if url[i] else (i, '') for i in url])
url['title'], url['premiered'], url['season'], url['episode'] = title, premiered, season, episode
url = urllib.urlencode(url)
return url
except:
return
def sources(self, url, hostDict, hostprDict):
try:
sources = []
            if url is None: return sources
if debrid.status() is False: raise Exception()
data = urlparse.parse_qs(url)
data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data])
title = data['tvshowtitle'] if 'tvshowtitle' in data else data['title']
hdlr = 'S%02dE%02d' % (int(data['season']), int(data['episode'])) if 'tvshowtitle' in data else data['year']
query = '%s S%02dE%02d' % (data['tvshowtitle'], int(data['season']), int(data['episode'])) if 'tvshowtitle' in data else '%s %s' % (data['title'], data['year'])
query = re.sub('(\\\|/| -|:|;|\*|\?|"|\'|<|>|\|)', ' ', query)
url = self.search_link % urllib.quote_plus(query)
url = urlparse.urljoin(self.base_link, url)
r = client.request(url)
posts = client.parseDOM(r, 'item')
hostDict = hostprDict + hostDict
items = []
for post in posts:
try:
t = client.parseDOM(post, 'title')[0]
u = client.parseDOM(post, 'enclosure', ret='url', attrs={'type': 'video.+?'})
s = re.findall('((?:\d+\.\d+|\d+\,\d+|\d+) (?:GiB|MiB|GB|MB))', post)
s = s[0] if s else '0'
items += [(t, i, s) for i in u]
except:
pass
for item in items:
try:
name = item[0]
name = client.replaceHTMLCodes(name)
t = re.sub('(\.|\(|\[|\s)(\d{4}|S\d*E\d*|S\d*|3D)(\.|\)|\]|\s|)(.+|)', '', name)
if not cleantitle.get(t) == cleantitle.get(title): raise Exception()
y = re.findall('[\.|\(|\[|\s](\d{4}|S\d*E\d*|S\d*)[\.|\)|\]|\s]', name)[-1].upper()
if not y == hdlr: raise Exception()
quality, info = source_utils.get_release_quality(name, item[1])
try:
                        size = re.sub('i', '', item[2])
div = 1 if size.endswith('GB') else 1024
size = float(re.sub('[^0-9|/.|/,]', '', size))/div
size = '%.2f GB' % size
info.append(size)
except:
pass
info = ' | '.join(info)
url = item[1]
if any(x in url for x in ['.rar', '.zip', '.iso']): raise Exception()
url = client.replaceHTMLCodes(url)
url = url.encode('utf-8')
valid, host = source_utils.is_host_valid(url, hostDict)
if not valid: continue
host = client.replaceHTMLCodes(host)
host = host.encode('utf-8')
sources.append({'source': host, 'quality': quality, 'language': 'en', 'url': url, 'info': info, 'direct': False, 'debridonly': True})
except:
pass
check = [i for i in sources if not i['quality'] == 'CAM']
if check: sources = check
return sources
except:
return sources
def resolve(self, url):
return url
| TheWardoctor/Wardoctors-repo | script.module.fantastic/lib/resources/lib/sources/en/rlsbb.py | Python | apache-2.0 | 5,587 | 0.013782 |
class Bunch(dict):
"""A dict with attribute-access"""
def __getattr__(self, key):
try:
return self.__getitem__(key)
except KeyError:
raise AttributeError(key)
def __setattr__(self, key, value):
self.__setitem__(key, value)
def __dir__(self):
return self.keys()
basemaps = Bunch(
OpenStreetMap = Bunch(
Mapnik = dict(
url = 'https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png',
max_zoom = 19,
attribution = 'Map data (c) <a href="https://openstreetmap.org">OpenStreetMap</a> contributors',
name = 'OpenStreetMap.Mapnik'
),
BlackAndWhite = dict(
url = 'http://{s}.tiles.wmflabs.org/bw-mapnik/{z}/{x}/{y}.png',
max_zoom = 18,
attribution = '© <a href="http://www.openstreetmap.org/copyright">OpenStreetMap</a>',
name = 'OpenStreetMap.BlackAndWhite',
),
DE = dict(
url = 'http://{s}.tile.openstreetmap.de/tiles/osmde/{z}/{x}/{y}.png',
max_zoom = 18,
attribution = '© <a href="http://www.openstreetmap.org/copyright">OpenStreetMap</a>',
name = 'OpenStreetMap.DE'
),
France = dict(
url = 'http://{s}.tile.openstreetmap.fr/osmfr/{z}/{x}/{y}.png',
max_zoom = 20,
attribution = '© Openstreetmap France | © <a href="http://www.openstreetmap.org/copyright">OpenStreetMap</a>',
name = 'OpenStreetMap.France'
),
HOT = dict(
url = 'http://{s}.tile.openstreetmap.fr/hot/{z}/{x}/{y}.png',
max_zoom = 19,
attribution = '© <a href="http://www.openstreetmap.org/copyright">OpenStreetMap</a>, Tiles courtesy of <a href="http://hot.openstreetmap.org/" target="_blank">Humanitarian OpenStreetMap Team</a>',
name = 'OpenStreetMap.HOT'
)
),
OpenTopoMap = dict(
url = 'https://{s}.tile.opentopomap.org/{z}/{x}/{y}.png',
max_zoom = 17,
attribution = 'Map data: © <a href="http://www.openstreetmap.org/copyright">OpenStreetMap</a>, <a href="http://viewfinderpanoramas.org">SRTM</a> | Map style: © <a href="https://opentopomap.org">OpenTopoMap</a> (<a href="https://creativecommons.org/licenses/by-sa/3.0/">CC-BY-SA</a>)',
        name = 'OpenTopoMap'
),
OpenMapSurfer = Bunch(
Roads = dict(
url = 'http://korona.geog.uni-heidelberg.de/tiles/roads/x={x}&y={y}&z={z}',
max_zoom = 20,
attribution = 'Imagery from <a href="http://giscience.uni-hd.de/">GIScience Research Group @ University of Heidelberg</a> — Map data © <a href="http://www.openstreetmap.org/copyright">OpenStreetMap</a>',
name = 'OpenMapSurfer.Roads'
),
Grayscale = dict(
url = 'http://korona.geog.uni-heidelberg.de/tiles/roadsg/x={x}&y={y}&z={z}',
max_zoom = 19,
attribution = 'Imagery from <a href="http://giscience.uni-hd.de/">GIScience Research Group @ University of Heidelberg</a> — Map data © <a href="http://www.openstreetmap.org/copyright">OpenStreetMap</a>',
name = 'OpenMapSurfer.Grayscale'
)
),
Hydda = Bunch(
Full = dict(
url = 'http://{s}.tile.openstreetmap.se/hydda/full/{z}/{x}/{y}.png',
max_zoom = 18,
attribution = 'Tiles courtesy of <a href="http://openstreetmap.se/" target="_blank">OpenStreetMap Sweden</a> — Map data © <a href="http://www.openstreetmap.org/copyright">OpenStreetMap</a>',
name = 'Hydda.Full'
),
Base = dict(
url = 'http://{s}.tile.openstreetmap.se/hydda/base/{z}/{x}/{y}.png',
max_zoom = 18,
attribution = 'Tiles courtesy of <a href="http://openstreetmap.se/" target="_blank">OpenStreetMap Sweden</a> — Map data © <a href="http://www.openstreetmap.org/copyright">OpenStreetMap</a>',
name = 'Hydda.Base'
),
),
Esri = Bunch(
WorldStreetMap = dict(
url = 'http://server.arcgisonline.com/ArcGIS/rest/services/World_Street_Map/MapServer/tile/{z}/{y}/{x}',
max_zoom = 20,
attribution = 'Tiles © Esri — Source: Esri, DeLorme, NAVTEQ, USGS, Intermap, iPC, NRCAN, Esri Japan, METI, Esri China (Hong Kong), Esri (Thailand), TomTom, 2012',
            name = 'Esri.WorldStreetMap'
),
DeLorme = dict(
url = 'http://server.arcgisonline.com/ArcGIS/rest/services/Specialty/DeLorme_World_Base_Map/MapServer/tile/{z}/{y}/{x}',
min_zoom = 1,
max_zoom = 11,
attribution = 'Tiles © Esri — Copyright: ©2012 DeLorme',
name = 'Esri.DeLorme'
),
WorldTopoMap = dict(
url = 'http://server.arcgisonline.com/ArcGIS/rest/services/World_Topo_Map/MapServer/tile/{z}/{y}/{x}',
max_zoom = 20,
attribution = 'Tiles © Esri — Esri, DeLorme, NAVTEQ, TomTom, Intermap, iPC, USGS, FAO, NPS, NRCAN, GeoBase, Kadaster NL, Ordnance Survey, Esri Japan, METI, Esri China (Hong Kong), and the GIS User Community',
name = 'Esri.WorldTopoMap'
),
WorldImagery = dict(
url = 'http://server.arcgisonline.com/ArcGIS/rest/services/World_Imagery/MapServer/tile/{z}/{y}/{x}',
max_zoom = 20,
attribution = 'Tiles © Esri — Source: Esri, i-cubed, USDA, USGS, AEX, GeoEye, Getmapping, Aerogrid, IGN, IGP, UPR-EGP, and the GIS User Community',
name = 'Esri.WorldImagery'
),
NatGeoWorldMap = dict(
url = 'http://server.arcgisonline.com/ArcGIS/rest/services/NatGeo_World_Map/MapServer/tile/{z}/{y}/{x}',
max_zoom = 16,
attribution = 'Tiles © Esri — National Geographic, Esri, DeLorme, NAVTEQ, UNEP-WCMC, USGS, NASA, ESA, METI, NRCAN, GEBCO, NOAA, iPC',
name = 'Esri.NatGeoWorldMap'
),
),
HikeBike = Bunch(
HikeBike = dict(
url = 'http://{s}.tiles.wmflabs.org/hikebike/{z}/{x}/{y}.png',
max_zoom = 19,
attribution = '© <a href="http://www.openstreetmap.org/copyright">OpenStreetMap</a>',
name = 'HikeBike.HikeBike'
),
),
MtbMap = dict(
url = 'http://tile.mtbmap.cz/mtbmap_tiles/{z}/{x}/{y}.png',
max_zoom = 20,
attribution = '© <a href="http://www.openstreetmap.org/copyright">OpenStreetMap</a> & USGS',
name = 'MtbMap'
),
CartoDB = Bunch(
Positron = dict(
url = 'http://c.basemaps.cartocdn.com/light_all/{z}/{x}/{y}.png',
max_zoom = 20,
attribution = '© <a href="http://www.openstreetmap.org/copyright">OpenStreetMap</a> © <a href="http://cartodb.com/attributions">CartoDB</a>',
name = 'CartoDB.Positron'
),
DarkMatter = dict(
url = 'http://c.basemaps.cartocdn.com/dark_all/{z}/{x}/{y}.png',
max_zoom = 20,
attribution = '© <a href="http://www.openstreetmap.org/copyright">OpenStreetMap</a> © <a href="http://cartodb.com/attributions">CartoDB</a>',
name = 'CartoDB.DarkMatter'
)
),
NASAGIBS = Bunch(
ModisTerraTrueColorCR = dict(
url = 'https://gibs.earthdata.nasa.gov/wmts/epsg3857/best/MODIS_Terra_CorrectedReflectance_TrueColor/default/%s/GoogleMapsCompatible_Level9/{z}/{y}/{x}.jpg',
max_zoom = 9,
attribution = 'Imagery provided by services from the Global Imagery Browse Services (GIBS), operated by the NASA/GSFC/Earth Science Data and Information System (<a href="https://earthdata.nasa.gov">ESDIS</a>) with funding provided by NASA/HQ.',
name = 'NASAGIBS.ModisTerraTrueColorCR'
),
ModisTerraBands367CR = dict(
url = 'https://gibs.earthdata.nasa.gov/wmts/epsg3857/best/MODIS_Terra_CorrectedReflectance_Bands367/default/%s/GoogleMapsCompatible_Level9/{z}/{y}/{x}.jpg',
max_zoom = 9,
attribution = 'Imagery provided by services from the Global Imagery Browse Services (GIBS), operated by the NASA/GSFC/Earth Science Data and Information System (<a href="https://earthdata.nasa.gov">ESDIS</a>) with funding provided by NASA/HQ.',
name = 'NASAGIBS.ModisTerraBands367CR'
),
ModisTerraBands721CR = dict(
url = 'https://gibs.earthdata.nasa.gov/wmts/epsg3857/best/MODIS_Terra_CorrectedReflectance_Bands721/default/%s/GoogleMapsCompatible_Level9/{z}/{y}/{x}.jpg',
max_zoom = 9,
attribution = 'Imagery provided by services from the Global Imagery Browse Services (GIBS), operated by the NASA/GSFC/Earth Science Data and Information System (<a href="https://earthdata.nasa.gov">ESDIS</a>) with funding provided by NASA/HQ.',
            name = 'NASAGIBS.ModisTerraBands721CR'
),
ModisAquaTrueColorCR = dict(
url = 'https://gibs.earthdata.nasa.gov/wmts/epsg3857/best/MODIS_Aqua_CorrectedReflectance_TrueColor/default/%s/GoogleMapsCompatible_Level9/{z}/{y}/{x}.jpg',
max_zoom = 9,
attribution = 'Imagery provided by services from the Global Imagery Browse Services (GIBS), operated by the NASA/GSFC/Earth Science Data and Information System (<a href="https://earthdata.nasa.gov">ESDIS</a>) with funding provided by NASA/HQ.',
name = 'NASAGIBS.ModisAquaTrueColorCR'
),
ModisAquaBands721CR = dict(
url = 'https://gibs.earthdata.nasa.gov/wmts/epsg3857/best/MODIS_Aqua_CorrectedReflectance_Bands721/default/%s/GoogleMapsCompatible_Level9/{z}/{y}/{x}.jpg',
max_zoom = 9,
attribution = 'Imagery provided by services from the Global Imagery Browse Services (GIBS), operated by the NASA/GSFC/Earth Science Data and Information System (<a href="https://earthdata.nasa.gov">ESDIS</a>) with funding provided by NASA/HQ.',
name = 'NASAGIBS.ModisAquaBands721CR'
),
ViirsTrueColorCR = dict(
url = 'https://gibs.earthdata.nasa.gov/wmts/epsg3857/best/VIIRS_SNPP_CorrectedReflectance_TrueColor/default/%s/GoogleMapsCompatible_Level9/{z}/{y}/{x}.jpg',
max_zoom = 9,
attribution = 'Imagery provided by services from the Global Imagery Browse Services (GIBS), operated by the NASA/GSFC/Earth Science Data and Information System (<a href="https://earthdata.nasa.gov">ESDIS</a>) with funding provided by NASA/HQ.',
name = 'NASAGIBS.ViirsTrueColorCR'
),
ViirsEarthAtNight2012 = dict(
url = 'http://gibs.earthdata.nasa.gov/wmts/epsg3857/best/VIIRS_Black_Marble/default/2012-01-01/GoogleMapsCompatible_Level8/{z}/{y}/{x}.png',
max_zoom = 8,
attribution = 'Imagery provided by services from the Global Imagery Browse Services (GIBS), operated by the NASA/GSFC/Earth Science Data and Information System (<a href="https://earthdata.nasa.gov">ESDIS</a>) with funding provided by NASA/HQ.',
name = 'NASAGIBS.ViirsEarthAtNight2012'
)
),
Strava = Bunch(
All = dict(
url = 'https://heatmap-external-a.strava.com//tiles/all/hot/{z}/{x}/{y}.png?v=19',
max_zoom = 15,
attribution = 'Map tiles by <a href="https://labs.strava.com/heatmap">Strava 2017</a>',
name = 'Strava.All'
),
Ride = dict(
url = 'https://heatmap-external-a.strava.com//tiles/ride/hot/{z}/{x}/{y}.png?v=19',
max_zoom = 15,
attribution = 'Map tiles <a href="https://labs.strava.com/heatmap">Strava 2017</a>',
name = 'Strava.Ride'
),
Run = dict(
url = 'https://heatmap-external-a.strava.com//tiles/run/bluered/{z}/{x}/{y}.png?v=19',
max_zoom = 15,
attribution = 'Map tiles by <a href="https://labs.strava.com/heatmap">Strava 2017</a>',
name = 'Strava.Run'
),
Water = dict(
url = 'https://heatmap-external-a.strava.com//tiles/water/blue/{z}/{x}/{y}.png?v=19',
max_zoom = 15,
attribution = 'Map tiles by <a href="https://labs.strava.com/heatmap">Strava 2017</a>',
name = 'Strava.Water'
),
Winter = dict(
url = 'https://heatmap-external-a.strava.com//tiles/winter/hot/{z}/{x}/{y}.png?v=19',
max_zoom = 15,
attribution = 'Map tiles by <a href="https://labs.strava.com/heatmap">Strava 2017</a>',
name = 'Strava.Winter'
)
),
Stamen = Bunch(
Terrain = dict(
url = 'http://stamen-tiles-a.a.ssl.fastly.net/terrain/{z}/{x}/{y}.png',
attribution = ''.join([
'Map tiles by <a href="http://stamen.com/">Stamen Design</a>, ',
'under <a href="http://creativecommons.org/licenses/by/3.0">CC BY 3.0</a>. ',
'Data by <a href="http://openstreetmap.org/">OpenStreetMap</a>, ',
'under <a href="http://creativecommons.org/licenses/by-sa/3.0">CC BY SA</a>.'
]),
name = 'Stamen.Terrain',
min_zoom = 0,
max_zoom = 18
),
Toner = dict(
url = 'http://stamen-tiles-a.a.ssl.fastly.net/toner/{z}/{x}/{y}.png',
attribution = ''.join([
'Map tiles by <a href="http://stamen.com/">Stamen Design</a>, ',
'under <a href="http://creativecommons.org/licenses/by/3.0">CC BY 3.0</a>. ',
'Data by <a href="http://openstreetmap.org/">OpenStreetMap</a>, ',
'under <a href="http://creativecommons.org/licenses/by-sa/3.0">CC BY SA</a>.'
]),
name = 'Stamen.Toner',
min_zoom = 0,
max_zoom = 20
),
Watercolor = dict(
url = 'http://stamen-tiles-a.a.ssl.fastly.net/watercolor/{z}/{x}/{y}.png',
attribution = ''.join([
'Map tiles by <a href="http://stamen.com/">Stamen Design</a>, ',
'under <a href="http://creativecommons.org/licenses/by/3.0">CC BY 3.0</a>. ',
'Data by <a href="http://openstreetmap.org/">OpenStreetMap</a>, ',
'under <a href="http://creativecommons.org/licenses/by-sa/3.0">CC BY SA</a>.'
]),
name = 'Stamen.Watercolor',
min_zoom = 1,
max_zoom = 18
)
)
)
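# Usage sketch (hypothetical, assuming a matching ipyleaflet version): each
# leaf above is a plain dict of TileLayer keyword arguments, and the NASAGIBS
# URLs keep a '%s' placeholder that must be filled with an ISO date first.
#
#     from ipyleaflet import Map, TileLayer
#
#     bm = basemaps.OpenStreetMap.Mapnik
#     layer = TileLayer(url=bm['url'], max_zoom=bm['max_zoom'],
#                       attribution=bm['attribution'])
#     m = Map(center=(52.5, 13.4), zoom=5, layers=(layer,))
#
#     nasa = dict(basemaps.NASAGIBS.ModisTerraTrueColorCR)
#     nasa['url'] = nasa['url'] % '2017-04-08'  # fill the date placeholder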
| ellisonbg/ipyleaflet | ipyleaflet/basemaps.py | Python | mit | 14,548 | 0.02997 |
import hug

# Optionally mount windowslogonofflogger
# (https://github.com/bengjerstad/windowslogonofflogger) if it is available.
try:
    from . import runserver
    hug.API(__name__).extend(runserver, '')
    print('Running windowslogonofflogger Server')
except ImportError:
    pass

# Optionally mount the MultiUse Log Server
# (https://github.com/bengjerstad/multiuselogserver) if it is available.
try:
    from . import logserver
    hug.API(__name__).extend(logserver, '/logserver')
    print('Running MultiUselog Server')
except ImportError:
    pass
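# Launch sketch (standard hug conventions, assumed): hug attaches a WSGI
# callable named __hug_wsgi__ to this module, so the combined API can be run
# with the development CLI or any WSGI server, e.g.:
#
#     hug -f logserver/__init__.py
#     gunicorn 'logserver:__hug_wsgi__'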
| bengjerstad/windowslogonofflogger | logserver/__init__.py | Python | mit | 450 | 0.048889 |
import os
import urlparse
from SocketServer import ThreadingMixIn
from wsgiref.util import setup_testing_defaults
from wsgiref.simple_server import make_server, WSGIServer
from lightningjs.http.gzipper import GzipperMiddleware
class ThreadedWsgiServer(ThreadingMixIn, WSGIServer):
pass
class RoutableApplication(object):
def __init__(self, routable_object):
self.__routable_object = routable_object
def __call__(self, environ, start_response):
# parse the request
setup_testing_defaults(environ)
path = environ['PATH_INFO']
querystring = environ['QUERY_STRING']
multiargs = urlparse.parse_qs(querystring)
# get the route and the associated Python method, then execute
# that method with the given querystring parameters as Python kwargs
if path[1:]:
path_method = 'get_%s' % path[1:]
else:
path_method = 'get_index'
if hasattr(self.__routable_object, path_method):
# call the routed method
single_value_args = {}
for key in multiargs:
single_value_args[key] = multiargs[key][0]
status, content_type, content = getattr(self.__routable_object, path_method)(**single_value_args)
else:
# route doesn't exist
content_type = 'text/html'
content = status = '404 NOT FOUND'
        # write out the HTTP response, keeping the status set by the handler
        # (or by the 404 branch above) rather than forcing it to '200 OK'
headers = [('Content-type', content_type)]
start_response(status, headers)
return [content]
def serve_routable_object(routable_object, port):
routable_server = RoutableApplication(routable_object=routable_object)
httpd = make_server(
host='',
port=port,
app=GzipperMiddleware(routable_server, compresslevel=8),
server_class=ThreadedWsgiServer,
)
httpd.serve_forever()
def render_browser_template(path, **kwargs):
template_path = os.path.join(os.path.dirname(__file__), 'templates', path)
with open(template_path, 'r') as template_fd:
content = template_fd.read()
if kwargs:
# do Python string templates if given
content = content % kwargs
return content
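if __name__ == '__main__':
    # Hypothetical smoke-test: any object exposing get_* methods is routable.
    # Each handler returns a (status, content_type, content) tuple, and
    # querystring parameters arrive as keyword arguments, so
    # GET /greet?name=bob is dispatched to get_greet(name='bob').
    class Demo(object):
        def get_index(self):
            return '200 OK', 'text/html', '<h1>hello</h1>'

        def get_greet(self, name='world'):
            return '200 OK', 'text/plain', 'hello %s' % name

    serve_routable_object(Demo(), port=8000)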
| FocusLab/willie | vendor/lightningjs/lib/python/lightningjs/http/__init__.py | Python | bsd-3-clause | 2,265 | 0.001766 |
import sure # noqa # pylint: disable=unused-import
import unittest
import boto3
from moto import mock_sqs, mock_ec2
from tests import EXAMPLE_AMI_ID
class TestNestedDecoratorsBoto3(unittest.TestCase):
@mock_sqs
def setup_sqs_queue(self):
conn = boto3.resource("sqs", region_name="us-east-1")
queue = conn.create_queue(QueueName="some-queue")
queue.send_message(MessageBody="test message 1")
queue.reload()
queue.attributes["ApproximateNumberOfMessages"].should.equal("1")
@mock_ec2
def test_nested(self):
self.setup_sqs_queue()
conn = boto3.client("ec2", region_name="us-west-2")
conn.run_instances(ImageId=EXAMPLE_AMI_ID, MinCount=1, MaxCount=1)
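# For reference, the same mocks compose as context managers too (standard
# moto API), which avoids decorator nesting entirely:
#
#     with mock_sqs(), mock_ec2():
#         ...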
| spulec/moto | tests/test_core/test_nested.py | Python | apache-2.0 | 736 | 0 |
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Creature()
result.template = "object/mobile/shared_kaja_orzee.iff"
result.attribute_template_id = 9
result.stfName("theme_park_name","kaja_orzee")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
	return result
| anhstudios/swganh | data/scripts/templates/object/mobile/shared_kaja_orzee.py | Python | mit | 437 | 0.048055 |
from django.apps import apps
from django.contrib import admin
AccessToken = apps.get_model('oauth2', 'AccessToken')
Client = apps.get_model('oauth2', 'Client')
Grant = apps.get_model('oauth2', 'Grant')
RefreshToken = apps.get_model('oauth2', 'RefreshToken')
class AccessTokenAdmin(admin.ModelAdmin):
list_display = ('user', 'client', 'token', 'expires', 'scope')
raw_id_fields = ('user',)
class GrantAdmin(admin.ModelAdmin):
list_display = ('user', 'client', 'code', 'expires')
raw_id_fields = ('user',)
class ClientAdmin(admin.ModelAdmin):
list_display = ('url', 'user', 'redirect_uri', 'client_id', 'client_type')
raw_id_fields = ('user',)
admin.site.register(AccessToken, AccessTokenAdmin)
admin.site.register(Grant, GrantAdmin)
admin.site.register(Client, ClientAdmin)
admin.site.register(RefreshToken)
| depop/django-oauth2-provider | provider/oauth2/admin.py | Python | mit | 840 | 0 |
# encoding: utf-8
# module zipimport
# from (built-in)
# by generator 1.130
"""
zipimport provides support for importing Python modules from Zip archives.
This module exports three objects:
- zipimporter: a class; its constructor takes a path to a Zip archive.
- ZipImportError: exception raised by zipimporter objects. It's a
subclass of ImportError, so it can be caught as ImportError, too.
- _zip_directory_cache: a dict, mapping archive paths to zip directory
info dicts, as used in zipimporter._files.
It is usually not needed to use the zipimport module explicitly; it is
used by the builtin import mechanism for sys.path items that are paths
to Zip archives.
"""
# no imports
# no functions
# classes
class zipimporter(object):
"""
zipimporter(archivepath) -> zipimporter object
Create a new zipimporter instance. 'archivepath' must be a path to
    a zipfile, or to a specific path inside a zipfile. For example, it can be
'/tmp/myimport.zip', or '/tmp/myimport.zip/mydirectory', if mydirectory is a
valid directory inside the archive.
'ZipImportError is raised if 'archivepath' doesn't point to a valid Zip
archive.
The 'archive' attribute of zipimporter objects contains the name of the
zipfile targeted.
"""
def find_module(self, fullname, path=None): # real signature unknown; restored from __doc__
"""
find_module(fullname, path=None) -> self or None.
Search for a module specified by 'fullname'. 'fullname' must be the
fully qualified (dotted) module name. It returns the zipimporter
instance itself if the module was found, or None if it wasn't.
The optional 'path' argument is ignored -- it's there for compatibility
with the importer protocol.
"""
return self
def get_code(self, fullname): # real signature unknown; restored from __doc__
"""
get_code(fullname) -> code object.
Return the code object for the specified module. Raise ZipImportError
if the module couldn't be found.
"""
pass
def get_data(self, pathname): # real signature unknown; restored from __doc__
"""
get_data(pathname) -> string with file data.
Return the data associated with 'pathname'. Raise IOError if
the file wasn't found.
"""
return ""
def get_filename(self, fullname): # real signature unknown; restored from __doc__
"""
get_filename(fullname) -> filename string.
Return the filename for the specified module.
"""
pass
def get_source(self, fullname): # real signature unknown; restored from __doc__
"""
get_source(fullname) -> source string.
Return the source code for the specified module. Raise ZipImportError
if the module couldn't be found, return None if the archive does
contain the module, but has no source for it.
"""
pass
def is_package(self, fullname): # real signature unknown; restored from __doc__
"""
is_package(fullname) -> bool.
Return True if the module specified by fullname is a package.
Raise ZipImportError if the module couldn't be found.
"""
pass
def load_module(self, fullname): # real signature unknown; restored from __doc__
"""
load_module(fullname) -> module.
Load the module specified by 'fullname'. 'fullname' must be the
fully qualified (dotted) module name. It returns the imported
module, or raises ZipImportError if it wasn't found.
"""
pass
def __getattribute__(self, name): # real signature unknown; restored from __doc__
""" x.__getattribute__('name') <==> x.name """
pass
def __init__(self, archivepath): # real signature unknown; restored from __doc__
pass
@staticmethod # known case of __new__
def __new__(S, *more): # real signature unknown; restored from __doc__
""" T.__new__(S, ...) -> a new object with type S, a subtype of T """
pass
def __repr__(self): # real signature unknown; restored from __doc__
""" x.__repr__() <==> repr(x) """
pass
archive = property(lambda self: '')
""":type: string"""
prefix = property(lambda self: '')
""":type: string"""
_files = property(lambda self: {})
""":type: dict"""
class ZipImportError(ImportError):
# no doc
def __init__(self, *args, **kwargs): # real signature unknown
pass
__weakref__ = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""list of weak references to the object (if defined)"""
# variables with complex values
_zip_directory_cache = {} # real value of type <type 'dict'> skipped
| akiokio/centralfitestoque | src/.pycharm_helpers/python_stubs/-1807332816/zipimport.py | Python | bsd-2-clause | 4,893 | 0.008379 |
# -*- coding: utf-8 -*-
# This module contains classes to manage EL1USB device
# This class reads the content of the EL-USB-1 thermometer
import sys
import datetime
import time
class el1_input:
"Doc ..."
def __init__(self):
self.fake = 0
def request(self, text, base_value, min_value, max_value):
print text,
while base_value < min_value or base_value > max_value:
base_value = input()
if base_value < min_value:
print "value too low, should be between", min_value, "and", max_value
if base_value > max_value:
print "value too high, should be between", min_value, "and", max_value
return base_value
def convert_name(self, name):
new_buffer = []
for char in name:
new_buffer.append(ord(char))
count = len(name)
while count != 16:
new_buffer.append(0)
count += 1
return new_buffer
| eltuxusb/eltuxusb | eltuxusb/el_input.py | Python | gpl-3.0 | 981 | 0.004077 |
from kolibri.auth.api import KolibriAuthPermissions, KolibriAuthPermissionsFilter
from kolibri.content.api import OptionalPageNumberPagination
from rest_framework import filters, viewsets
from .models import ContentRatingLog, ContentSessionLog, ContentSummaryLog, UserSessionLog
from .serializers import ContentRatingLogSerializer, ContentSessionLogSerializer, ContentSummaryLogSerializer, UserSessionLogSerializer
class ContentSessionLogFilter(filters.FilterSet):
class Meta:
model = ContentSessionLog
fields = ['user_id', 'content_id']
class ContentSessionLogViewSet(viewsets.ModelViewSet):
permission_classes = (KolibriAuthPermissions,)
filter_backends = (KolibriAuthPermissionsFilter, filters.DjangoFilterBackend)
queryset = ContentSessionLog.objects.all()
serializer_class = ContentSessionLogSerializer
pagination_class = OptionalPageNumberPagination
filter_class = ContentSessionLogFilter
class ContentSummaryFilter(filters.FilterSet):
class Meta:
model = ContentSummaryLog
fields = ['user_id', 'content_id']
class ContentSummaryLogViewSet(viewsets.ModelViewSet):
permission_classes = (KolibriAuthPermissions,)
filter_backends = (KolibriAuthPermissionsFilter, filters.DjangoFilterBackend)
queryset = ContentSummaryLog.objects.all()
serializer_class = ContentSummaryLogSerializer
pagination_class = OptionalPageNumberPagination
filter_class = ContentSummaryFilter
class ContentRatingLogViewSet(viewsets.ModelViewSet):
permission_classes = (KolibriAuthPermissions,)
filter_backends = (KolibriAuthPermissionsFilter,)
queryset = ContentRatingLog.objects.all()
serializer_class = ContentRatingLogSerializer
pagination_class = OptionalPageNumberPagination
class UserSessionLogViewSet(viewsets.ModelViewSet):
permission_classes = (KolibriAuthPermissions,)
filter_backends = (KolibriAuthPermissionsFilter,)
queryset = UserSessionLog.objects.all()
serializer_class = UserSessionLogSerializer
pagination_class = OptionalPageNumberPagination
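# Illustrative wiring (a sketch, not part of this module): these viewsets
# would typically be exposed through a DRF router in a urls module, e.g.:
#
#   from rest_framework import routers
#   router = routers.DefaultRouter()
#   router.register('contentsessionlog', ContentSessionLogViewSet)
#   router.register('usersessionlog', UserSessionLogViewSet)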
| ralphiee22/kolibri | kolibri/logger/api.py | Python | mit | 2,079 | 0.002886 |
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
def _RunTests(input_api, output_api):
return (input_api.canned_checks.RunUnitTestsInDirectory(
input_api, output_api, '.', files_to_check=[r'.+_test.py$']))
def CheckChangeOnUpload(input_api, output_api):
return _RunTests(input_api, output_api)
def CheckChangeOnCommit(input_api, output_api):
return _RunTests(input_api, output_api)
| youtube/cobalt | build/util/lib/common/PRESUBMIT.py | Python | bsd-3-clause | 513 | 0.005848 |
import sys
import unittest
import warnings
from unittest import TestCase
try:
import faulthandler
except ImportError:
pass
else:
try:
# May fail in IPython Notebook with UnsupportedOperation
faulthandler.enable()
except BaseException as e:
msg = "Failed to enable faulthandler due to:\n{err}"
warnings.warn(msg.format(err=e))
# Try to inject Numba's unittest customizations.
from . import customize
def discover_tests(startdir):
    """Discover tests under a directory.
    """
    loader = unittest.TestLoader()
    suite = loader.discover(startdir)
    return suite
def run_tests(suite=None, xmloutput=None, verbosity=1):
"""
args
----
- suite [TestSuite]
A suite of all tests to run
- xmloutput [str or None]
Path of XML output directory (optional)
- verbosity [int]
Verbosity level of tests output
Returns the TestResult object after running the test *suite*.
"""
if suite is None:
suite = discover_tests("llvmlite.tests")
if xmloutput is not None:
import xmlrunner
runner = xmlrunner.XMLTestRunner(output=xmloutput)
else:
runner = None
prog = unittest.main(suite=suite, testRunner=runner, exit=False,
verbosity=verbosity)
return prog.result
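# Illustrative invocation (a sketch, not part of the original module): run the
# suite with XML reports, assuming the optional "xmlrunner" package is
# installed.
#
#   from llvmlite.tests import run_tests
#   result = run_tests(xmloutput='test-reports', verbosity=2)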
def main():
res = run_tests()
sys.exit(0 if res.wasSuccessful() else 1)
| m-labs/llvmlite | llvmlite/tests/__init__.py | Python | bsd-2-clause | 1,433 | 0.000698 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
MENU_FILE = "File"
FILE_NEW = "New"
FILE_OPEN = "Open"
FILE_EXIT = "Exit"
TAB_DATA = "Data"
TAB_SQL = "SQL"
BUTTON_EXIT = "Exit"
| Smaed/pyDbManager | lib/Lang.py | Python | gpl-2.0 | 228 | 0.004386 |
"""
WSGI config for project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
application = get_wsgi_application()
| djangomini/djangomini | test_project/wsgi.py | Python | mit | 373 | 0 |
# -*- coding: UTF-8 -*-
# Copyright 2013-2014 by Luc Saffre.
# License: BSD, see LICENSE for more details.
"""
Turns a list of items into an endless loop.
Useful when generating demo fixtures.
>>> from lino.utils import Cycler
>>> def myfunc():
... yield "a"
... yield "b"
... yield "c"
>>> c = Cycler(myfunc())
>>> s = ""
>>> for i in range(10):
... s += c.pop()
>>> print (s)
abcabcabca
An empty Cycler or a Cycler on an empty list will endlessly pop None values:
>>> c = Cycler()
>>> print (c.pop(), c.pop(), c.pop())
None None None
>>> c = Cycler([])
>>> print (c.pop(), c.pop(), c.pop())
None None None
>>> c = Cycler(None)
>>> print (c.pop(), c.pop(), c.pop())
None None None
"""
from __future__ import unicode_literals
from __future__ import print_function
from builtins import object
class Cycler(object):
def __init__(self, *args):
"""
If there is exactly one argument, then this must be an iterable
and will be used as the list of items to cycle on.
If there is more than one positional argument, then these
arguments themselves will be the list of items.
"""
if len(args) == 0:
self.items = []
elif len(args) == 1:
if args[0] is None:
self.items = []
else:
self.items = list(args[0])
else:
self.items = args
self.current = 0
def pop(self):
if len(self.items) == 0:
return None
item = self.items[self.current]
self.current += 1
if self.current >= len(self.items):
self.current = 0
if isinstance(item, Cycler):
return item.pop()
return item
def __len__(self):
return len(self.items)
def reset(self):
self.current = 0
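# Illustrative (a sketch, not covered by the doctests above): multiple
# positional arguments are themselves used as the list of items, as
# described in __init__:
#
#   >>> c = Cycler(1, 2, 3)
#   >>> [c.pop() for _ in range(5)]
#   [1, 2, 3, 1, 2]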
def _test():
import doctest
doctest.testmod()
if __name__ == "__main__":
_test()
| khchine5/lino | lino/utils/cycler.py | Python | bsd-2-clause | 1,929 | 0.000518 |
# Local imports
from connect import m
from crypto import *
#NOTE: SQL '?' tuples must use '_t' as var name
#NOTE: the tuples that fetch(one|all)() returns should be called 'res'
#######################
# USER CLASS #
#######################
# IDs and names are stored as fields because they
# are used to query the database. Everything else
# is queried itself.
class User:
def __init__(self, id):
self.id = id
def get_name(self):
_t = (self.id,)
m.execute("SELECT name FROM Users WHERE id = ?", _t)
res = m.fetchone()
return res[0]
def mod_lkarma(self, inc=True):
if inc:
delta = "+1"
else:
delta = "-1"
_t = (self.id,self.id)
m.execute("UPDATE Users SET lkarma = ( ( SELECT lkarma FROM Users WHERE id = ? ) "+delta+" ) WHERE id = ?", _t)
def mod_ckarma(self, inc=True):
if inc:
delta = "+1"
else:
delta = "-1"
_t = (self.id,self.id)
m.execute("UPDATE Users SET ckarma = ( ( SELECT ckarma FROM Users WHERE id = ? ) "+delta+" ) WHERE id = ?", _t)
def get_lkarma(self):
_t = (self.id,)
m.execute("SELECT lkarma FROM Users WHERE id = ?", _t)
res = m.fetchone()
return res[0]
def get_ckarma(self):
_t = (self.id,)
m.execute("SELECT ckarma FROM Users WHERE id = ?", _t)
res = m.fetchone()
return res[0]
#######################
# USERACCOUNT ACTIONS #
#######################
# Functions related to user auth and
# other account-related activities.
def new_user(username, password, email):
if username_exists(username):
return False
id = get_new_id()
salt = get_salt()
hash = get_hash(password, salt)
_t = (id, username, email, hash, salt, 0, 0)
m.execute("INSERT INTO Users (id, name, mail, hash, salt, lkarma, ckarma) VALUES(?,?,?,?,?,?,?)", _t)
return True
def verify_user(username, password):
    _t = (username,)
    m.execute("SELECT * FROM Users WHERE name IS ?", _t)
    res = m.fetchone()
    if res is None:  # unknown username
        return False
    assert res[1] == username
    if get_hash(password, res[4]) == res[3]:
        return True
    else:
        return False
def username_exists(username):
    _t = (username,)
    m.execute("SELECT COUNT(1) FROM Users WHERE name IS ?", _t)
    if m.fetchone()[0] == 1:
        return True
    return False
######################
# USER GETTERS #
######################
# These functions get Users.
# Getting user-specific data is done objectively.
# BULK GETTERS
def get_users():
users = []
m.execute("SELECT id FROM Users")
for res in m.fetchall():
user = User(res[0])
users.append(user)
return users
def get_users_by_mail(mail):
users = []
_t = (mail,)
m.execute("SELECT id FROM Users WHERE mail IS ?", _t)
for res in m.fetchall():
user = User(res[0])
users.append(user)
return users
# SINGLE GETTERS
def get_user(id):
return User(id)
def get_user_by_name(name):
_t = (name,)
m.execute("SELECT id FROM Users WHERE name IS ?", _t)
res = m.fetchone()
return User(res[0])
#######################
# MISC #
#######################
def get_new_id():
m.execute("SELECT id FROM Users WHERE id = ( SELECT MAX(id) FROM Users )")
res = m.fetchone()
if not res: # No users
return 0
else:
return int(res[0])+1
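# Illustrative flow (a sketch, not part of the original module), assuming
# connect.m wraps an open sqlite3 cursor and crypto provides
# get_salt()/get_hash():
#
#   if new_user("alice", "s3cret", "alice@example.com"):
#       assert verify_user("alice", "s3cret")
#       get_user_by_name("alice").mod_lkarma(inc=True)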
| shelt/Fries | modules/user.py | Python | apache-2.0 | 3,458 | 0.006362 |
import subprocess
import multiprocessing
import select
import fcntl, os
import re
import matplotlib.pyplot as plt
import numpy as np
import time
import evolver
import logging
evolver.logger.setLevel(logging.DEBUG)
evolver.logger.addHandler(logging.StreamHandler())
def find_values(fileloc, phrase):
wf = open(fileloc, 'r')
words = wf.read().split()
i = 0
param = list()
for x in words:
if x == phrase:
param.append(words[i + 2])
break
i += 1
return param
def define_values(bo):
    wf = open('./dropSinusoidal.fe', 'r')
    m = wf.read()
    wf.close()
    m = re.sub('Bo = \w*.\w*', 'Bo = %f' % bo, m)
    wf = open('./dropSinusoidal.fe', 'w')
    wf.write(m)
    wf.close()
def main():
"""The main routine."""
with evolver.Evolver() as E:
n = 1000
init_bo = 0
params = np.empty([3, n])
for i in range(n):
print '-----------------\nTime Around ' + str(i)
bo = init_bo + 0.05*i
define_values(bo)
E.open_file('dropSinusoidal.fe')
for j in range(1):
#E.refine(1)
vals = E.evolve(1)
E.run_command('car_app')
E.run_command('car')
E.run_command('')
E.run_command('dump')
E.run_command('')
E.close_file()
params[0, i] = bo
params[1, i] = find_values('dropSinusoidal.fe.dmp', 'contact_angle_right')[0]
params[2, i] = find_values('dropSinusoidal.fe.dmp', 'contact_angle_right_app')[0]
print 'To Plot'
plt.plot(params[0, :], params[1, :], label="Contact Angle")
plt.plot(params[0, :], params[2, :], label="Apparent Contact Angle")
plt.ylabel('Contact Angle')
plt.xlabel('Bond Number (Bo)')
plt.legend()
plt.show()
if __name__ == "__main__":
main()
| ulmusic/python-evolver | test/DS_sweep.py | Python | gpl-2.0 | 1,956 | 0.00818 |
# Copyright (C) 2018
# Max Planck Institute for Polymer Research
#
# This file is part of ESPResSo++.
#
# ESPResSo++ is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ESPResSo++ is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
r"""
*********************************************
espressopp.interaction.TabulatedSubEnsAngular
*********************************************
.. function:: espressopp.interaction.TabulatedSubEnsAngular(dim, itype, filenames)
:param dim: Number of potentials to be used for this interaction
:param itype: The interpolation type: 1 - linear, 2 - akima spline, 3 - cubic spline
:param filenames: The tabulated potential filenames.
:type itype: int
:type filename: str
.. function:: espressopp.interaction.FixedTripleListTabulatedSubEnsAngular(system, ftl, potential)
:param system: The Espresso++ system object.
:param ftl: The FixedTripleList.
:param potential: The potential.
:type system: espressopp.System
:type ftl: espressopp.FixedTripleList
:type potential: espressopp.interaction.Potential
.. function:: espressopp.interaction.FixedTripleListTabulatedSubEnsAngular.setPotential(potential)
:param potential: The potential object.
:type potential: espressopp.interaction.Potential
.. function:: espressopp.interaction.FixedTripleListTypesTabulatedSubEnsAngular(system, ftl)
:param system: The Espresso++ system object.
:type system: espressopp.System
:param ftl: The FixedTriple list.
:type ftl: espressopp.FixedTripleList
.. function:: espressopp.interaction.FixedTripleListTypesTabulatedSubEnsAngular.setPotential(type1, type2, type3, potential)
Defines angular potential for interaction between particles of types type1-type2-type3.
:param type1: Type of particle 1.
:type type1: int
:param type2: Type of particle 2.
:type type2: int
:param type3: Type of particle 3.
:type type3: int
:param potential: The potential to set up.
:type potential: espressopp.interaction.AngularPotential
"""
from espressopp import pmi
from espressopp.esutil import *
from espressopp.interaction.AngularPotential import *
from espressopp.interaction.Interaction import *
from _espressopp import interaction_TabulatedSubEnsAngular, \
interaction_FixedTripleListTabulatedSubEnsAngular, \
interaction_FixedTripleListTypesTabulatedSubEnsAngular
class TabulatedSubEnsAngularLocal(AngularPotentialLocal, interaction_TabulatedSubEnsAngular):
def __init__(self):
if not (pmi._PMIComm and pmi._PMIComm.isActive()) or pmi._MPIcomm.rank in pmi._PMIComm.getMPIcpugroup():
cxxinit(self, interaction_TabulatedSubEnsAngular)
class FixedTripleListTabulatedSubEnsAngularLocal(InteractionLocal, interaction_FixedTripleListTabulatedSubEnsAngular):
def __init__(self, system, ftl, potential):
if not (pmi._PMIComm and pmi._PMIComm.isActive()) or pmi._MPIcomm.rank in pmi._PMIComm.getMPIcpugroup():
cxxinit(self, interaction_FixedTripleListTabulatedSubEnsAngular, system, ftl, potential)
def setPotential(self, potential):
if not (pmi._PMIComm and pmi._PMIComm.isActive()) or pmi._MPIcomm.rank in pmi._PMIComm.getMPIcpugroup():
self.cxxclass.setPotential(self, potential)
class FixedTripleListTypesTabulatedSubEnsAngularLocal(InteractionLocal, interaction_FixedTripleListTypesTabulatedSubEnsAngular):
def __init__(self, system, ftl):
if pmi.workerIsActive():
cxxinit(self, interaction_FixedTripleListTypesTabulatedSubEnsAngular, system, ftl)
def setPotential(self, type1, type2, type3, potential):
if pmi.workerIsActive():
self.cxxclass.setPotential(self, type1, type2, type3, potential)
def getPotential(self, type1, type2, type3):
if pmi.workerIsActive():
return self.cxxclass.getPotential(self, type1, type2, type3)
def setFixedTripleList(self, ftl):
if pmi.workerIsActive():
self.cxxclass.setFixedTripleList(self, ftl)
def getFixedTripleList(self):
if pmi.workerIsActive():
return self.cxxclass.getFixedTripleList(self)
if pmi.isController:
class TabulatedSubEnsAngular(AngularPotential):
'The TabulatedSubEnsAngular potential.'
pmiproxydefs = dict(
cls = 'espressopp.interaction.TabulatedSubEnsAngularLocal',
pmicall = ['weight_get', 'weight_set',
'alpha_get', 'alpha_set', 'targetProb_get', 'targetProb_set',
'colVarSd_get', 'colVarSd_set',
'dimension_get', 'filenames_get', 'filename_get',
'filename_set', 'addInteraction', 'colVarRefs_get',
'colVarRef_get']
)
class FixedTripleListTabulatedSubEnsAngular(Interaction):
__metaclass__ = pmi.Proxy
pmiproxydefs = dict(
cls = 'espressopp.interaction.FixedTripleListTabulatedSubEnsAngularLocal',
pmicall = ['setPotential', 'getFixedTripleList']
)
class FixedTripleListTypesTabulatedSubEnsAngular(Interaction):
__metaclass__ = pmi.Proxy
pmiproxydefs = dict(
cls = 'espressopp.interaction.FixedTripleListTypesTabulatedSubEnsAngularLocal',
pmicall = ['setPotential','getPotential', 'setFixedTripleList', 'getFixedTripleList']
)
| MrTheodor/espressopp | src/interaction/TabulatedSubEnsAngular.py | Python | gpl-3.0 | 5,918 | 0.010645 |
# Generated by Django 3.0.9 on 2020-08-16 20:47
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('groups', '0042_auto_20200507_1258'),
('activities', '0021_remove_activity_feedback_as_sum'),
]
operations = [
migrations.CreateModel(
name='ActivityType',
fields=[
('id', models.AutoField(primary_key=True, serialize=False)),
('created_at', models.DateTimeField(default=django.utils.timezone.now)),
('group', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='activity_types', to='groups.Group')),
('name', models.CharField(max_length=80)),
('colour', models.CharField(max_length=6)),
('icon', models.CharField(max_length=32)),
('feedback_icon', models.CharField(max_length=32)),
('has_feedback', models.BooleanField(default=True)),
('has_feedback_weight', models.BooleanField(default=True)),
],
options={
'abstract': False,
},
),
migrations.AddField(
model_name='activity',
name='activity_type',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='activities', to='activities.ActivityType'),
),
migrations.AddField(
model_name='activityseries',
name='activity_type',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='activity_series', to='activities.ActivityType'),
),
migrations.AlterUniqueTogether(
name='activitytype',
unique_together={('group', 'name')},
),
]
| yunity/foodsaving-backend | karrot/activities/migrations/0022_add_activity_types.py | Python | agpl-3.0 | 1,901 | 0.00263 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# thumbor imaging service
# https://github.com/thumbor/thumbor/wiki
# Licensed under the MIT license:
# http://www.opensource.org/licenses/mit-license
# Copyright (c) 2011 globo.com timehome@corp.globo.com
import os
import logging
from functools import wraps
CONTENT_TYPE = {
'.jpg': 'image/jpeg',
'.jpeg': 'image/jpeg',
'.gif': 'image/gif',
'.png': 'image/png',
'.webp': 'image/webp',
'.mp4': 'video/mp4',
'.webm': 'video/webm',
'.svg': 'image/svg+xml',
}
EXTENSION = {
'image/jpeg': '.jpg',
'image/gif': '.gif',
'image/png': '.png',
'image/webp': '.webp',
'video/mp4': '.mp4',
'video/webm': '.webm',
'image/svg+xml': '.svg',
}
logger = logging.getLogger('thumbor')
class on_exception(object):
def __init__(self, callback, exception_class=Exception):
self.callback = callback
self.exception_class = exception_class
def __call__(self, fn):
def wrapper(*args, **kwargs):
self_instance = args[0] if len(args) > 0 else None
try:
return fn(*args, **kwargs)
except self.exception_class as exc_value:
if self.callback:
# Execute the callback and let it handle the exception
if self_instance:
return self.callback(
self_instance,
fn.__name__,
self.exception_class,
exc_value
)
else:
return self.callback(
fn.__name__,
self.exception_class,
exc_value
)
else:
raise
return wrapper
class deprecated(object):
def __init__(self, msg=None):
self.msg = ": {0}".format(msg) if msg else "."
def __call__(self, func):
@wraps(func)
def new_func(*args, **kwargs):
logger.warn(
"Deprecated function {0}{1}".format(func.__name__, self.msg)
)
return func(*args, **kwargs)
return new_func
def which(program):
def is_exe(fpath):
return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
fpath, fname = os.path.split(program)
if fpath:
if is_exe(program):
return program
else:
for path in os.environ["PATH"].split(os.pathsep):
path = path.strip('"')
exe_file = os.path.join(path, program)
if is_exe(exe_file):
return exe_file
return None
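# Illustrative usage of on_exception (a sketch, not part of the original
# module): the wrapper treats the first positional argument as the instance,
# so the decorator is shown on a method; the callback then receives
# (self, func_name, exception_class, exc_value) and its return value is used
# as the fallback result.
#
#   def _on_io_error(self, func_name, exc_class, exc_value):
#       logger.warn('%s failed: %s', func_name, exc_value)
#       return None
#
#   class Loader(object):
#       @on_exception(_on_io_error, IOError)
#       def load(self, path):
#           with open(path, 'rb') as f:
#               return f.read()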
| BetterCollective/thumbor | thumbor/utils.py | Python | mit | 2,733 | 0 |
import pygame
import rospy
import time
from std_msgs.msg import Float64
from std_msgs.msg import Float64MultiArray
#pygame setup
pygame.init()
pygame.display.set_mode([100,100])
delay = 100
interval = 50
pygame.key.set_repeat(delay, interval)
#really this should be passed in or something but for now if you want to change the name just do it here
robot_namespace = "qubo/"
effort = 50
num_thrusters = 8
rospy.init_node('keyboard_node', anonymous=False)
#rospy spins all these up in their own thread, no need to call spin()
roll_pub = rospy.Publisher(robot_namespace + "roll_cmd" , Float64, queue_size = 10 )
pitch_pub = rospy.Publisher(robot_namespace + "pitch_cmd" , Float64, queue_size = 10 )
yaw_pub = rospy.Publisher(robot_namespace + "yaw_cmd" , Float64, queue_size = 10 )
depth_pub = rospy.Publisher(robot_namespace + "depth_cmd" , Float64, queue_size = 10 )
surge_pub = rospy.Publisher(robot_namespace + "surge_cmd" , Float64, queue_size = 10 )
sway_pub = rospy.Publisher(robot_namespace + "sway_cmd" , Float64, queue_size = 10 )
thruster_pub = rospy.Publisher(robot_namespace + "thruster_cmds" , Float64MultiArray, queue_size = 10)
thruster_msg = Float64MultiArray()
pygame.key.set_repeat(10,10)
while(True):
for event in pygame.event.get():
if event.type == pygame.KEYDOWN:
print event.key
keys_pressed = pygame.key.get_pressed()
sway = surge = yaw = depth = 0
thruster_msg.data = [0]*num_thrusters
if keys_pressed[pygame.K_a]:
sway_pub.publish(effort)
elif keys_pressed[pygame.K_d]:
sway_pub.publish(-effort)
    if keys_pressed[pygame.K_w]:
        surge_pub.publish(effort)
elif keys_pressed[pygame.K_s]:
surge_pub.publish(-effort)
if keys_pressed[pygame.K_q]:
yaw_pub.publish(effort)
elif keys_pressed[pygame.K_e]:
yaw_pub.publish(-effort)
if keys_pressed[pygame.K_r]:
depth_pub.publish(effort)
elif keys_pressed[pygame.K_f]:
depth_pub.publish(-effort)
if keys_pressed[pygame.K_MINUS]:
sign = -1
else:
sign = 1
#this only works because pygame.k_X is a number and k_0 - k_8 are contiguous
for i in range(0, 8):
if keys_pressed[pygame.K_0 + i]:
thruster_msg.data[i] = (effort*sign)
print thruster_msg.data
thruster_pub.publish(thruster_msg)
time.sleep(.05)
| robotics-at-maryland/qubo | src/teleop/src/keyboard_controller.py | Python | mit | 2,473 | 0.024262 |
# This file is part of Shoop.
#
# Copyright (c) 2012-2015, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals
from django.utils.translation import ugettext_lazy as _
import shoop.apps
class AppConfig(shoop.apps.AppConfig):
name = __name__
verbose_name = _("Simple CMS")
label = "shoop_simple_cms"
provides = {
"front_urls_post": [__name__ + ".urls:urlpatterns"],
"admin_module": [
"shoop.simple_cms.admin_module:SimpleCMSAdminModule"
],
"front_template_helper_namespace": [
"shoop.simple_cms.template_helpers:SimpleCMSTemplateHelpers"
]
}
default_app_config = __name__ + ".AppConfig"
| arth-co/shoop | shoop/simple_cms/__init__.py | Python | agpl-3.0 | 823 | 0 |
from datetime import timedelta
from decimal import Decimal
import os
import shutil
from whoosh.fields import TEXT, KEYWORD, NUMERIC, DATETIME, BOOLEAN
from whoosh.qparser import QueryParser
from django.conf import settings
from django.utils.datetime_safe import datetime, date
from django.test import TestCase
from haystack import connections, connection_router, reset_search_queries
from haystack import indexes
from haystack.inputs import AutoQuery
from haystack.models import SearchResult
from haystack.query import SearchQuerySet, SQ
from haystack.utils.loading import UnifiedIndex
from core.models import MockModel, AnotherMockModel, AFourthMockModel
from core.tests.mocks import MockSearchResult
class WhooshMockSearchIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
name = indexes.CharField(model_attr='author')
pub_date = indexes.DateField(model_attr='pub_date')
def get_model(self):
return MockModel
class WhooshAnotherMockSearchIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True)
name = indexes.CharField(model_attr='author')
pub_date = indexes.DateField(model_attr='pub_date')
def get_model(self):
return AnotherMockModel
def prepare_text(self, obj):
return obj.author
class AllTypesWhooshMockSearchIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
name = indexes.CharField(model_attr='author', indexed=False)
pub_date = indexes.DateField(model_attr='pub_date')
sites = indexes.MultiValueField()
seen_count = indexes.IntegerField(indexed=False)
is_active = indexes.BooleanField(default=True)
def get_model(self):
return MockModel
class WhooshMaintainTypeMockSearchIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True)
month = indexes.CharField(indexed=False)
pub_date = indexes.DateField(model_attr='pub_date')
def get_model(self):
return MockModel
def prepare_text(self, obj):
return "Indexed!\n%s" % obj.pk
def prepare_month(self, obj):
return "%02d" % obj.pub_date.month
class WhooshBoostMockSearchIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(
document=True, use_template=True,
template_name='search/indexes/core/mockmodel_template.txt'
)
author = indexes.CharField(model_attr='author', weight=2.0)
editor = indexes.CharField(model_attr='editor')
pub_date = indexes.DateField(model_attr='pub_date')
def get_model(self):
return AFourthMockModel
def prepare(self, obj):
data = super(WhooshBoostMockSearchIndex, self).prepare(obj)
if obj.pk % 2 == 0:
data['boost'] = 2.0
return data
class WhooshAutocompleteMockModelSearchIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(model_attr='foo', document=True)
name = indexes.CharField(model_attr='author')
pub_date = indexes.DateField(model_attr='pub_date')
text_auto = indexes.EdgeNgramField(model_attr='foo')
name_auto = indexes.EdgeNgramField(model_attr='author')
def get_model(self):
return MockModel
class WhooshSearchBackendTestCase(TestCase):
fixtures = ['bulk_data.json']
def setUp(self):
super(WhooshSearchBackendTestCase, self).setUp()
# Stow.
temp_path = os.path.join('tmp', 'test_whoosh_query')
self.old_whoosh_path = settings.HAYSTACK_CONNECTIONS['default']['PATH']
settings.HAYSTACK_CONNECTIONS['default']['PATH'] = temp_path
self.old_ui = connections['default'].get_unified_index()
self.ui = UnifiedIndex()
self.wmmi = WhooshMockSearchIndex()
self.wmtmmi = WhooshMaintainTypeMockSearchIndex()
self.ui.build(indexes=[self.wmmi])
self.sb = connections['default'].get_backend()
connections['default']._index = self.ui
self.sb.setup()
self.raw_whoosh = self.sb.index
self.parser = QueryParser(self.sb.content_field_name, schema=self.sb.schema)
self.sb.delete_index()
self.sample_objs = MockModel.objects.all()
def tearDown(self):
if os.path.exists(settings.HAYSTACK_CONNECTIONS['default']['PATH']):
shutil.rmtree(settings.HAYSTACK_CONNECTIONS['default']['PATH'])
settings.HAYSTACK_CONNECTIONS['default']['PATH'] = self.old_whoosh_path
connections['default']._index = self.old_ui
super(WhooshSearchBackendTestCase, self).tearDown()
def whoosh_search(self, query):
self.raw_whoosh = self.raw_whoosh.refresh()
searcher = self.raw_whoosh.searcher()
return searcher.search(self.parser.parse(query), limit=1000)
def test_non_silent(self):
bad_sb = connections['default'].backend('bad', PATH='/tmp/bad_whoosh', SILENTLY_FAIL=False)
bad_sb.use_file_storage = False
bad_sb.storage = 'omg.wtf.bbq'
try:
bad_sb.update(self.wmmi, self.sample_objs)
self.fail()
except:
pass
try:
bad_sb.remove('core.mockmodel.1')
self.fail()
except:
pass
try:
bad_sb.clear()
self.fail()
except:
pass
try:
bad_sb.search('foo')
self.fail()
except:
pass
def test_update(self):
self.sb.update(self.wmmi, self.sample_objs)
# Check what Whoosh thinks is there.
self.assertEqual(len(self.whoosh_search(u'*')), 23)
self.assertEqual([doc.fields()['id'] for doc in self.whoosh_search(u'*')], [u'core.mockmodel.%s' % i for i in xrange(1, 24)])
def test_remove(self):
self.sb.update(self.wmmi, self.sample_objs)
self.assertEqual(self.sb.index.doc_count(), 23)
self.sb.remove(self.sample_objs[0])
self.assertEqual(self.sb.index.doc_count(), 22)
def test_clear(self):
self.sb.update(self.wmmi, self.sample_objs)
self.assertEqual(self.sb.index.doc_count(), 23)
self.sb.clear()
self.assertEqual(self.sb.index.doc_count(), 0)
self.sb.update(self.wmmi, self.sample_objs)
self.assertEqual(self.sb.index.doc_count(), 23)
self.sb.clear([AnotherMockModel])
self.assertEqual(self.sb.index.doc_count(), 23)
self.sb.clear([MockModel])
self.assertEqual(self.sb.index.doc_count(), 0)
self.sb.index.refresh()
self.sb.update(self.wmmi, self.sample_objs)
self.assertEqual(self.sb.index.doc_count(), 23)
self.sb.clear([AnotherMockModel, MockModel])
self.assertEqual(self.raw_whoosh.doc_count(), 0)
def test_search(self):
self.sb.update(self.wmmi, self.sample_objs)
self.assertEqual(len(self.whoosh_search(u'*')), 23)
# No query string should always yield zero results.
self.assertEqual(self.sb.search(u''), {'hits': 0, 'results': []})
# A one letter query string gets nabbed by a stopwords filter. Should
# always yield zero results.
self.assertEqual(self.sb.search(u'a'), {'hits': 0, 'results': []})
# Possible AttributeError?
# self.assertEqual(self.sb.search(u'a b'), {'hits': 0, 'results': [], 'spelling_suggestion': '', 'facets': {}})
self.assertEqual(self.sb.search(u'*')['hits'], 23)
self.assertEqual([result.pk for result in self.sb.search(u'*')['results']], [u'%s' % i for i in xrange(1, 24)])
self.assertEqual(self.sb.search(u'', highlight=True), {'hits': 0, 'results': []})
self.assertEqual(self.sb.search(u'index*', highlight=True)['hits'], 23)
# DRL_FIXME: Uncomment once highlighting works.
# self.assertEqual([result.highlighted['text'][0] for result in self.sb.search('Index*', highlight=True)['results']], ['<em>Indexed</em>!\n3', '<em>Indexed</em>!\n2', '<em>Indexed</em>!\n1'])
self.assertEqual(self.sb.search(u'Indx')['hits'], 0)
self.assertEqual(self.sb.search(u'Indx')['spelling_suggestion'], u'index')
self.assertEqual(self.sb.search(u'', facets=['name']), {'hits': 0, 'results': []})
results = self.sb.search(u'Index*', facets=['name'])
results = self.sb.search(u'index*', facets=['name'])
self.assertEqual(results['hits'], 23)
self.assertEqual(results['facets'], {})
self.assertEqual(self.sb.search(u'', date_facets={'pub_date': {'start_date': date(2008, 2, 26), 'end_date': date(2008, 2, 26), 'gap': '/MONTH'}}), {'hits': 0, 'results': []})
results = self.sb.search(u'Index*', date_facets={'pub_date': {'start_date': date(2008, 2, 26), 'end_date': date(2008, 2, 26), 'gap': '/MONTH'}})
results = self.sb.search(u'index*', date_facets={'pub_date': {'start_date': date(2008, 2, 26), 'end_date': date(2008, 2, 26), 'gap': '/MONTH'}})
self.assertEqual(results['hits'], 23)
self.assertEqual(results['facets'], {})
self.assertEqual(self.sb.search(u'', query_facets={'name': '[* TO e]'}), {'hits': 0, 'results': []})
results = self.sb.search(u'Index*', query_facets={'name': '[* TO e]'})
results = self.sb.search(u'index*', query_facets={'name': '[* TO e]'})
self.assertEqual(results['hits'], 23)
self.assertEqual(results['facets'], {})
# self.assertEqual(self.sb.search('', narrow_queries=set(['name:daniel1'])), {'hits': 0, 'results': []})
# results = self.sb.search('Index*', narrow_queries=set(['name:daniel1']))
# self.assertEqual(results['hits'], 1)
# Ensure that swapping the ``result_class`` works.
self.assertTrue(isinstance(self.sb.search(u'Index*', result_class=MockSearchResult)['results'][0], MockSearchResult))
# Check the use of ``limit_to_registered_models``.
self.assertEqual(self.sb.search(u'', limit_to_registered_models=False), {'hits': 0, 'results': []})
self.assertEqual(self.sb.search(u'*', limit_to_registered_models=False)['hits'], 23)
self.assertEqual([result.pk for result in self.sb.search(u'*', limit_to_registered_models=False)['results']], [u'%s' % i for i in xrange(1, 24)])
# Stow.
old_limit_to_registered_models = getattr(settings, 'HAYSTACK_LIMIT_TO_REGISTERED_MODELS', True)
settings.HAYSTACK_LIMIT_TO_REGISTERED_MODELS = False
self.assertEqual(self.sb.search(u''), {'hits': 0, 'results': []})
self.assertEqual(self.sb.search(u'*')['hits'], 23)
self.assertEqual([result.pk for result in self.sb.search(u'*')['results']], [u'%s' % i for i in xrange(1, 24)])
# Restore.
settings.HAYSTACK_LIMIT_TO_REGISTERED_MODELS = old_limit_to_registered_models
def test_search_all_models(self):
wamsi = WhooshAnotherMockSearchIndex()
self.ui.build(indexes=[self.wmmi, wamsi])
self.sb.update(self.wmmi, self.sample_objs)
self.sb.update(wamsi, AnotherMockModel.objects.all())
self.assertEqual(len(self.whoosh_search(u'*')), 25)
self.ui.build(indexes=[self.wmmi])
def test_more_like_this(self):
self.sb.update(self.wmmi, self.sample_objs)
self.assertEqual(len(self.whoosh_search(u'*')), 23)
# Now supported by Whoosh (as of 1.8.4). See the ``LiveWhooshMoreLikeThisTestCase``.
self.assertEqual(self.sb.more_like_this(self.sample_objs[0])['hits'], 22)
# Make sure that swapping the ``result_class`` doesn't blow up.
try:
self.sb.more_like_this(self.sample_objs[0], result_class=MockSearchResult)
except:
self.fail()
def test_delete_index(self):
self.sb.update(self.wmmi, self.sample_objs)
self.assertTrue(self.sb.index.doc_count() > 0)
self.sb.delete_index()
self.assertEqual(self.sb.index.doc_count(), 0)
def test_order_by(self):
self.sb.update(self.wmmi, self.sample_objs)
results = self.sb.search(u'*', sort_by=['pub_date'])
self.assertEqual([result.pk for result in results['results']], [u'1', u'3', u'2', u'4', u'5', u'6', u'7', u'8', u'9', u'10', u'11', u'12', u'13', u'14', u'15', u'16', u'17', u'18', u'19', u'20', u'21', u'22', u'23'])
results = self.sb.search(u'*', sort_by=['-pub_date'])
self.assertEqual([result.pk for result in results['results']], [u'23', u'22', u'21', u'20', u'19', u'18', u'17', u'16', u'15', u'14', u'13', u'12', u'11', u'10', u'9', u'8', u'7', u'6', u'5', u'4', u'2', u'3', u'1'])
results = self.sb.search(u'*', sort_by=['id'])
self.assertEqual([result.pk for result in results['results']], [u'1', u'10', u'11', u'12', u'13', u'14', u'15', u'16', u'17', u'18', u'19', u'2', u'20', u'21', u'22', u'23', u'3', u'4', u'5', u'6', u'7', u'8', u'9'])
results = self.sb.search(u'*', sort_by=['-id'])
self.assertEqual([result.pk for result in results['results']], [u'9', u'8', u'7', u'6', u'5', u'4', u'3', u'23', u'22', u'21', u'20', u'2', u'19', u'18', u'17', u'16', u'15', u'14', u'13', u'12', u'11', u'10', u'1'])
def test__from_python(self):
self.assertEqual(self.sb._from_python('abc'), u'abc')
self.assertEqual(self.sb._from_python(1), 1)
self.assertEqual(self.sb._from_python(2653), 2653)
self.assertEqual(self.sb._from_python(25.5), 25.5)
self.assertEqual(self.sb._from_python([1, 2, 3]), u'1,2,3')
self.assertEqual(self.sb._from_python({'a': 1, 'c': 3, 'b': 2}), u"{'a': 1, 'c': 3, 'b': 2}")
self.assertEqual(self.sb._from_python(datetime(2009, 5, 9, 16, 14)), datetime(2009, 5, 9, 16, 14))
self.assertEqual(self.sb._from_python(datetime(2009, 5, 9, 0, 0)), datetime(2009, 5, 9, 0, 0))
self.assertEqual(self.sb._from_python(datetime(1899, 5, 18, 0, 0)), datetime(1899, 5, 18, 0, 0))
self.assertEqual(self.sb._from_python(datetime(2009, 5, 18, 1, 16, 30, 250)), datetime(2009, 5, 18, 1, 16, 30, 250))
def test__to_python(self):
self.assertEqual(self.sb._to_python('abc'), 'abc')
self.assertEqual(self.sb._to_python('1'), 1)
self.assertEqual(self.sb._to_python('2653'), 2653)
self.assertEqual(self.sb._to_python('25.5'), 25.5)
self.assertEqual(self.sb._to_python('[1, 2, 3]'), [1, 2, 3])
self.assertEqual(self.sb._to_python('{"a": 1, "b": 2, "c": 3}'), {'a': 1, 'c': 3, 'b': 2})
self.assertEqual(self.sb._to_python('2009-05-09T16:14:00'), datetime(2009, 5, 9, 16, 14))
self.assertEqual(self.sb._to_python('2009-05-09T00:00:00'), datetime(2009, 5, 9, 0, 0))
self.assertEqual(self.sb._to_python(None), None)
def test_range_queries(self):
self.sb.update(self.wmmi, self.sample_objs)
self.assertEqual(len(self.whoosh_search(u'[d TO]')), 23)
self.assertEqual(len(self.whoosh_search(u'name:[d TO]')), 23)
self.assertEqual(len(self.whoosh_search(u'Ind* AND name:[d to]')), 23)
self.assertEqual(len(self.whoosh_search(u'Ind* AND name:[to c]')), 0)
def test_date_queries(self):
self.sb.update(self.wmmi, self.sample_objs)
self.assertEqual(len(self.whoosh_search(u"pub_date:20090717003000")), 1)
self.assertEqual(len(self.whoosh_search(u"pub_date:20090717000000")), 0)
self.assertEqual(len(self.whoosh_search(u'Ind* AND pub_date:[to 20090717003000]')), 3)
def test_escaped_characters_queries(self):
self.sb.update(self.wmmi, self.sample_objs)
self.assertEqual(len(self.whoosh_search(u"Indexed\!")), 23)
self.assertEqual(len(self.whoosh_search(u"http\:\/\/www\.example\.com")), 0)
def test_build_schema(self):
ui = UnifiedIndex()
ui.build(indexes=[AllTypesWhooshMockSearchIndex()])
(content_field_name, schema) = self.sb.build_schema(ui.all_searchfields())
self.assertEqual(content_field_name, 'text')
self.assertEqual(len(schema.names()), 9)
self.assertEqual(schema.names(), ['django_ct', 'django_id', 'id', 'is_active', 'name', 'pub_date', 'seen_count', 'sites', 'text'])
self.assertTrue(isinstance(schema._fields['text'], TEXT))
self.assertTrue(isinstance(schema._fields['pub_date'], DATETIME))
self.assertTrue(isinstance(schema._fields['seen_count'], NUMERIC))
self.assertTrue(isinstance(schema._fields['sites'], KEYWORD))
self.assertTrue(isinstance(schema._fields['is_active'], BOOLEAN))
def test_verify_type(self):
old_ui = connections['default'].get_unified_index()
ui = UnifiedIndex()
wmtmmi = WhooshMaintainTypeMockSearchIndex()
ui.build(indexes=[wmtmmi])
connections['default']._index = ui
sb = connections['default'].get_backend()
sb.setup()
sb.update(wmtmmi, self.sample_objs)
self.assertEqual(sb.search(u'*')['hits'], 23)
self.assertEqual([result.month for result in sb.search(u'*')['results']], [u'06', u'07', u'06', u'07', u'07', u'07', u'07', u'07', u'07', u'07', u'07', u'07', u'07', u'07', u'07', u'07', u'07', u'07', u'07', u'07', u'07', u'07', u'07'])
connections['default']._index = old_ui
def test_writable(self):
if getattr(settings, 'HAYSTACK_WHOOSH_STORAGE', 'file') == 'file':
if not os.path.exists(settings.HAYSTACK_CONNECTIONS['default']['PATH']):
os.makedirs(settings.HAYSTACK_CONNECTIONS['default']['PATH'])
os.chmod(settings.HAYSTACK_CONNECTIONS['default']['PATH'], 0400)
try:
self.sb.setup()
self.fail()
except IOError:
# Yay. We failed
pass
os.chmod(settings.HAYSTACK_CONNECTIONS['default']['PATH'], 0755)
def test_slicing(self):
self.sb.update(self.wmmi, self.sample_objs)
page_1 = self.sb.search(u'*', start_offset=0, end_offset=20)
page_2 = self.sb.search(u'*', start_offset=20, end_offset=30)
self.assertEqual(len(page_1['results']), 20)
self.assertEqual([result.pk for result in page_1['results']], [u'%s' % i for i in xrange(1, 21)])
self.assertEqual(len(page_2['results']), 3)
self.assertEqual([result.pk for result in page_2['results']], [u'21', u'22', u'23'])
# This used to throw an error.
page_0 = self.sb.search(u'*', start_offset=0, end_offset=0)
self.assertEqual(len(page_0['results']), 1)
def test_scoring(self):
self.sb.update(self.wmmi, self.sample_objs)
page_1 = self.sb.search(u'index', start_offset=0, end_offset=20)
page_2 = self.sb.search(u'index', start_offset=20, end_offset=30)
self.assertEqual(len(page_1['results']), 20)
self.assertEqual(["%0.2f" % result.score for result in page_1['results']], ['0.51', '0.51', '0.51', '0.51', '0.51', '0.51', '0.51', '0.51', '0.51', '0.40', '0.40', '0.40', '0.40', '0.40', '0.40', '0.40', '0.40', '0.40', '0.40', '0.40'])
self.assertEqual(len(page_2['results']), 3)
self.assertEqual(["%0.2f" % result.score for result in page_2['results']], ['0.40', '0.40', '0.40'])
class WhooshBoostBackendTestCase(TestCase):
def setUp(self):
super(WhooshBoostBackendTestCase, self).setUp()
# Stow.
temp_path = os.path.join('tmp', 'test_whoosh_query')
self.old_whoosh_path = settings.HAYSTACK_CONNECTIONS['default']['PATH']
settings.HAYSTACK_CONNECTIONS['default']['PATH'] = temp_path
self.old_ui = connections['default'].get_unified_index()
self.ui = UnifiedIndex()
self.wmmi = WhooshBoostMockSearchIndex()
self.ui.build(indexes=[self.wmmi])
self.sb = connections['default'].get_backend()
connections['default']._index = self.ui
self.sb.setup()
self.raw_whoosh = self.sb.index
self.parser = QueryParser(self.sb.content_field_name, schema=self.sb.schema)
self.sb.delete_index()
self.sample_objs = []
for i in xrange(1, 5):
mock = AFourthMockModel()
mock.id = i
if i % 2:
mock.author = 'daniel'
mock.editor = 'david'
else:
mock.author = 'david'
mock.editor = 'daniel'
mock.pub_date = date(2009, 2, 25) - timedelta(days=i)
self.sample_objs.append(mock)
def tearDown(self):
if os.path.exists(settings.HAYSTACK_CONNECTIONS['default']['PATH']):
shutil.rmtree(settings.HAYSTACK_CONNECTIONS['default']['PATH'])
settings.HAYSTACK_CONNECTIONS['default']['PATH'] = self.old_whoosh_path
        connections['default']._index = self.old_ui
super(WhooshBoostBackendTestCase, self).tearDown()
def test_boost(self):
self.sb.update(self.wmmi, self.sample_objs)
self.raw_whoosh = self.raw_whoosh.refresh()
searcher = self.raw_whoosh.searcher()
self.assertEqual(len(searcher.search(self.parser.parse(u'*'), limit=1000)), 2)
results = SearchQuerySet().filter(SQ(author='daniel') | SQ(editor='daniel'))
self.assertEqual([result.id for result in results], [
'core.afourthmockmodel.1',
'core.afourthmockmodel.3',
])
self.assertEqual(results[0].boost, 1.1)
class LiveWhooshSearchQueryTestCase(TestCase):
def setUp(self):
super(LiveWhooshSearchQueryTestCase, self).setUp()
# Stow.
temp_path = os.path.join('tmp', 'test_whoosh_query')
self.old_whoosh_path = settings.HAYSTACK_CONNECTIONS['default']['PATH']
settings.HAYSTACK_CONNECTIONS['default']['PATH'] = temp_path
self.old_ui = connections['default'].get_unified_index()
self.ui = UnifiedIndex()
self.wmmi = WhooshMockSearchIndex()
self.wmtmmi = WhooshMaintainTypeMockSearchIndex()
self.ui.build(indexes=[self.wmmi])
self.sb = connections['default'].get_backend()
connections['default']._index = self.ui
self.sb.setup()
self.raw_whoosh = self.sb.index
self.parser = QueryParser(self.sb.content_field_name, schema=self.sb.schema)
self.sb.delete_index()
self.sample_objs = []
for i in xrange(1, 4):
mock = MockModel()
mock.id = i
mock.author = 'daniel%s' % i
mock.pub_date = date(2009, 2, 25) - timedelta(days=i)
self.sample_objs.append(mock)
self.sq = connections['default'].get_query()
def tearDown(self):
if os.path.exists(settings.HAYSTACK_CONNECTIONS['default']['PATH']):
shutil.rmtree(settings.HAYSTACK_CONNECTIONS['default']['PATH'])
settings.HAYSTACK_CONNECTIONS['default']['PATH'] = self.old_whoosh_path
connections['default']._index = self.old_ui
super(LiveWhooshSearchQueryTestCase, self).tearDown()
def test_get_spelling(self):
self.sb.update(self.wmmi, self.sample_objs)
self.sq.add_filter(SQ(content='Indx'))
self.assertEqual(self.sq.get_spelling_suggestion(), u'index')
def test_log_query(self):
from django.conf import settings
reset_search_queries()
self.assertEqual(len(connections['default'].queries), 0)
# Stow.
old_debug = settings.DEBUG
settings.DEBUG = False
len(self.sq.get_results())
self.assertEqual(len(connections['default'].queries), 0)
settings.DEBUG = True
# Redefine it to clear out the cached results.
self.sq = connections['default'].get_query()
self.sq.add_filter(SQ(name='bar'))
len(self.sq.get_results())
self.assertEqual(len(connections['default'].queries), 1)
self.assertEqual(connections['default'].queries[0]['query_string'], 'name:(bar)')
# And again, for good measure.
self.sq = connections['default'].get_query()
self.sq.add_filter(SQ(name='baz'))
self.sq.add_filter(SQ(text='foo'))
len(self.sq.get_results())
self.assertEqual(len(connections['default'].queries), 2)
self.assertEqual(connections['default'].queries[0]['query_string'], 'name:(bar)')
self.assertEqual(connections['default'].queries[1]['query_string'], u'(name:(baz) AND text:(foo))')
# Restore.
settings.DEBUG = old_debug
class LiveWhooshSearchQuerySetTestCase(TestCase):
def setUp(self):
super(LiveWhooshSearchQuerySetTestCase, self).setUp()
# Stow.
temp_path = os.path.join('tmp', 'test_whoosh_query')
self.old_whoosh_path = settings.HAYSTACK_CONNECTIONS['default']['PATH']
settings.HAYSTACK_CONNECTIONS['default']['PATH'] = temp_path
self.old_ui = connections['default'].get_unified_index()
self.ui = UnifiedIndex()
self.wmmi = WhooshMockSearchIndex()
self.ui.build(indexes=[self.wmmi])
self.sb = connections['default'].get_backend()
connections['default']._index = self.ui
# Stow.
self.old_debug = settings.DEBUG
settings.DEBUG = True
self.sb.setup()
self.raw_whoosh = self.sb.index
self.parser = QueryParser(self.sb.content_field_name, schema=self.sb.schema)
self.sb.delete_index()
self.sample_objs = []
for i in xrange(1, 4):
mock = MockModel()
mock.id = i
mock.author = 'daniel%s' % i
mock.pub_date = date(2009, 2, 25) - timedelta(days=i)
self.sample_objs.append(mock)
self.sq = connections['default'].get_query()
self.sqs = SearchQuerySet()
def tearDown(self):
if os.path.exists(settings.HAYSTACK_CONNECTIONS['default']['PATH']):
shutil.rmtree(settings.HAYSTACK_CONNECTIONS['default']['PATH'])
settings.HAYSTACK_CONNECTIONS['default']['PATH'] = self.old_whoosh_path
connections['default']._index = self.old_ui
super(LiveWhooshSearchQuerySetTestCase, self).tearDown()
def test_various_searchquerysets(self):
self.sb.update(self.wmmi, self.sample_objs)
sqs = self.sqs.filter(content='Index')
self.assertEqual(sqs.query.build_query(), u'(Index)')
self.assertEqual(len(sqs), 3)
sqs = self.sqs.auto_query('Indexed!')
self.assertEqual(sqs.query.build_query(), u"('Indexed!')")
self.assertEqual(len(sqs), 3)
sqs = self.sqs.auto_query('Indexed!').filter(pub_date__lte=date(2009, 8, 31))
self.assertEqual(sqs.query.build_query(), u"(('Indexed!') AND pub_date:([to 20090831000000]))")
self.assertEqual(len(sqs), 3)
sqs = self.sqs.auto_query('Indexed!').filter(pub_date__lte=date(2009, 2, 23))
self.assertEqual(sqs.query.build_query(), u"(('Indexed!') AND pub_date:([to 20090223000000]))")
self.assertEqual(len(sqs), 2)
sqs = self.sqs.auto_query('Indexed!').filter(pub_date__lte=date(2009, 2, 25)).filter(django_id__in=[1, 2]).exclude(name='daniel1')
self.assertEqual(sqs.query.build_query(), u'((\'Indexed!\') AND pub_date:([to 20090225000000]) AND django_id:(1 OR 2) AND NOT (name:(daniel1)))')
self.assertEqual(len(sqs), 1)
sqs = self.sqs.auto_query('re-inker')
self.assertEqual(sqs.query.build_query(), u"('re-inker')")
self.assertEqual(len(sqs), 0)
sqs = self.sqs.auto_query('0.7 wire')
self.assertEqual(sqs.query.build_query(), u"('0.7' wire)")
self.assertEqual(len(sqs), 0)
sqs = self.sqs.auto_query("daler-rowney pearlescent 'bell bronze'")
self.assertEqual(sqs.query.build_query(), u"('daler-rowney' pearlescent 'bell bronze')")
self.assertEqual(len(sqs), 0)
sqs = self.sqs.models(MockModel)
self.assertEqual(sqs.query.build_query(), u'*')
self.assertEqual(len(sqs), 3)
def test_all_regression(self):
sqs = SearchQuerySet()
self.assertEqual([result.pk for result in sqs], [])
self.sb.update(self.wmmi, self.sample_objs)
self.assertTrue(self.sb.index.doc_count() > 0)
sqs = SearchQuerySet()
self.assertEqual(len(sqs), 3)
self.assertEqual(sorted([result.pk for result in sqs]), [u'1', u'2', u'3'])
try:
sqs = repr(SearchQuerySet())
except:
self.fail()
def test_regression_space_query(self):
self.sb.update(self.wmmi, self.sample_objs)
self.assertTrue(self.sb.index.doc_count() > 0)
sqs = SearchQuerySet().auto_query(" ")
self.assertEqual(len(sqs), 3)
sqs = SearchQuerySet().filter(content=" ")
self.assertEqual(len(sqs), 0)
def test_iter(self):
self.sb.update(self.wmmi, self.sample_objs)
reset_search_queries()
self.assertEqual(len(connections['default'].queries), 0)
sqs = self.sqs.auto_query('Indexed!')
results = [int(result.pk) for result in sqs]
self.assertEqual(sorted(results), [1, 2, 3])
self.assertEqual(len(connections['default'].queries), 1)
def test_slice(self):
self.sb.update(self.wmmi, self.sample_objs)
reset_search_queries()
self.assertEqual(len(connections['default'].queries), 0)
results = self.sqs.auto_query('Indexed!')
self.assertEqual(sorted([int(result.pk) for result in results[1:3]]), [1, 2])
self.assertEqual(len(connections['default'].queries), 1)
reset_search_queries()
self.assertEqual(len(connections['default'].queries), 0)
results = self.sqs.auto_query('Indexed!')
self.assertEqual(int(results[0].pk), 1)
self.assertEqual(len(connections['default'].queries), 1)
def test_manual_iter(self):
self.sb.update(self.wmmi, self.sample_objs)
results = self.sqs.auto_query('Indexed!')
reset_search_queries()
self.assertEqual(len(connections['default'].queries), 0)
results = [int(result.pk) for result in results._manual_iter()]
self.assertEqual(sorted(results), [1, 2, 3])
self.assertEqual(len(connections['default'].queries), 1)
def test_fill_cache(self):
self.sb.update(self.wmmi, self.sample_objs)
reset_search_queries()
self.assertEqual(len(connections['default'].queries), 0)
results = self.sqs.auto_query('Indexed!')
self.assertEqual(len(results._result_cache), 0)
self.assertEqual(len(connections['default'].queries), 0)
results._fill_cache(0, 10)
self.assertEqual(len([result for result in results._result_cache if result is not None]), 3)
self.assertEqual(len(connections['default'].queries), 1)
results._fill_cache(10, 20)
self.assertEqual(len([result for result in results._result_cache if result is not None]), 3)
self.assertEqual(len(connections['default'].queries), 2)
def test_cache_is_full(self):
self.sb.update(self.wmmi, self.sample_objs)
reset_search_queries()
self.assertEqual(len(connections['default'].queries), 0)
self.assertEqual(self.sqs._cache_is_full(), False)
results = self.sqs.auto_query('Indexed!')
fire_the_iterator_and_fill_cache = [result for result in results]
self.assertEqual(results._cache_is_full(), True)
self.assertEqual(len(connections['default'].queries), 1)
def test_count(self):
more_samples = []
for i in xrange(1, 50):
mock = MockModel()
mock.id = i
mock.author = 'daniel%s' % i
mock.pub_date = date(2009, 2, 25) - timedelta(days=i)
more_samples.append(mock)
self.sb.update(self.wmmi, more_samples)
reset_search_queries()
self.assertEqual(len(connections['default'].queries), 0)
results = self.sqs.all()
self.assertEqual(len(results), 49)
self.assertEqual(results._cache_is_full(), False)
self.assertEqual(len(connections['default'].queries), 1)
def test_query_generation(self):
sqs = self.sqs.filter(SQ(content=AutoQuery("hello world")) | SQ(title=AutoQuery("hello world")))
self.assertEqual(sqs.query.build_query(), u"((hello world) OR title:(hello world))")
def test_result_class(self):
self.sb.update(self.wmmi, self.sample_objs)
# Assert that we're defaulting to ``SearchResult``.
sqs = self.sqs.all()
self.assertTrue(isinstance(sqs[0], SearchResult))
# Custom class.
sqs = self.sqs.result_class(MockSearchResult).all()
self.assertTrue(isinstance(sqs[0], MockSearchResult))
# Reset to default.
sqs = self.sqs.result_class(None).all()
self.assertTrue(isinstance(sqs[0], SearchResult))
class LiveWhooshMultiSearchQuerySetTestCase(TestCase):
fixtures = ['bulk_data.json']
def setUp(self):
super(LiveWhooshMultiSearchQuerySetTestCase, self).setUp()
# Stow.
temp_path = os.path.join('tmp', 'test_whoosh_query')
        self.old_whoosh_path = settings.HAYSTACK_CONNECTIONS['default']['PATH']
        settings.HAYSTACK_CONNECTIONS['default']['PATH'] = temp_path
self.old_ui = connections['default'].get_unified_index()
self.ui = UnifiedIndex()
self.wmmi = WhooshMockSearchIndex()
self.wamsi = WhooshAnotherMockSearchIndex()
self.ui.build(indexes=[self.wmmi, self.wamsi])
self.sb = connections['default'].get_backend()
connections['default']._index = self.ui
self.sb.setup()
self.raw_whoosh = self.sb.index
self.parser = QueryParser(self.sb.content_field_name, schema=self.sb.schema)
self.sb.delete_index()
self.wmmi.update()
self.wamsi.update()
self.sqs = SearchQuerySet()
def tearDown(self):
if os.path.exists(settings.HAYSTACK_CONNECTIONS['default']['PATH']):
shutil.rmtree(settings.HAYSTACK_CONNECTIONS['default']['PATH'])
settings.HAYSTACK_CONNECTIONS['default']['PATH'] = self.old_whoosh_path
connections['default']._index = self.old_ui
super(LiveWhooshMultiSearchQuerySetTestCase, self).tearDown()
def test_searchquerysets_with_models(self):
sqs = self.sqs.all()
self.assertEqual(sqs.query.build_query(), u'*')
self.assertEqual(len(sqs), 25)
sqs = self.sqs.models(MockModel)
self.assertEqual(sqs.query.build_query(), u'*')
self.assertEqual(len(sqs), 23)
sqs = self.sqs.models(AnotherMockModel)
self.assertEqual(sqs.query.build_query(), u'*')
self.assertEqual(len(sqs), 2)
class LiveWhooshMoreLikeThisTestCase(TestCase):
fixtures = ['bulk_data.json']
def setUp(self):
super(LiveWhooshMoreLikeThisTestCase, self).setUp()
# Stow.
temp_path = os.path.join('tmp', 'test_whoosh_query')
self.old_whoosh_path = settings.HAYSTACK_CONNECTIONS['default']['PATH']
settings.HAYSTACK_CONNECTIONS['default']['PATH'] = temp_path
self.old_ui = connections['default'].get_unified_index()
self.ui = UnifiedIndex()
self.wmmi = WhooshMockSearchIndex()
self.wamsi = WhooshAnotherMockSearchIndex()
self.ui.build(indexes=[self.wmmi, self.wamsi])
self.sb = connections['default'].get_backend()
connections['default']._index = self.ui
self.sb.setup()
self.raw_whoosh = self.sb.index
self.parser = QueryParser(self.sb.content_field_name, schema=self.sb.schema)
self.sb.delete_index()
self.wmmi.update()
self.wamsi.update()
self.sqs = SearchQuerySet()
def tearDown(self):
if os.path.exists(settings.HAYSTACK_CONNECTIONS['default']['PATH']):
shutil.rmtree(settings.HAYSTACK_CONNECTIONS['default']['PATH'])
settings.HAYSTACK_CONNECTIONS['default']['PATH'] = self.old_whoosh_path
connections['default']._index = self.old_ui
super(LiveWhooshMoreLikeThisTestCase, self).tearDown()
def test_more_like_this(self):
mlt = self.sqs.more_like_this(MockModel.objects.get(pk=22))
self.assertEqual(mlt.count(), 22)
self.assertEqual(sorted([result.pk for result in mlt]), sorted([u'9', u'8', u'7', u'6', u'5', u'4', u'3', u'2', u'1', u'21', u'20', u'19', u'18', u'17', u'16', u'15', u'14', u'13', u'12', u'11', u'10', u'23']))
self.assertEqual(len([result.pk for result in mlt]), 22)
alt_mlt = self.sqs.filter(name='daniel3').more_like_this(MockModel.objects.get(pk=13))
self.assertEqual(alt_mlt.count(), 8)
self.assertEqual(sorted([result.pk for result in alt_mlt]), sorted([u'4', u'3', u'22', u'19', u'17', u'16', u'10', u'23']))
self.assertEqual(len([result.pk for result in alt_mlt]), 8)
alt_mlt_with_models = self.sqs.models(MockModel).more_like_this(MockModel.objects.get(pk=11))
self.assertEqual(alt_mlt_with_models.count(), 22)
self.assertEqual(sorted([result.pk for result in alt_mlt_with_models]), sorted([u'9', u'8', u'7', u'6', u'5', u'4', u'3', u'2', u'1', u'22', u'21', u'20', u'19', u'18', u'17', u'16', u'15', u'14', u'13', u'12', u'10', u'23']))
self.assertEqual(len([result.pk for result in alt_mlt_with_models]), 22)
if hasattr(MockModel.objects, 'defer'):
# Make sure MLT works with deferred bits.
mi = MockModel.objects.defer('foo').get(pk=21)
self.assertEqual(mi._deferred, True)
deferred = self.sqs.models(MockModel).more_like_this(mi)
self.assertEqual(deferred.count(), 0)
self.assertEqual([result.pk for result in deferred], [])
self.assertEqual(len([result.pk for result in deferred]), 0)
# Ensure that swapping the ``result_class`` works.
self.assertTrue(isinstance(self.sqs.result_class(MockSearchResult).more_like_this(MockModel.objects.get(pk=21))[0], MockSearchResult))
class LiveWhooshAutocompleteTestCase(TestCase):
fixtures = ['bulk_data.json']
def setUp(self):
super(LiveWhooshAutocompleteTestCase, self).setUp()
# Stow.
temp_path = os.path.join('tmp', 'test_whoosh_query')
self.old_whoosh_path = settings.HAYSTACK_CONNECTIONS['default']['PATH']
settings.HAYSTACK_CONNECTIONS['default']['PATH'] = temp_path
self.old_ui = connections['default'].get_unified_index()
self.ui = UnifiedIndex()
self.wacsi = WhooshAutocompleteMockModelSearchIndex()
self.ui.build(indexes=[self.wacsi])
self.sb = connections['default'].get_backend()
connections['default']._index = self.ui
# Stow.
import haystack
self.old_debug = settings.DEBUG
settings.DEBUG = True
self.sb.setup()
self.sqs = SearchQuerySet()
# Wipe it clean.
self.sqs.query.backend.clear()
for mock in MockModel.objects.all():
self.wacsi.update_object(mock)
def tearDown(self):
if os.path.exists(settings.HAYSTACK_CONNECTIONS['default']['PATH']):
shutil.rmtree(settings.HAYSTACK_CONNECTIONS['default']['PATH'])
settings.HAYSTACK_CONNECTIONS['default']['PATH'] = self.old_whoosh_path
connections['default']._index = self.old_ui
settings.DEBUG = self.old_debug
super(LiveWhooshAutocompleteTestCase, self).tearDown()
def test_autocomplete(self):
autocomplete = self.sqs.autocomplete(text_auto='mod')
self.assertEqual(autocomplete.count(), 5)
self.assertEqual([result.pk for result in autocomplete], [u'1', u'12', u'6', u'7', u'14'])
self.assertTrue('mod' in autocomplete[0].text.lower())
self.assertTrue('mod' in autocomplete[1].text.lower())
self.assertTrue('mod' in autocomplete[2].text.lower())
self.assertTrue('mod' in autocomplete[3].text.lower())
self.assertTrue('mod' in autocomplete[4].text.lower())
self.assertEqual(len([result.pk for result in autocomplete]), 5)
def test_edgengram_regression(self):
autocomplete = self.sqs.autocomplete(text_auto='ngm')
self.assertEqual(autocomplete.count(), 0)
class WhooshRoundTripSearchIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, default='')
name = indexes.CharField()
is_active = indexes.BooleanField()
post_count = indexes.IntegerField()
average_rating = indexes.FloatField()
price = indexes.DecimalField()
pub_date = indexes.DateField()
created = indexes.DateTimeField()
tags = indexes.MultiValueField()
sites = indexes.MultiValueField()
# For a regression involving lists with nothing in them.
empty_list = indexes.MultiValueField()
def get_model(self):
return MockModel
def prepare(self, obj):
prepped = super(WhooshRoundTripSearchIndex, self).prepare(obj)
prepped.update({
'text': 'This is some example text.',
'name': 'Mister Pants',
'is_active': True,
'post_count': 25,
'average_rating': 3.6,
'price': Decimal('24.99'),
'pub_date': date(2009, 11, 21),
'created': datetime(2009, 11, 21, 21, 31, 00),
'tags': ['staff', 'outdoor', 'activist', 'scientist'],
'sites': [3, 5, 1],
'empty_list': [],
})
return prepped
class LiveWhooshRoundTripTestCase(TestCase):
def setUp(self):
super(LiveWhooshRoundTripTestCase, self).setUp()
# Stow.
temp_path = os.path.join('tmp', 'test_whoosh_query')
self.old_whoosh_path = settings.HAYSTACK_CONNECTIONS['default']['PATH']
settings.HAYSTACK_CONNECTIONS['default']['PATH'] = temp_path
self.old_ui = connections['default'].get_unified_index()
self.ui = UnifiedIndex()
self.wrtsi = WhooshRoundTripSearchIndex()
self.ui.build(indexes=[self.wrtsi])
self.sb = connections['default'].get_backend()
connections['default']._index = self.ui
self.old_debug = settings.DEBUG
settings.DEBUG = True
self.sb.setup()
self.raw_whoosh = self.sb.index
self.parser = QueryParser(self.sb.content_field_name, schema=self.sb.schema)
self.sb.delete_index()
self.sqs = SearchQuerySet()
# Wipe it clean.
self.sqs.query.backend.clear()
# Fake indexing.
mock = MockModel()
mock.id = 1
self.sb.update(self.wrtsi, [mock])
def tearDown(self):
if os.path.exists(settings.HAYSTACK_CONNECTIONS['default']['PATH']):
shutil.rmtree(settings.HAYSTACK_CONNECTIONS['default']['PATH'])
settings.HAYSTACK_CONNECTIONS['default']['PATH'] = self.old_whoosh_path
settings.DEBUG = self.old_debug
super(LiveWhooshRoundTripTestCase, self).tearDown()
def test_round_trip(self):
results = self.sqs.filter(id='core.mockmodel.1')
# Sanity check.
self.assertEqual(results.count(), 1)
# Check the individual fields.
result = results[0]
self.assertEqual(result.id, 'core.mockmodel.1')
self.assertEqual(result.text, 'This is some example text.')
self.assertEqual(result.name, 'Mister Pants')
self.assertEqual(result.is_active, True)
self.assertEqual(result.post_count, 25)
self.assertEqual(result.average_rating, 3.6)
self.assertEqual(result.price, u'24.99')
self.assertEqual(result.pub_date, datetime(2009, 11, 21, 0, 0))
self.assertEqual(result.created, datetime(2009, 11, 21, 21, 31, 00))
self.assertEqual(result.tags, ['staff', 'outdoor', 'activist', 'scientist'])
self.assertEqual(result.sites, [u'3', u'5', u'1'])
self.assertEqual(result.empty_list, [])
# Check boolean filtering...
results = self.sqs.filter(id='core.mockmodel.1', is_active=True)
self.assertEqual(results.count(), 1)
class LiveWhooshRamStorageTestCase(TestCase):
def setUp(self):
super(LiveWhooshRamStorageTestCase, self).setUp()
# Stow.
self.old_whoosh_storage = settings.HAYSTACK_CONNECTIONS['default'].get('STORAGE', 'file')
settings.HAYSTACK_CONNECTIONS['default']['STORAGE'] = 'ram'
self.old_ui = connections['default'].get_unified_index()
self.ui = UnifiedIndex()
self.wrtsi = WhooshRoundTripSearchIndex()
self.ui.build(indexes=[self.wrtsi])
self.sb = connections['default'].get_backend()
connections['default']._index = self.ui
# Stow.
import haystack
self.old_debug = settings.DEBUG
settings.DEBUG = True
self.sb.setup()
self.raw_whoosh = self.sb.index
self.parser = QueryParser(self.sb.content_field_name, schema=self.sb.schema)
self.sqs = SearchQuerySet()
# Wipe it clean.
self.sqs.query.backend.clear()
# Fake indexing.
mock = MockModel()
mock.id = 1
self.sb.update(self.wrtsi, [mock])
def tearDown(self):
self.sqs.query.backend.clear()
settings.HAYSTACK_CONNECTIONS['default']['STORAGE'] = self.old_whoosh_storage
connections['default']._index = self.old_ui
settings.DEBUG = self.old_debug
super(LiveWhooshRamStorageTestCase, self).tearDown()
def test_ram_storage(self):
results = self.sqs.filter(id='core.mockmodel.1')
# Sanity check.
self.assertEqual(results.count(), 1)
# Check the individual fields.
result = results[0]
self.assertEqual(result.id, 'core.mockmodel.1')
self.assertEqual(result.text, 'This is some example text.')
self.assertEqual(result.name, 'Mister Pants')
self.assertEqual(result.is_active, True)
self.assertEqual(result.post_count, 25)
self.assertEqual(result.average_rating, 3.6)
self.assertEqual(result.pub_date, datetime(2009, 11, 21, 0, 0))
self.assertEqual(result.created, datetime(2009, 11, 21, 21, 31, 00))
self.assertEqual(result.tags, ['staff', 'outdoor', 'activist', 'scientist'])
self.assertEqual(result.sites, [u'3', u'5', u'1'])
self.assertEqual(result.empty_list, [])
| ericholscher/django-haystack | tests/whoosh_tests/tests/whoosh_backend.py | Python | bsd-3-clause | 46,194 | 0.002165 |
#!/usr/bin/env python3
"""Combine logs from multiple bitcore nodes as well as the test_framework log.
This streams the combined log output to stdout. Use combine_logs.py > outputfile
to write to an outputfile."""
import argparse
from collections import defaultdict, namedtuple
import heapq
import itertools
import os
import re
import sys
# Matches on the date format at the start of the log event
TIMESTAMP_PATTERN = re.compile(r"^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}\.\d{6}")
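# An illustrative line prefix that the pattern above matches (the value is
# made up, not taken from a real log): "2018-03-01 12:34:56.789012".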
LogEvent = namedtuple('LogEvent', ['timestamp', 'source', 'event'])
def main():
"""Main function. Parses args, reads the log files and renders them as text or html."""
parser = argparse.ArgumentParser(usage='%(prog)s [options] <test temporary directory>', description=__doc__)
parser.add_argument('-c', '--color', dest='color', action='store_true', help='outputs the combined log with events colored by source (requires posix terminal colors. Use less -r for viewing)')
parser.add_argument('--html', dest='html', action='store_true', help='outputs the combined log as html. Requires jinja2. pip install jinja2')
args, unknown_args = parser.parse_known_args()
if args.color and os.name != 'posix':
print("Color output requires posix terminal colors.")
sys.exit(1)
if args.html and args.color:
print("Only one out of --color or --html should be specified")
sys.exit(1)
# There should only be one unknown argument - the path of the temporary test directory
if len(unknown_args) != 1:
print("Unexpected arguments" + str(unknown_args))
sys.exit(1)
log_events = read_logs(unknown_args[0])
print_logs(log_events, color=args.color, html=args.html)
def read_logs(tmp_dir):
"""Reads log files.
Delegates to generator function get_log_events() to provide individual log events
for each of the input log files."""
files = [("test", "%s/test_framework.log" % tmp_dir)]
for i in itertools.count():
logfile = "{}/node{}/regtest/debug.log".format(tmp_dir, i)
if not os.path.isfile(logfile):
break
files.append(("node%d" % i, logfile))
return heapq.merge(*[get_log_events(source, f) for source, f in files])
def get_log_events(source, logfile):
"""Generator function that returns individual log events.
Log events may be split over multiple lines. We use the timestamp
regex match as the marker for a new log event."""
try:
with open(logfile, 'r') as infile:
event = ''
timestamp = ''
for line in infile:
# skip blank lines
if line == '\n':
continue
# if this line has a timestamp, it's the start of a new log event.
time_match = TIMESTAMP_PATTERN.match(line)
if time_match:
if event:
yield LogEvent(timestamp=timestamp, source=source, event=event.rstrip())
event = line
timestamp = time_match.group()
# if it doesn't have a timestamp, it's a continuation line of the previous log.
else:
event += "\n" + line
# Flush the final event
yield LogEvent(timestamp=timestamp, source=source, event=event.rstrip())
except FileNotFoundError:
print("File %s could not be opened. Continuing without it." % logfile, file=sys.stderr)
def print_logs(log_events, color=False, html=False):
"""Renders the iterator of log events into text or html."""
if not html:
colors = defaultdict(lambda: '')
if color:
colors["test"] = "\033[0;36m" # CYAN
colors["node0"] = "\033[0;34m" # BLUE
colors["node1"] = "\033[0;32m" # GREEN
colors["node2"] = "\033[0;31m" # RED
colors["node3"] = "\033[0;33m" # YELLOW
colors["reset"] = "\033[0m" # Reset font color
for event in log_events:
print("{0} {1: <5} {2} {3}".format(colors[event.source.rstrip()], event.source, event.event, colors["reset"]))
else:
try:
import jinja2
except ImportError:
print("jinja2 not found. Try `pip install jinja2`")
sys.exit(1)
print(jinja2.Environment(loader=jinja2.FileSystemLoader('./'))
.get_template('combined_log_template.html')
.render(title="Combined Logs from testcase", log_events=[event._asdict() for event in log_events]))
if __name__ == '__main__':
main()
| LIMXTEC/BitCore | test/functional/combine_logs.py | Python | mit | 4,611 | 0.004121 |
import models
def borradoPlan(sender, **kwargs):
borrada = kwargs['instance']
if borrada.tipo == "pr":
aBorrar = models.PlanPrepago.objects.filter(codplan=borrada)
else:
aBorrar = models.PlanPostpago.objects.filter(codplan=borrada)
aBorrar.delete()
def insertadoServicio(sender, **kwargs):
insertado = kwargs['instance']
    print(insertado)
if insertado.id is not None:
nuevoPaq = models.Paquete(codpaq=insertado.codserv,nombrepaq=insertado.nombreserv + ' Paquete',precio=insertado.costo)
nuevoPaq.save()
nuevoContiene = models.Contiene(codpaq=nuevoPaq,codserv=insertado,cantidad=1)
nuevoContiene.save()
def borradoServicio(sender, **kwargs):
borrado = kwargs['instance']
if borrado.id is not None:
contieneBorrar = models.Contiene.objects.all().filter(codpaq=borrado.codserv)
contieneBorrar.delete()
paqBorrar = models.Paquete.objects.all().filter(codpaq=borrado.codserv)
paqBorrar.delete()
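# Wiring sketch (assumed to live in the app's AppConfig.ready(); the sender
# model names `Plan` and `Servicio` are hypothetical guesses based on the
# field names used above):
#
#   from django.db.models.signals import post_save, pre_delete
#   pre_delete.connect(borradoPlan, sender=models.Plan)
#   post_save.connect(insertadoServicio, sender=models.Servicio)
#   pre_delete.connect(borradoServicio, sender=models.Servicio)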
| gres147679/IngSoftwareRectaFinal | Tarea5/ServiSoft/ServiSoft/WebAccess/signalActions.py | Python | gpl-2.0 | 948 | 0.033755 |
#!/usr/bin/env python
"""
AMQP Clock
Fires off simple messages at one-minute intervals to a topic
exchange named 'clock', with the topic of the message being
the local time as 'year.month.date.dow.hour.minute',
for example: '2007.11.26.1.12.33', where the dow (day of week)
is 0 for Sunday, 1 for Monday, and so on (similar to Unix crontab).
A consumer could then bind a queue to the routing key '#.0'
for example to get a message at the beginning of each hour.
2007-11-26 Barry Pederson <bp@barryp.org>
"""
from datetime import datetime
from optparse import OptionParser
from time import sleep
import amqplib.client_0_8 as amqp
Message = amqp.Message
EXCHANGE_NAME = 'clock'
TOPIC_PATTERN = '%Y.%m.%d.%w.%H.%M' # Python datetime.strftime() pattern
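# Consumer-side sketch for the '#.0' binding mentioned in the module
# docstring (illustrative only; assumes the same broker and an open channel
# `ch`, and the queue name is hypothetical):
#
#   ch.queue_declare(queue='hourly')
#   ch.queue_bind(queue='hourly', exchange=EXCHANGE_NAME, routing_key='#.0')
#   ch.basic_consume(queue='hourly', callback=handle_msg)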
def main():
parser = OptionParser()
parser.add_option('--host', dest='host',
help='AMQP server to connect to (default: %default)',
default='localhost')
parser.add_option('-u', '--userid', dest='userid',
help='AMQP userid to authenticate as (default: %default)',
default='guest')
parser.add_option('-p', '--password', dest='password',
help='AMQP password to authenticate with (default: %default)',
default='guest')
parser.add_option('--ssl', dest='ssl', action='store_true',
help='Enable SSL with AMQP server (default: not enabled)',
default=False)
options, args = parser.parse_args()
conn = amqp.Connection(options.host, options.userid, options.password)
ch = conn.channel()
ch.access_request('/data', write=True, active=True)
ch.exchange_declare(EXCHANGE_NAME, type='topic')
# Make sure our first message is close to the beginning
# of a minute
now = datetime.now()
if now.second > 0:
sleep(60 - now.second)
while True:
now = datetime.now()
msg = Message(timestamp=now)
msg_topic = now.strftime(TOPIC_PATTERN)
ch.basic_publish(msg, EXCHANGE_NAME, routing_key=msg_topic)
# Don't know how long the basic_publish took, so
# grab the time again.
now = datetime.now()
sleep(60 - now.second)
ch.close()
conn.close()
if __name__ == '__main__':
main()
| mzdaniel/oh-mainline | vendor/packages/amqplib/demo/amqp_clock.py | Python | agpl-3.0 | 2,344 | 0.005973 |
# Natural Language Toolkit: Applications package
#
# Copyright (C) 2001-2017 NLTK Project
# Author: Edward Loper <edloper@gmail.com>
# Steven Bird <stevenbird1@gmail.com>
# URL: <http://nltk.org/>
# For license information, see LICENSE.TXT
"""
Interactive NLTK Applications:
chartparser: Chart Parser
chunkparser: Regular-Expression Chunk Parser
collocations: Find collocations in text
concordance: Part-of-speech concordancer
nemo: Finding (and Replacing) Nemo regular expression tool
rdparser: Recursive Descent Parser
srparser: Shift-Reduce Parser
wordnet: WordNet Browser
"""
# Import Tkinter-based modules if Tkinter is installed
try:
from six.moves import tkinter
except ImportError:
import warnings
warnings.warn("nltk.app package not loaded "
"(please install Tkinter library).")
else:
from nltk.app.chartparser_app import app as chartparser
from nltk.app.chunkparser_app import app as chunkparser
from nltk.app.collocations_app import app as collocations
from nltk.app.concordance_app import app as concordance
from nltk.app.nemo_app import app as nemo
from nltk.app.rdparser_app import app as rdparser
from nltk.app.srparser_app import app as srparser
from nltk.app.wordnet_app import app as wordnet
try:
from matplotlib import pylab
except ImportError:
import warnings
warnings.warn("nltk.app.wordfreq not loaded "
"(requires the matplotlib library).")
else:
from nltk.app.wordfreq_app import app as wordfreq
# skip doctests from this package
def setup_module(module):
from nose import SkipTest
raise SkipTest("nltk.app examples are not doctests")
| arju88nair/projectCulminate | venv/lib/python3.5/site-packages/nltk/app/__init__.py | Python | apache-2.0 | 1,733 | 0.000577 |
# -*- coding: utf-8 -*-
###############################################################################
#
# CopyObject
# Makes a copy of an existing object in S3 Storage.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class CopyObject(Choreography):
def __init__(self, temboo_session):
"""
Create a new instance of the CopyObject Choreo. A TembooSession object, containing a valid
set of Temboo credentials, must be supplied.
"""
super(CopyObject, self).__init__(temboo_session, '/Library/Amazon/S3/CopyObject')
def new_input_set(self):
return CopyObjectInputSet()
def _make_result_set(self, result, path):
return CopyObjectResultSet(result, path)
def _make_execution(self, session, exec_id, path):
return CopyObjectChoreographyExecution(session, exec_id, path)
class CopyObjectInputSet(InputSet):
"""
An InputSet with methods appropriate for specifying the inputs to the CopyObject
Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
"""
def set_AWSAccessKeyId(self, value):
"""
Set the value of the AWSAccessKeyId input for this Choreo. ((required, string) The Access Key ID provided by Amazon Web Services.)
"""
super(CopyObjectInputSet, self)._set_input('AWSAccessKeyId', value)
def set_AWSSecretKeyId(self, value):
"""
Set the value of the AWSSecretKeyId input for this Choreo. ((required, string) The Secret Key ID provided by Amazon Web Services.)
"""
super(CopyObjectInputSet, self)._set_input('AWSSecretKeyId', value)
def set_BucketName(self, value):
"""
Set the value of the BucketName input for this Choreo. ((required, string) The name of the bucket that will be the file destination.)
"""
super(CopyObjectInputSet, self)._set_input('BucketName', value)
def set_CannedACL(self, value):
"""
Set the value of the CannedACL input for this Choreo. ((optional, string) By default all objects are private (only owner has full access control). Valid values: private, public-read, public-read-write, authenticated-read, bucket-owner-read, bucket-owner-full-control.)
"""
super(CopyObjectInputSet, self)._set_input('CannedACL', value)
def set_ContentType(self, value):
"""
Set the value of the ContentType input for this Choreo. ((optional, string) ContentType. Default is application/octet-stream.)
"""
super(CopyObjectInputSet, self)._set_input('ContentType', value)
def set_FileToCopy(self, value):
"""
Set the value of the FileToCopy input for this Choreo. ((required, string) The name of the file to copy.)
"""
super(CopyObjectInputSet, self)._set_input('FileToCopy', value)
def set_IfMatch(self, value):
"""
Set the value of the IfMatch input for this Choreo. ((optional, string) Copies the object if its entity tag (ETag) matches the specified tag; otherwise returns a 412 HTTP status code error (failed precondition).)
"""
super(CopyObjectInputSet, self)._set_input('IfMatch', value)
def set_IfModifiedSince(self, value):
"""
Set the value of the IfModifiedSince input for this Choreo. ((optional, date) Copies if it has been modified since the specified time; otherwise returns a 412 HTTP status code error (failed precondition). Must be valid HTTP date. Can be used with IfMatch only.)
"""
super(CopyObjectInputSet, self)._set_input('IfModifiedSince', value)
def set_IfNoneMatch(self, value):
"""
Set the value of the IfNoneMatch input for this Choreo. ((optional, string) Copies the object if its entity tag (ETag) is different from the specified tag; otherwise returns a 412 HTTP status code error (failed precondition).)
"""
super(CopyObjectInputSet, self)._set_input('IfNoneMatch', value)
def set_IfUnmodifiedSince(self, value):
"""
Set the value of the IfUnmodifiedSince input for this Choreo. ((optional, date) Copies if it hasn't been modified since the specified time; otherwise returns a 412 HTTP status code error (failed precondition). Must be valid HTTP date. Can be used with IfMatch or IfNoneMatch only.)
"""
super(CopyObjectInputSet, self)._set_input('IfUnmodifiedSince', value)
def set_NewFileName(self, value):
"""
Set the value of the NewFileName input for this Choreo. ((required, string) The file name for the new copy.)
"""
super(CopyObjectInputSet, self)._set_input('NewFileName', value)
def set_ResponseFormat(self, value):
"""
Set the value of the ResponseFormat input for this Choreo. ((optional, string) The format that the response should be in. Valid values are "xml" (the default) and "json".)
"""
super(CopyObjectInputSet, self)._set_input('ResponseFormat', value)
def set_SSECAlgorithm(self, value):
"""
Set the value of the SSECAlgorithm input for this Choreo. ((optional, string) Specifies the server-side encryption with customer-provided encryption keys (SSE-C) algorithm to use when Amazon S3 creates the target object. Valid value: AES256.)
"""
super(CopyObjectInputSet, self)._set_input('SSECAlgorithm', value)
def set_SSECKey(self, value):
"""
Set the value of the SSECKey input for this Choreo. ((optional, string) The customer-provided AES-256 256-bit (32-byte) encryption key for Amazon S3 to use to encrypt or decrypt your copied data object.)
"""
super(CopyObjectInputSet, self)._set_input('SSECKey', value)
def set_SSECSourceAlgorithm(self, value):
"""
Set the value of the SSECSourceAlgorithm input for this Choreo. ((optional, string) Specifies the server-side encryption with customer-provided encryption keys (SSE-C) algorithm to use to decrypt the Amazon S3 source object being copied. Valid value: AES256.)
"""
super(CopyObjectInputSet, self)._set_input('SSECSourceAlgorithm', value)
def set_SSECSourceKey(self, value):
"""
Set the value of the SSECSourceKey input for this Choreo. ((optional, string) The customer-provided AES-256 256-bit (32-byte) encryption key for Amazon S3 to use to decrypt the copy source object.)
"""
super(CopyObjectInputSet, self)._set_input('SSECSourceKey', value)
def set_ServerSideEncryption(self, value):
"""
Set the value of the ServerSideEncryption input for this Choreo. ((optional, string) Specifies the server-side encryption algorithm to use when Amazon S3 creates the target object. Valid value: AES256.)
"""
super(CopyObjectInputSet, self)._set_input('ServerSideEncryption', value)
def set_StorageClass(self, value):
"""
Set the value of the StorageClass input for this Choreo. ((optional, string) Enables RRS customers to store their noncritical, reproducible data at lower levels of redundancy than Amazon S3's standard storage. Valid Values: STANDARD (default), REDUCED_REDUNDANCY.)
"""
super(CopyObjectInputSet, self)._set_input('StorageClass', value)
def set_WebsiteRedirectLocation(self, value):
"""
Set the value of the WebsiteRedirectLocation input for this Choreo. ((optional, string) If the bucket is configured as a website, redirects requests for this object to another object in the same bucket or to an external URL. Ex: /anotherPage.html, http://www.page.com. Length limit: 2 K.)
"""
super(CopyObjectInputSet, self)._set_input('WebsiteRedirectLocation', value)
class CopyObjectResultSet(ResultSet):
"""
A ResultSet with methods tailored to the values returned by the CopyObject Choreo.
The ResultSet object is used to retrieve the results of a Choreo execution.
"""
def getJSONFromString(self, str):
return json.loads(str)
def get_Response(self):
"""
Retrieve the value for the "Response" output from this Choreo execution. (The response from Amazon.)
"""
return self._output.get('Response', None)
class CopyObjectChoreographyExecution(ChoreographyExecution):
def _make_result_set(self, response, path):
return CopyObjectResultSet(response, path)
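# Usage sketch (assumes a valid temboo.core.session.TembooSession instance
# named `session`; the credential and file values are placeholders):
#
#   choreo = CopyObject(session)
#   inputs = choreo.new_input_set()
#   inputs.set_AWSAccessKeyId("AKIA...")
#   inputs.set_AWSSecretKeyId("...")
#   inputs.set_BucketName("my-bucket")
#   inputs.set_FileToCopy("source.txt")
#   inputs.set_NewFileName("copy.txt")
#   results = choreo.execute_with_results(inputs)
#   print(results.get_Response())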
| jordanemedlock/psychtruths | temboo/core/Library/Amazon/S3/CopyObject.py | Python | apache-2.0 | 9,236 | 0.005414 |
# -*- Python -*-
import os
def get_required_attr(config, attr_name):
attr_value = getattr(config, attr_name, None)
  if attr_value is None:
lit_config.fatal(
"No attribute %r in test configuration! You may need to run "
"tests from your build directory or add this attribute "
"to lit.site.cfg.py " % attr_name)
return attr_value
# Setup source root.
config.test_source_root = os.path.dirname(__file__)
config.name = 'UBSan-Minimal-' + config.target_arch
def build_invocation(compile_flags):
return " " + " ".join([config.clang] + compile_flags) + " "
target_cflags = [get_required_attr(config, "target_cflags")]
clang_ubsan_cflags = ["-fsanitize-minimal-runtime"] + target_cflags
clang_ubsan_cxxflags = config.cxx_mode_flags + clang_ubsan_cflags
# Define %clang and %clangxx substitutions to use in test RUN lines.
config.substitutions.append( ("%clang ", build_invocation(clang_ubsan_cflags)) )
config.substitutions.append( ("%clangxx ", build_invocation(clang_ubsan_cxxflags)) )
# Default test suffixes.
config.suffixes = ['.c', '.cc', '.cpp']
# Check that the host supports UndefinedBehaviorSanitizerMinimal tests
if config.host_os not in ['Linux', 'FreeBSD', 'NetBSD', 'Darwin', 'OpenBSD']: # TODO: Windows
config.unsupported = True
# Don't target x86_64h if the test machine can't execute x86_64h binaries.
if '-arch x86_64h' in target_cflags and 'x86_64h' not in config.available_features:
config.unsupported = True
config.available_features.add('arch=' + config.target_arch)
| r0mai/metashell | 3rd/templight/llvm/projects/compiler-rt/test/ubsan_minimal/lit.common.cfg.py | Python | gpl-3.0 | 1,525 | 0.013115 |
from steel.fields.base import *
from steel.fields.numbers import *
from steel.fields.strings import *
from steel.fields.compression import *
from steel.fields.compound import *
from steel.fields.integrity import *
| gulopine/steel | steel/fields/__init__.py | Python | bsd-3-clause | 214 | 0 |
#!/usr/bin/env python
#
# Copyright Science and Technology Facilities Council, 2009-2012.
#
# This file is part of ARTEMIS.
#
# ARTEMIS is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ARTEMIS is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ARTEMIS. If not, see <http://www.gnu.org/licenses/>.
#
import urllib2, datetime
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.mlab import griddata
from matplotlib import colors
from sys import exit as sys_exit
#Fall back to simplejson for versions of python < 2.5 (simplejson requires seperate install)
try:
import json
except ImportError:
try:
import simplejson as json
except ImportError:
sys_exit("ERROR: Unable to find a usable json module, is simplejson installed?")
TEMP_MIN = 15
TEMP_MAX = 38
DPI = 106
cdict = {
'red' : ((0.0, 0.1, 0.1), (0.25, 0.0, 0.0), (0.5, 1.0, 1.0), (0.75, 1.0, 1.0), (1.0, 1.0, 1.0)),
'green' : ((0.0, 0.1, 0.1), (0.25, 0.0, 0.0), (0.5, 0.0, 0.0), (0.75, 1.0, 1.0), (1.0, 1.0, 1.0)),
'blue' : ((0.0, 0.1, 0.1), (0.25, 0.5, 0.5), (0.5, 0.0, 0.0), (0.75, 0.0, 0.0), (1.0, 1.0, 1.0)),
}
my_cmap = colors.LinearSegmentedColormap('my_colormap',cdict,256)
#pcolor(rand(10,10),cmap=plt.cm.jet)
def process(d, f, mode):
x = []
y = []
z = []
for i in d:
if "TEMPERATURE" in i[0]:
r = float(i[3])
c = float(i[4])
v = float(i[1])
x.append(r)
y.append(c)
z.append(v)
plot(x, y, z, "R89 HPD Room", f, mode)
def plot(x, y, z, title, filename, mode):
w = max(x) - min(x)
h = max(y) - min(y)
xi = np.linspace(min(x), max(x), w * 4)
yi = np.linspace(min(y), max(y), h * 4)
zi = griddata(x,y,z,xi,yi)
x = np.array(x)
y = np.array(y)
z = np.array(z)
plt.scatter(x,y,marker='o',c='b',s=5,zorder=10)
#CS = plt.contour(xi,yi,zi,15,linewidths=0.5,colors='k')
#CS = plt.contourf(xi,yi,zi,15,cmap=plt.cm.jet)
plt.pcolor(xi,yi,zi,cmap=plt.cm.jet)
plt.colorbar()
plt.clim(TEMP_MIN, TEMP_MAX)
ax = plt.axes()
ax.set_aspect('equal')
plt.xlim(min(x), max(x))
plt.ylim(max(y), min(y))
if mode == "range":
f = "hm/hm_%s.png" % filename
else:
f = filename
if mode == "gui":
        # Set the title before blocking in show(); a colorbar was already
        # added above, so adding another here would just duplicate it.
        plt.suptitle(title)
        plt.show()
else:
for a in ax.get_xticklabels():
a.set_visible(False)
for a in ax.get_yticklabels():
a.set_visible(False)
plt.savefig(f, dpi=DPI)
print("Wrote " + f)
plt.clf()
if __name__ == "__main__":
from optparse import OptionParser
VERSION = "1.0"
parser = OptionParser(version=VERSION)
parser.usage = " %prog URL [options]"
parser.description = "A utility to plot heatmaps from artemis probe data."
parser.add_option("--mode", metavar="STR", dest="mode", default="single", help="Run mode (single, gui or range)")
parser.add_option("--filename", metavar="STR", dest="filename", default="heatmap.png", help="Output filename (ignored in gui and range modes)")
(options, args) = parser.parse_args()
if len(args) == 1:
url = args[0]
p = urllib2.urlopen(url)
p = json.load(p)
if options.mode == "gui" or options.mode == "single":
p = p["probes"]
process(p, options.filename, options.mode)
elif options.mode == "range":
(time_start, period, time_end, p) = p
time_start = int(time_start)
period = int(period)
time_end = int(time_end)
for t in p.items():
(t,rv) = t
x = []
y = []
z = []
for r in rv:
(r,c,v) = r
                    if v is not None:
x.append(float(r))
y.append(float(c))
z.append(float(v))
if (len(x) == len(y)) and (len(x) == len(z)) and (len(x) > 0):
plot(
x,
y,
z,
"R89 HPD Room at %s" % datetime.datetime.fromtimestamp(time_start + period * int(t)).strftime("%Y-%m-%d %H:%M:%S"),
"%05d" % int(t),
options.mode
)
else:
import sys
sys.exit("ERROR: Unknown run mode")
else:
import sys
sys.exit("ERROR: URL not specified")
| jrha/artemis | tools/artemis-plot.py | Python | gpl-3.0 | 5,014 | 0.008975 |
from generic_module import BasicModule
from equipment.general import SolarPanel, DOCK_EQUIPMENT, WaterTank, CBM, Window, Battery, Comms
from equipment.lifesupport import UniversalToilet, WaterPurifier, OxygenElectrolyzer, RegenerableCO2Filter
from equipment.computer import DockingComputer, MissionComputer
from equipment.workshop import WorkbenchRack
import math
import numpy as np
from filtering import ClutterFilter
import util
import globalvars as gv
class ModuleComponent(object):
def __init__(self,pos = 0):
self.size = np.array([ 1 , 4.27 , 4.27 ])
self.sprite = None
self.module = None
self.nodes = []
self.equipment = []
self.edges = []
self.nodes.append(['hall'+str(pos),[0,0,0]])
self.entry_node = 'hall'+str(pos)
self.exit_node = 'hall'+str(pos)
if not hasattr(self,'name'): self.name = 'GenericComponent'
def refresh_image(self, imgfile, x_off = 0):
if self.sprite: self.sprite.delete()
import graphics_pyglet
self.sprite = graphics_pyglet.LayeredSprite(name=self.name,start_order = -30)
img = util.load_image(imgfile )
self.sprite.add_layer(self.name,img)
self.sprite._offset = [gv.config['ZOOM'] * 2 * x_off, 0]
#,anchor_x= int(
#if self.sprite is None: return
#
def draw(self,window):
#off_x = self.sprite.x
#self.sprite.update_sprite()
self.sprite.draw()
def __getstate__(self):
d = dict(self.__dict__)
del d['sprite']
return d
class DockingCap(ModuleComponent):
def __init__(self,pos=0):
self.name = 'OpenDock'+str(pos)
ModuleComponent.__init__(self,pos)
self.equipment.append( [ 'CBM'+str(pos), np.array([ -1 , 0 , 0 ]), np.array([ math.pi , 0]), 'CBM', CBM() ] )
#self.edges.append( [ ''.join(['hall',str(pos)]) , ''.join(['CBM',str(pos)]) ] )
def refresh_image(self, x_off = 0):
super(DockingCap, self).refresh_image('images/dockcap_comp.png',x_off)
class DockingCapClosed(DockingCap):
def __init__(self,pos=0):
DockingCap.__init__(self,pos)
self.equipment[0][1] = np.array([ 1 , 0 , 0])
self.equipment[0][2] = np.array([ 0 , 0])
def refresh_image(self, x_off = 0):
super(DockingCap, self).refresh_image('images/dockcap_comp_flip.png',x_off)
class DockingHub(ModuleComponent):
def __init__(self,pos=0):
self.name = 'OpenDock'+str(pos)
ModuleComponent.__init__(self,pos)
self.size = np.array([ 2 , 4.27 , 4.27 ])
self.equipment.append( [ 'CBM-L'+str(pos), np.array([ 0 , 1 , 0 ]), np.array([ math.pi/2 , 0]), 'CBM', CBM() ] )
self.equipment.append( [ 'CBM-R'+str(pos), np.array([ 0 , -1 , 0 ]), np.array([ -math.pi/2 , 0]), 'CBM', CBM() ] )
#self.edges.append( [ ''.join(['hall',str(pos)]) , ''.join(['CBM',str(pos)]) ] )
def refresh_image(self, x_off = 0):
super(DockingHub, self).refresh_image('images/double_comp.png',x_off)
class WorkshopRing(ModuleComponent):
def __init__(self,pos=0):
self.name = 'Workshop ring'+str(pos)
ModuleComponent.__init__(self,pos)
_sampdict = { 'nadir' : [0, -0.5, 0, -math.pi]}
for _d in _sampdict.keys():
self.equipment.append([ ''.join( [ _d , str( pos ) ] ), np.array([ 0 , _sampdict[_d][0] , _sampdict[_d][1] ]) , np.array([ _sampdict[_d][2] , _sampdict[_d][3] ]), 'WORKSHOP', WorkbenchRack() ])
#self.edges.append( [ ''.join( [ 'hall' , str( pos ) ] ) , ''.join( [ _d , str( pos ) ] ) ] )
def refresh_image(self, x_off = 0):
super(WorkshopRing, self).refresh_image('images/rack_comp.png',x_off)
class RackRing(ModuleComponent):
def __init__(self,pos=0):
self.name = 'Rack ring'+str(pos)
ModuleComponent.__init__(self,pos)
_sampdict = {'port' : [ -0.5, 0, math.pi, 0 ], 'starboard' : [ 0.5 , 0, -math.pi, 0 ], 'nadir' : [0, -0.5, 0, -math.pi]}
for _d in _sampdict.keys():
self.equipment.append([ ''.join( [ _d , str( pos ) ] ), np.array([ 0 , _sampdict[_d][0] , _sampdict[_d][1] ]) , np.array([ _sampdict[_d][2] , _sampdict[_d][3] ]), 'RACK', None ])
#self.edges.append( [ ''.join( [ 'hall' , str( pos ) ] ) , ''.join( [ _d , str( pos ) ] ) ] )
def refresh_image(self, x_off = 0):
super(RackRing, self).refresh_image('images/rack_comp.png',x_off)
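# Build-string legend (mirrors spawn_component below): '{' open docking cap,
# '}' closed docking cap, 'r' rack ring, 'O' double docking hub,
# 'w' workshop ring -- e.g. the default "{Orrrwrrr}" is a capped module with
# a hub, six rack rings and a workshop ring.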
def spawn_component(letter,pos=0):
if letter in '{':
return DockingCap(pos)
elif letter in '}':
return DockingCapClosed(pos)
elif letter in 'r':
return RackRing(pos)
elif letter in 'O':
return DockingHub(pos)
elif letter in 'w':
return WorkshopRing(pos)
class ModularModule(BasicModule):
def __init__(self,name = "Module", build_str = "{Orrrwrrr}" ):
self.component_string = build_str
self.components=[]
self.name=name
for ec,c in enumerate(self.component_string):
newc = spawn_component(c,ec)
if newc is not None:
newc.module = self
self.components.append(newc)
self.refresh_size()
BasicModule.__init__(self)
x_off = -self.size[0]/2
path_node = None
for c in self.components:
for n in c.nodes:
self.nodes[self.node(n[0])] = np.array([2,1,1])*(np.array([x_off,0,0]) + c.size*(n[1]+np.array([1,0,0]))/np.array([2,1,1]) )/self.size
for e in c.equipment:
loc = np.array([2,1,1])*(np.array([x_off,0,0]) + c.size*(e[1]+np.array([1,0,0]))/np.array([2,1,1]) )/self.size
self.add_equipment(e[0], e[4].install(self) if e[4] else None, loc, eq_orientation=e[2], eq_type=e[3] )
for e in c.edges:
self.add_edge(e[0],e[1])
if path_node:
self.add_edge( self.node(path_node), self.node(c.entry_node) )
path_node = c.exit_node
x_off += c.size[0]
self.refresh_image()
def refresh_size(self):
x,y,z = 0,0,0
for c in self.components:
x += c.size[0]
y = max(y,c.size[1])
z = max(z,c.size[2])
self.size = np.array([ x , y , z ])
def refresh_image(self):
if not gv.config['GRAPHICS']: return
if gv.config['GRAPHICS'] == 'pyglet':
import graphics_pyglet
if self.sprite: self.sprite.delete()
self.sprite = None#graphics_pyglet.LayeredSprite(name=self.name,start_order = -30)
x_off = -self.size[0]/2
for c in self.components:
#c.sprite = self.sprite
x_off += c.size[0] / 2.0
c.refresh_image(x_off)
x_off += c.size[0] / 2.0
def check_collision(self,x,y):
for c in self.components:
if c.sprite and c.sprite.contains(x,y): return True
return False
def draw(self,window):
zoom=gv.config['ZOOM']
for c in self.components:
l=self.location
c.sprite.update_sprite(zoom*l[0], zoom*l[1],-180*(self.orientation[0])/math.pi)
c.draw(window)
#self.img.blit(zoom*self.location[0]+window.width // 2, zoom*self.location[1]+window.height // 2, 0)
#if self.sprite and hasattr(self.sprite, 'update_sprite'):
# l=self.location
# self.sprite.update_sprite(zoom*l[0], zoom*l[1],-180*(self.orientation[0])/math.pi)
BasicModule.draw(self,window)
| facepalm/bliss-station-game | src/modular_module.py | Python | gpl-3.0 | 7,899 | 0.033675 |
"""
Django settings for edc_sms project.
Generated by 'django-admin startproject' using Django 3.0.6.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.0/ref/settings/
"""
import os
import configparser
from django.core.management.color import color_style
APP_NAME = 'edc_sms'
SITE_ID = 40
style = color_style()
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
ETC_DIR = '/etc/'
CONFIG_FILE = f'{APP_NAME}.ini'
CONFIG_PATH = os.path.join(ETC_DIR, APP_NAME, CONFIG_FILE)
config = configparser.ConfigParser()
config.read(CONFIG_PATH)
BASE_API_URL = config['edc_sms']['base_api_url']
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'pyl0n2@jo6os@3ft62-rcn%53^2n8#1(9ff%m%v!g24yzhy5x*'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.sites',
'django_crypto_fields.apps.AppConfig',
'django_extensions',
'simple_history',
'django_apscheduler',
'edc_model_admin.apps.AppConfig',
'edc_base.apps.AppConfig',
'edc_device.apps.AppConfig',
'edc_identifier.apps.AppConfig',
'edc_sms.apps.AppConfig',
'django_q'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'edc_dashboard.middleware.DashboardMiddleware',
]
ROOT_URLCONF = 'edc_sms.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'edc_sms.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Django q configurations
Q_CLUSTER = {
'name': 'edc_sms',
'retry': 60,
'orm': 'default',
}
# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# dashboards
DASHBOARD_URL_NAMES = {
'contact_listboard_url': 'contact_listboard_url',
}
LAB_DASHBOARD_URL_NAMES = {}
DASHBOARD_BASE_TEMPLATES = {
'listboard_base_template': 'edc_sms/base.html',
'contact_listboard_template': 'edc_sms/listboard.html',
}
# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Africa/Gaborone'
USE_I18N = True
USE_L10N = False
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/
STATIC_URL = '/static/'
| botswana-harvard/edc-sms | edc_sms/settings.py | Python | gpl-2.0 | 4,211 | 0.00095 |
def generate_ming_models(models):
pass | synappio/swagger-pyramid | swagger/models.py | Python | apache-2.0 | 42 | 0.02381 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# Generated by the Odoo plugin for Dia !
from odoo import api, fields, models
class CountryState(models.Model):
""" Add Municipalities reference in State """
_name = 'res.country.state'
_inherit = 'res.country.state'
_description="Country states"
municipality_id = fields.One2many('res.country.state.municipality', 'state_id', 'Municipalities in this state')
class StateMunicipality(models.Model):
"""States Municipalities"""
_name = 'res.country.state.municipality'
_description="State municipalities"
state_id = fields.Many2one('res.country.state', 'State', required=True, help='Name of the State to which the municipality belongs')
name = fields.Char('Municipality', required=True, help='Municipality name')
code = fields.Char('Code', size=3, required=True, help='Municipality code in max. three chars.')
parish_id = fields.One2many('res.country.state.municipality.parish', 'municipality_id', 'Parishes in this municipality')
class MunicipalityParish(models.Model):
"""States Parishes"""
_name = 'res.country.state.municipality.parish'
_description="Municipality parishes"
municipality_id = fields.Many2one('res.country.state.municipality', 'Municipality', help='Name of the Municipality to which the parish belongs')
name = fields.Char('Parish', required=True, help='Parish name')
    code = fields.Char('Code', size=3, required=True, help='Parish code in max. three chars.')
import sys
from sklearn.metrics import roc_curve
from sklearn.metrics import auc
# Function for All Models to produce Metrics ---------------------
def produce_model_metrics(fit, test_set, test_class_set, estimator):
"""
Purpose
----------
Function that will return predictions and probability metrics for said
predictions.
Parameters
----------
* fit: Fitted model containing the attribute feature_importances_
* test_set: dataframe/array containing the test set values
* test_class_set: array containing the target values for the test set
    * estimator: String representation of appropriate model, can only contain the
following: ['knn', 'rf', 'nn']
Returns
----------
    Dictionary with keys 'predictions', 'accuracy', 'fpr', 'tpr' and 'auc'
"""
my_estimators = {
'rf': 'estimators_',
'nn': 'out_activation_',
'knn': '_fit_method'
}
try:
# Captures whether first parameter is a model
if not hasattr(fit, 'fit'):
return print("'{0}' is not an instantiated model from scikit-learn".format(fit))
# Captures whether the model has been trained
if not vars(fit)[my_estimators[estimator]]:
return print("Model does not appear to be trained.")
except KeyError as e:
raise KeyError("'{0}' does not correspond with the appropriate key inside the estimators dictionary. \
Please refer to function to check `my_estimators` dictionary.".format(estimator))
# Outputting predictions and prediction probability
# for test set
predictions = fit.predict(test_set)
accuracy = fit.score(test_set, test_class_set)
    # We grab the second column of the output, which corresponds to
    # the predicted probabilities of the positive class
# Ordered wrt fit.classes_ in our case [0, 1] where 1 is our positive class
predictions_prob = fit.predict_proba(test_set)[:, 1]
# ROC Curve stuff
fpr, tpr, _ = roc_curve(test_class_set,
predictions_prob,
pos_label = 1)
auc_fit = auc(fpr, tpr)
return {'predictions': predictions,
'accuracy': accuracy,
'fpr': fpr,
'tpr': tpr,
'auc': auc_fit}
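# Minimal usage sketch (assumes `rf_fit` is a fitted
# sklearn.ensemble.RandomForestClassifier and the test arrays come from your
# own train/test split; all names here are illustrative):
#
#   metrics = produce_model_metrics(rf_fit, test_set, test_class_set, 'rf')
#   print(metrics['accuracy'], metrics['auc'])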
| raviolli77/machineLearning_breastCancer_Python | src/python/produce_model_metrics.py | Python | mit | 2,206 | 0.006346 |
import os
import unittest
import vtk, qt, ctk, slicer
import math
import sys
#
# AstroMomentMapsSelfTest
#
class AstroMomentMapsSelfTest:
def __init__(self, parent):
parent.title = "Astro MomentMaps SelfTest"
parent.categories = ["Testing.TestCases"]
parent.dependencies = ["AstroVolume"]
parent.contributors = ["""
Davide Punzo (Kapteyn Astronomical Institute) and
Thijs van der Hulst (Kapteyn Astronomical Institute)."""]
parent.helpText = """
This module was developed as a self test to perform the operations needed for generating moment maps.
"""
parent.acknowledgementText = """
""" # replace with organization, grant and thanks.
self.parent = parent
# Add this test to the SelfTest module's list for discovery when the module
# is created. Since this module may be discovered before SelfTests itself,
# create the list if it doesn't already exist.
try:
slicer.selfTests
except AttributeError:
slicer.selfTests = {}
slicer.selfTests['Astro MomentMaps SelfTest'] = self.runTest
def runTest(self):
tester = AstroMomentMapsSelfTestTest()
tester.runTest()
#
# qAstroMomentMapsSelfTestWidget
#
class AstroMomentMapsSelfTestWidget:
def __init__(self, parent = None):
if not parent:
self.parent = slicer.qMRMLWidget()
self.parent.setLayout(qt.QVBoxLayout())
self.parent.setMRMLScene(slicer.mrmlScene)
else:
self.parent = parent
self.layout = self.parent.layout()
if not parent:
self.setup()
self.parent.show()
def setup(self):
# Instantiate and connect widgets ...
# reload button
# (use this during development, but remove it when delivering
# your module to users)
self.reloadButton = qt.QPushButton("Reload")
self.reloadButton.toolTip = "Reload this module."
self.reloadButton.name = "AstroMomentMapsSelfTest Reload"
self.layout.addWidget(self.reloadButton)
self.reloadButton.connect('clicked()', self.onReload)
# reload and test button
# (use this during development, but remove it when delivering
# your module to users)
self.reloadAndTestButton = qt.QPushButton("Reload and Test")
self.reloadAndTestButton.toolTip = "Reload this module and then run the self tests."
self.layout.addWidget(self.reloadAndTestButton)
self.reloadAndTestButton.connect('clicked()', self.onReloadAndTest)
# Add vertical spacer
self.layout.addStretch(1)
def cleanup(self):
pass
def onReload(self,moduleName="AstroMomentMapsSelfTest"):
"""Generic reload method for any scripted module.
ModuleWizard will subsitute correct default moduleName.
"""
globals()[moduleName] = slicer.util.reloadScriptedModule(moduleName)
def onReloadAndTest(self,moduleName="AstroMomentMapsSelfTest"):
self.onReload()
evalString = 'globals()["%s"].%sTest()' % (moduleName, moduleName)
tester = eval(evalString)
tester.runTest()
#
# AstroMomentMapsSelfTestLogic
#
class AstroMomentMapsSelfTestLogic:
"""This class should implement all the actual
computation done by your module. The interface
should be such that other python code can import
this class and make use of the functionality without
requiring an instance of the Widget
"""
def __init__(self):
pass
def hasImageData(self,volumeNode):
"""This is a dummy logic method that
returns true if the passed in volume
node has valid image data
"""
if not volumeNode:
print('no volume node')
return False
if volumeNode.GetImageData() is None:
print('no image data')
return False
return True
class AstroMomentMapsSelfTestTest(unittest.TestCase):
"""
This is the test case for your scripted module.
"""
def delayDisplay(self,message,msec=100):
"""This utility method displays a small dialog and waits.
This does two things: 1) it lets the event loop catch up
to the state of the test so that rendering and widget updates
have all taken place before the test continues and 2) it
shows the user/developer/tester the state of the test
so that we'll know when it breaks.
"""
print(message)
self.info = qt.QDialog()
self.infoLayout = qt.QVBoxLayout()
self.info.setLayout(self.infoLayout)
self.label = qt.QLabel(message,self.info)
self.infoLayout.addWidget(self.label)
qt.QTimer.singleShot(msec, self.info.close)
self.info.exec_()
def setUp(self):
slicer.mrmlScene.Clear(0)
def runTest(self):
self.setUp()
self.test_AstroMomentMapsSelfTest()
def test_AstroMomentMapsSelfTest(self):
print("Running AstroMomentMapsSelfTest Test case:")
self.downloadWEIN069()
astroVolume = slicer.util.getNode("WEIN069")
rms = astroVolume.GetDisplayThreshold()
mainWindow = slicer.util.mainWindow()
mainWindow.moduleSelector().selectModule('AstroVolume')
mainWindow.moduleSelector().selectModule('AstroMomentMaps')
    astroMomentMapsModule = slicer.modules.astromomentmaps
astroMomentMapsModuleWidget = astroMomentMapsModule.widgetRepresentation()
AstroMomentMapsParameterNode = slicer.util.getNode("AstroMomentMapsParameters")
AstroMomentMapsParameterNode.SetIntensityMin(rms * 3)
QPushButtonList = astroMomentMapsModuleWidget.findChildren(qt.QPushButton)
for QPushButton in (QPushButtonList):
if QPushButton.name == "ApplyButton":
ApplyPushButton = QPushButton
self.delayDisplay('Calculating moment maps', 700)
ApplyPushButton.click()
ZeroMomentMapVolume = slicer.mrmlScene.GetNodeByID(AstroMomentMapsParameterNode.GetZeroMomentVolumeNodeID())
pixelValue0 = ZeroMomentMapVolume.GetImageData().GetScalarComponentAsFloat(56, 68, 0, 0)
FirstMomentMapVolume = slicer.mrmlScene.GetNodeByID(AstroMomentMapsParameterNode.GetFirstMomentVolumeNodeID())
pixelValue1 = FirstMomentMapVolume.GetImageData().GetScalarComponentAsFloat(56, 68, 0, 0)
SecondMomentMapVolume = slicer.mrmlScene.GetNodeByID(AstroMomentMapsParameterNode.GetSecondMomentVolumeNodeID())
pixelValue2 = SecondMomentMapVolume.GetImageData().GetScalarComponentAsFloat(56, 68, 0, 0)
if (math.fabs(pixelValue0 - 0.511788547039) < 1.e-6 and \
math.fabs(pixelValue1 - 5231.70947266) < 1.e-6 and \
math.fabs(pixelValue2 - 28.8058509827) < 1.e-6):
self.delayDisplay('Test passed', 700)
else:
self.delayDisplay('Test failed', 700)
    # if run from the Slicer interface, remove the following exit
sys.exit()
def downloadWEIN069(self):
import AstroSampleData
astroSampleDataLogic = AstroSampleData.AstroSampleDataLogic()
self.delayDisplay('Getting WEIN069 Astro Volume')
WEIN069Volume = astroSampleDataLogic.downloadSample("WEIN069")
return WEIN069Volume
| Punzo/SlicerAstro | AstroMomentMaps/Testing/Python/AstroMomentMapsSelfTest.py | Python | bsd-3-clause | 6,817 | 0.008655 |
from setuptools import setup
setup(
name = 'pgdumper',
description = 'Simple PostgreSQL dumper',
author = 'Roman Dolgushin',
author_email = 'rd@roman-dolgushin.ru',
url = 'https://github.com/rdolgushin/pgdumper',
license = 'MIT',
version = '0.1',
packages = ['pgdumper'],
install_requires = ['mydumper'],
entry_points = {
'console_scripts': ['pgdumper = pgdumper.pgdumper:main']
},
classifiers = [
'Topic :: Utilities',
'Topic :: Database',
'Topic :: System :: Systems Administration',
'Environment :: Console',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX'
],
)
| rdolgushin/pgdumper | setup.py | Python | mit | 642 | 0.034268 |
from kivy.app import App
from kivy.factory import Factory
from kivy.properties import ObjectProperty
from kivy.lang import Builder
from electrum_gui.kivy.i18n import _
Builder.load_string('''
<FeeDialog@Popup>
id: popup
title: _('Transaction Fees')
size_hint: 0.8, 0.8
pos_hint: {'top':0.9}
method: 0
BoxLayout:
orientation: 'vertical'
BoxLayout:
orientation: 'horizontal'
size_hint: 1, 0.5
Label:
text: _('Method') + ':'
Button:
text: _('Mempool') if root.method == 2 else _('ETA') if root.method == 1 else _('Static')
background_color: (0,0,0,0)
bold: True
on_release:
root.method = (root.method + 1) % 3
root.update_slider()
root.update_text()
BoxLayout:
orientation: 'horizontal'
size_hint: 1, 0.5
Label:
text: (_('Target') if root.method > 0 else _('Fee')) + ':'
Label:
id: fee_target
text: ''
Slider:
id: slider
range: 0, 4
step: 1
on_value: root.on_slider(self.value)
Widget:
size_hint: 1, 0.5
BoxLayout:
orientation: 'horizontal'
size_hint: 1, 0.5
TopLabel:
id: fee_estimate
text: ''
font_size: '14dp'
Widget:
size_hint: 1, 0.5
BoxLayout:
orientation: 'horizontal'
size_hint: 1, 0.5
Button:
text: 'Cancel'
size_hint: 0.5, None
height: '48dp'
on_release: popup.dismiss()
Button:
text: 'OK'
size_hint: 0.5, None
height: '48dp'
on_release:
root.on_ok()
root.dismiss()
''')
class FeeDialog(Factory.Popup):
def __init__(self, app, config, callback):
Factory.Popup.__init__(self)
self.app = app
self.config = config
self.callback = callback
mempool = self.config.use_mempool_fees()
dynfees = self.config.is_dynfee()
self.method = (2 if mempool else 1) if dynfees else 0
self.update_slider()
self.update_text()
def update_text(self):
pos = int(self.ids.slider.value)
dynfees, mempool = self.get_method()
if self.method == 2:
fee_rate = self.config.depth_to_fee(pos)
target, estimate = self.config.get_fee_text(pos, dynfees, mempool, fee_rate)
msg = 'In the current network conditions, in order to be positioned %s, a transaction will require a fee of %s.' % (target, estimate)
elif self.method == 1:
fee_rate = self.config.eta_to_fee(pos)
target, estimate = self.config.get_fee_text(pos, dynfees, mempool, fee_rate)
msg = 'In the last few days, transactions that confirmed %s usually paid a fee of at least %s.' % (target.lower(), estimate)
else:
fee_rate = self.config.static_fee(pos)
target, estimate = self.config.get_fee_text(pos, dynfees, True, fee_rate)
msg = 'In the current network conditions, a transaction paying %s would be positioned %s.' % (target, estimate)
self.ids.fee_target.text = target
self.ids.fee_estimate.text = msg
def get_method(self):
dynfees = self.method > 0
mempool = self.method == 2
return dynfees, mempool
def update_slider(self):
slider = self.ids.slider
dynfees, mempool = self.get_method()
maxp, pos, fee_rate = self.config.get_fee_slider(dynfees, mempool)
slider.range = (0, maxp)
slider.step = 1
slider.value = pos
def on_ok(self):
value = int(self.ids.slider.value)
dynfees, mempool = self.get_method()
self.config.set_key('dynamic_fees', dynfees, False)
self.config.set_key('mempool_fees', mempool, False)
if dynfees:
if mempool:
self.config.set_key('depth_level', value, True)
else:
self.config.set_key('fee_level', value, True)
else:
self.config.set_key('fee_per_kb', self.config.static_fee(value), True)
self.callback()
def on_slider(self, value):
self.update_text()
| kyuupichan/electrum | gui/kivy/uix/dialogs/fee_dialog.py | Python | mit | 4,529 | 0.001987 |
import os
import sys
import time
import uuid
import logging
'''
@author: msune,lbergesio,omoya,CarolinaFernandez
@organization: i2CAT, OFELIA FP7
PolicyEngine Rule class
Encapsulates logic of a simple Rule
'''
from monscale.pypelib.Condition import Condition
from monscale.pypelib.persistence.PersistenceEngine import PersistenceEngine
from monscale.pypelib.utils.Logger import Logger
class TerminalMatch(Exception):
value = None
desc = None
def __init__(self,rType,desc):
if isinstance(rType['value'],bool):
self.value = rType['value']
else:
raise Exception("Unknown rule type")
self.desc = desc
def __str__(self):
return "%s "%self.desc
class Rule():
logger = Logger.getLogger()
#Class Attributes
_condition = None
_description = None
_errorMsg = None
_uuid = None #uuid.uuid4().hex
_defaultParser = "RegexParser"
_defaultPersistence = "Django"
#Types of rule
POSITIVE_TERMINAL={'value':True,'terminal':True}
POSITIVE_NONTERMINAL={'value':True,'terminal':False}
NEGATIVE_TERMINAL={'value':False,'terminal':True}
NEGATIVE_NONTERMINAL={'value':False,'terminal':False}
_types = [POSITIVE_TERMINAL,POSITIVE_NONTERMINAL,NEGATIVE_TERMINAL, NEGATIVE_NONTERMINAL]
#Rule type
_type = None
#Rule match Action
_matchAction=None
#Getters
def getCondition(self):
return self._condition
def getDescription(self):
return self._description
def getType(self):
return self._type
def getErrorMsg(self):
return self._errorMsg
def getMatchAction(self):
return self._matchAction
def getUUID(self):
return self._uuid
#setters
def setUUID(self,UUID):
self._uuid = UUID
#Constructor
def __init__(self,condition,description,errorMsg,ruleType=POSITIVE_TERMINAL,action=None,uuid=None):
if not isinstance(condition,Condition):
raise Exception("Object must be an instance of Condition")
if ruleType not in self._types:
raise Exception("Unknown rule type")
if action == None and (ruleType == self.NEGATIVE_NONTERMINAL or ruleType == self.POSITIVE_NONTERMINAL):
raise Exception("You cannot create non-terminal actionless rules")
self._condition = condition
self._matchAction = action
self._type = ruleType
self._description = description
self._errorMsg = errorMsg
self._uuid = uuid
def dump(self):
#Debug dump
toReturn = self._condition.dump()
toReturn+="=> %s "%str(self._type['value'])
if self._matchAction != None:
toReturn += "(%s) "%str(self._matchAction)
if self._type['terminal']:
toReturn += "[TERM] "
if self._description:
toReturn+=" #"+self._description
return toReturn
#Resolver is passed at evaluation time to be able to dynamically redirect actions
def evaluate(self,metaObj,resolver):
try:
result = self._condition.evaluate(metaObj,resolver)
Rule.logger.debug('Result was: %s',str(result))
except Exception as e:
Rule.logger.error('Error on rule: %s',self.dump())
Rule.logger.error('Exception: %s', str(e))
			Rule.logger.error('Rule will be skipped!')
result = False
if result:
if self._matchAction != None:
resolver.resolve(self._matchAction,metaObj)
#If is terminal raise TerminalMatch
if self._type['terminal']:
raise TerminalMatch(self._type,self._errorMsg)
#return whatever
return
def getConditionDump(self):
return self.getCondition().dump()
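# Usage sketch (hypothetical `cond` Condition, `metaObj` and `resolver`
# objects; all names are illustrative):
#
#   rule = Rule(cond, "block oversized flows", "flow too large",
#               ruleType=Rule.NEGATIVE_TERMINAL)
#   try:
#       rule.evaluate(metaObj, resolver)
#   except TerminalMatch as match:
#       print(match)  # prints the errorMsg of the matched terminal rule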
| jpardobl/monscale | monscale/pypelib/Rule.py | Python | bsd-3-clause | 3,386 | 0.056113 |
import re
from enum import Enum, auto
from fractions import Fraction
from io import BufferedIOBase
from numbers import Number
from typing import Iterable
import chardet
class CueCmd(Enum):
PERFORMER = auto()
TITLE = auto()
FILE = auto()
TRACK = auto()
INDEX = auto()
REM = auto()
EOF = auto()
def _unquote(s: str):
return s[1 + s.index('"'):s.rindex('"')]
_whitespace_pattern = re.compile(r'\s+')
_rem_tag_pattern = re.compile(r'([A-Z_]+) (.+)')
def parse_cd_time(offset: str) -> Number:
"""parse time in CD-DA (75fps) format to seconds, exactly
MM:SS:FF"""
m, s, f = map(int, offset.split(':'))
return m * 60 + s + Fraction(f, 75)
def _parse_cue_cmd(line: str, offset_in_seconds: bool = True):
line = line.strip()
cmd, args = _whitespace_pattern.split(line, 1)
if cmd == 'PERFORMER':
return CueCmd.PERFORMER, _unquote(args)
if cmd == 'TITLE':
return CueCmd.TITLE, _unquote(args)
if cmd == 'FILE':
return CueCmd.FILE, _unquote(args)
if cmd == 'TRACK':
number, _ = _whitespace_pattern.split(args, 1)
number = int(number)
return CueCmd.TRACK, number
if cmd == 'INDEX':
number, offset = _whitespace_pattern.split(args, 1)
number = int(number)
if offset_in_seconds:
offset = parse_cd_time(offset)
return CueCmd.INDEX, number, offset
if cmd == 'REM':
tag_name, tag_value = _rem_tag_pattern.fullmatch(args).groups()
return CueCmd.REM, tag_name, tag_value
return None
def read_cue_from_file(in_path: str) -> str:
with open(in_path, 'rb') as f:
assert isinstance(f, BufferedIOBase)
content = f.read()
encoding = chardet.detect(content)['encoding']
return content.decode(encoding)
def parse_cue_str(content: str, offset_in_seconds: bool = True) -> Iterable[tuple]:
for line in content.splitlines():
cmd = _parse_cue_cmd(line, offset_in_seconds)
if cmd:
yield cmd
yield CueCmd.EOF, None
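if __name__ == '__main__':
    # Minimal usage sketch (not part of the original module). The cue sheet
    # below is made up; note that parse_cd_time('01:02:03') is exactly
    # 62 + Fraction(3, 75) seconds.
    sample = (
        'PERFORMER "Some Artist"\n'
        'TITLE "Some Album"\n'
        'FILE "album.flac" WAVE\n'
        'TRACK 01 AUDIO\n'
        'INDEX 01 00:00:00\n'
    )
    for cmd in parse_cue_str(sample):
        print(cmd)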
| magicgoose/simple_dr_meter | audio_io/cue/cue_parser.py | Python | gpl-3.0 | 2,049 | 0.000488 |
from raiden.messages import Processed
from raiden.tests.utils import factories
from raiden.transfer.events import (
EventPaymentReceivedSuccess,
EventPaymentSentFailed,
EventPaymentSentSuccess,
)
from raiden.transfer.state_change import ActionCancelPayment, Block
from raiden.utils import sha3
ADDRESS = sha3(b"foo")[:20]
ADDRESS2 = sha3(b"boo")[:20]
ADDRESS3 = sha3(b"coo")[:20]
ADDRESS4 = sha3(b"goo")[:20]
SECRET = b"secret"
HASH = sha3(SECRET)
HASH2 = sha3(b"joo")
def test_transfer_statechange_operators():
# pylint: disable=unneeded-not
block_hash = factories.make_transaction_hash()
a = Block(block_number=2, gas_limit=1, block_hash=block_hash)
b = Block(block_number=2, gas_limit=1, block_hash=block_hash)
c = Block(block_number=3, gas_limit=1, block_hash=factories.make_transaction_hash())
assert a == b
assert not a != b
assert a != c
assert not a == c
a = ActionCancelPayment(2)
b = ActionCancelPayment(2)
c = ActionCancelPayment(3)
assert a == b
assert not a != b
assert a != c
assert not a == c
def test_event_operators():
a = EventPaymentSentSuccess(1, 4, 2, 5, sha3(b"target"))
b = EventPaymentSentSuccess(1, 4, 2, 5, sha3(b"target"))
c = EventPaymentSentSuccess(2, 7, 3, 4, sha3(b"target"))
d = EventPaymentSentSuccess(2, 7, 3, 4, sha3(b"differenttarget"))
# pylint: disable=unneeded-not
assert a == b
assert not a != b
assert a != c
assert not a == c
assert not c == d
a = EventPaymentSentFailed(1, 7, 2, "target", "BECAUSE")
b = EventPaymentSentFailed(1, 7, 2, "target", "BECAUSE")
c = EventPaymentSentFailed(3, 3, 3, "target", "UNKNOWN")
assert a == b
assert not a != b
assert a != c
assert not a == c
a = EventPaymentReceivedSuccess(4, 4, 2, 5, sha3(b"initiator"))
b = EventPaymentReceivedSuccess(4, 4, 2, 5, sha3(b"initiator"))
c = EventPaymentReceivedSuccess(1, 2, 3, 5, sha3(b"initiator"))
d = EventPaymentReceivedSuccess(1, 2, 3, 5, sha3(b"other initiator"))
assert a == b
assert not a != b
assert a != c
assert not a == c
assert c != d
assert not c == d
def test_message_operators():
message_identifier = 10
message_identifier2 = 11
a = Processed(message_identifier=message_identifier)
b = Processed(message_identifier=message_identifier)
c = Processed(message_identifier=message_identifier2)
# pylint: disable=unneeded-not
assert a == b
assert not a != b
assert a != c
assert not a == c
| hackaugusto/raiden | raiden/tests/unit/test_operators.py | Python | mit | 2,554 | 0.000392 |
from mock import MagicMock
import mock
from django.test import override_settings
from tests.utilities.utils import SafeTestCase
from tests.utilities.ldap import get_ldap_user_defaults
from accounts.models import (
User,
AccountRequest,
Intent
)
from projects.models import Project
from projects.receivers import check_general_eligibility
organization_info = {
'ucb': {
'long_name': 'University of Colorado Boulder',
'suffix': None,
'general_project_id': 'ucb-general'
},
'csu': {
'long_name': 'Colorado State University',
'suffix': 'colostate.edu',
'general_project_id': 'csu-general'
}
}
@override_settings(ORGANIZATION_INFO=organization_info)
class GeneralEligibilityReceiverTestCase(SafeTestCase):
def test_check_general_eligibility(self):
user_defaults = get_ldap_user_defaults()
auth_user_defaults = dict(
username=user_defaults['username'],
first_name=user_defaults['first_name'],
last_name=user_defaults['last_name'],
email=user_defaults['email']
)
auth_user = User.objects.create(**auth_user_defaults)
account_request_defaults = dict(
username=auth_user.username,
first_name=auth_user.first_name,
last_name=auth_user.last_name,
email=auth_user.email,
organization='ucb'
)
account_request = AccountRequest.objects.create(**account_request_defaults)
intent = Intent.objects.create(
account_request=account_request,
reason_summit=True
)
project_defaults = dict(
pi_emails=['pi@email.org'],
description='test project',
organization='ucb',
title='test project',
project_id='ucb-general'
)
project = Project.objects.create(**project_defaults)
check_general_eligibility(account_request.__class__,account_request=account_request)
project = Project.objects.get()
self.assertIn(auth_user,project.collaborators.all())
# No Summit intention declared, now add to 'general' account anyway
project.collaborators.clear()
intent.reason_summit = False
intent.save()
check_general_eligibility(account_request.__class__,account_request=account_request)
project = Project.objects.get()
self.assertIn(auth_user,project.collaborators.all())
def test_check_general_eligibility_suffixed(self):
user_defaults = get_ldap_user_defaults()
effective_uid = '{}@colostate.edu'.format(user_defaults['username'])
auth_user_defaults = dict(
username=effective_uid,
first_name=user_defaults['first_name'],
last_name=user_defaults['last_name'],
email=user_defaults['email']
)
auth_user = User.objects.create(**auth_user_defaults)
account_request_defaults = dict(
username=user_defaults['username'],
first_name=auth_user.first_name,
last_name=auth_user.last_name,
email=auth_user.email,
organization='csu'
)
account_request = AccountRequest.objects.create(**account_request_defaults)
intent = Intent.objects.create(
account_request=account_request,
reason_summit=True
)
project_defaults = dict(
pi_emails=['pi@email.org'],
description='test project',
organization='csu',
title='test project',
project_id='csu-general'
)
project = Project.objects.create(**project_defaults)
check_general_eligibility(account_request.__class__,account_request=account_request)
project = Project.objects.get()
self.assertIn(auth_user,project.collaborators.all())
def test_check_general_eligibility_no_intent(self):
user_defaults = get_ldap_user_defaults()
auth_user_defaults = dict(
username=user_defaults['username'],
first_name=user_defaults['first_name'],
last_name=user_defaults['last_name'],
email=user_defaults['email']
)
auth_user = User.objects.create(**auth_user_defaults)
account_request_defaults = dict(
username=auth_user.username,
first_name=auth_user.first_name,
last_name=auth_user.last_name,
email=auth_user.email,
organization='ucb'
)
account_request = AccountRequest.objects.create(**account_request_defaults)
check_general_eligibility(account_request.__class__,account_request=account_request)
| ResearchComputing/RCAMP | rcamp/tests/test_projects_receivers.py | Python | mit | 4,732 | 0.00317 |
'''
Copyright (C) 2013 Travis DeWolf
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import math
import numpy as np
import scipy.optimize
class Arm3Link:
def __init__(self, q=None, q0=None, L=None):
"""Set up the basic parameters of the arm.
All lists are in order [shoulder, elbow, wrist].
:param list q: the initial joint angles of the arm
:param list q0: the default (resting state) joint configuration
:param list L: the arm segment lengths
"""
# initial joint angles
if q is None: q = [math.pi/4, math.pi/4, 0]
self.q = q
# some default arm positions
if q0 is None: q0 = np.array([math.pi/4, math.pi/4, 0])
self.q0 = q0
# arm segment lengths
if L is None: L = np.array([1, 1, 1])
self.L = L
self.max_angles = [math.pi, math.pi, math.pi/4]
self.min_angles = [0, 0, -math.pi/4]
def get_xy(self, q=None):
"""Returns the corresponding hand xy coordinates for
a given set of joint angle values [shoulder, elbow, wrist],
and the above defined arm segment lengths, L
:param list q: the list of current joint angles
:returns list: the [x,y] position of the arm
"""
if q is None: q = self.q
x = self.L[0]*np.cos(q[0]) + \
self.L[1]*np.cos(q[0]+q[1]) + \
self.L[2]*np.cos(np.sum(q))
y = self.L[0]*np.sin(q[0]) + \
self.L[1]*np.sin(q[0]+q[1]) + \
self.L[2]*np.sin(np.sum(q))
return [x, y]
def inv_kin(self, xy):
"""This is just a quick write up to find the inverse kinematics
for a 3-link arm, using the SciPy optimize package minimization function.
Given an (x,y) position of the hand, return a set of joint angles (q)
using constraint based minimization, constraint is to match hand (x,y),
        minimize the distance of each joint from its default position (q0).
:param list xy: a tuple of the desired xy position of the arm
:returns list: the optimal [shoulder, elbow, wrist] angle configuration
"""
def distance_to_default(q, *args):
"""Objective function to minimize
Calculates the euclidean distance through joint space to the default
arm configuration. The weight list allows the penalty of each joint
being away from the resting position to be scaled differently, such
that the arm tries to stay closer to resting state more for higher
weighted joints than those with a lower weight.
:param list q: the list of current joint angles
:returns scalar: euclidean distance to the default arm position
"""
# weights found with trial and error, get some wrist bend, but not much
weight = [1, 1, 1.3]
return np.sqrt(np.sum([(qi - q0i)**2 * wi
for qi,q0i,wi in zip(q, self.q0, weight)]))
def x_constraint(q, xy):
"""Returns the corresponding hand xy coordinates for
a given set of joint angle values [shoulder, elbow, wrist],
and the above defined arm segment lengths, L
:param list q: the list of current joint angles
:returns: the difference between current and desired x position
"""
x = ( self.L[0]*np.cos(q[0]) + self.L[1]*np.cos(q[0]+q[1]) +
self.L[2]*np.cos(np.sum(q)) ) - xy[0]
return x
def y_constraint(q, xy):
"""Returns the corresponding hand xy coordinates for
a given set of joint angle values [shoulder, elbow, wrist],
and the above defined arm segment lengths, L
:param list q: the list of current joint angles
:returns: the difference between current and desired y position
"""
y = ( self.L[0]*np.sin(q[0]) + self.L[1]*np.sin(q[0]+q[1]) +
self.L[2]*np.sin(np.sum(q)) ) - xy[1]
return y
return scipy.optimize.fmin_slsqp( func=distance_to_default,
x0=self.q, eqcons=[x_constraint, y_constraint],
args=(xy,), iprint=0) # iprint=0 suppresses output
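# Quick single-target sketch (illustrative; test() below exercises the same
# API over a whole grid of targets):
#
#     arm = Arm3Link()
#     q = arm.inv_kin(xy=(0.5, 0.5))
#     print arm.get_xy(q)  # should be close to (0.5, 0.5)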
def test():
############Test it!##################
arm = Arm3Link()
# set of desired (x,y) hand positions
x = np.arange(-.75, .75, .05)
y = np.arange(0, .75, .05)
# threshold for printing out information, to find trouble spots
thresh = .025
count = 0
total_error = 0
# test it across the range of specified x and y values
for xi in range(len(x)):
for yi in range(len(y)):
# test the inv_kin function on a range of different targets
xy = [x[xi], y[yi]]
# run the inv_kin function, get the optimal joint angles
q = arm.inv_kin(xy=xy)
# find the (x,y) position of the hand given these angles
actual_xy = arm.get_xy(q)
# calculate the root squared error
error = np.sqrt((np.array(xy) - np.array(actual_xy))**2)
# total the error
total_error += error
# if the error was high, print out more information
if np.sum(error) > thresh:
print '-------------------------'
print 'Initial joint angles', arm.q
print 'Final joint angles: ', q
print 'Desired hand position: ', xy
print 'Actual hand position: ', actual_xy
print 'Error: ', error
print '-------------------------'
count += 1
print '\n---------Results---------'
print 'Total number of trials: ', count
print 'Total error: ', total_error
print '-------------------------'
if __name__ == "__main__":
test()
| Syralist/yet-another-hexapod | hexapy/Arm.py | Python | mit | 6,538 | 0.008259 |
# -*- mode: python; coding: utf-8 -*-
# Copyright © 2016 by Jeffrey C. Ollie
#
# This file is part of ceph_exporter.
#
# ceph_exporter is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# ceph_exporter is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ceph_exporter. If not, see
# <http://www.gnu.org/licenses/>.
from ...prometheus import Metric
__all__ = ['ceph_objects_recovered']
ceph_objects_recovered = Metric('ceph_objects_recovered', None, 'counter')
| jcollie/ceph_exporter | ceph_exporter/ceph/metrics/ceph_objects_recovered.py | Python | gpl-3.0 | 908 | 0 |
import unittest
import mock
from reports.tests.base import BaseBidsUtilityTest
from copy import copy
test_bids_invalid = [
[{
"owner": "test",
"date": "2016-03-17T13:32:25.774673+02:00",
"id": "44931d9653034837baff087cfc2fb5ac",
}],
[{
"status": "invalid",
"owner": "test",
"date": "2016-04-17T13:32:25.774673+02:00",
"id": "44931d9653034837baff087cfc2fb5ac"
}]
]
test_bids_valid = [
[{
"owner": "test",
"date": "2016-04-17T13:32:25.774673+02:00",
"id": "44931d9653034837baff087cfc2fb5ac",
}],
[{
"owner": "test",
"date": "2016-05-05T13:32:25.774673+02:00",
"id": "44931d9653034837baff087cfc2fb5ac",
}],
[{
"owner": "test",
"date": "2016-05-10T13:32:25.774673+02:00",
"id": "f55962b1374b43ddb886821c0582bc7f"
}]]
test_award_period = '2016-04-17T13:32:25.774673+02:00'
class ReportBidsViewTestCase(BaseBidsUtilityTest):
def test_bids_view_invalid_date(self):
data = {
"awardPeriod": {
"startDate": test_award_period,
},
'owner': 'teser',
"bids": test_bids_invalid[0],
}
self.assertLen(0, data)
def test_bids_view_invalid_mode(self):
data = {
'mode': 'test',
"awardPeriod": {
"startDate": test_award_period,
},
'owner': 'teser',
"bids": test_bids_valid[0],
}
self.assertLen(0, data)
def test_bids_view_invalid_status(self):
data = {
"procurementMethod": "open",
"awardPeriod": {
"startDate": test_award_period,
},
'owner': 'teser',
'bids': test_bids_invalid[1],
}
self.assertLen(0, data)
def test_bids_view_invalid_method(self):
data = {
"procurementMethod": "test",
"awardPeriod": {
"startDate": test_award_period,
},
'owner': 'teser',
'bids': test_bids_valid[0],
}
self.assertLen(0, data)
def test_bids_view_valid(self):
data = {
"awardPeriod": {
"startDate": test_award_period,
},
'owner': 'teser',
'bids': test_bids_valid[0],
}
self.assertLen(1, data)
response = list(self.utility.response)
self.assertEqual(1000, response[0]['value']['value'])
self.assertEqual(
"44931d9653034837baff087cfc2fb5ac", response[0]['value']['bid']
)
self.assertEqual(
"0006651836f34bcda9a030c0bf3c0e6e", response[0]['value']['tender']
)
self.assertEqual(
"UA-2016-11-12-000150", response[0]['value']['tenderID']
)
self.assertEqual(
u"UAH", response[0]['value']['currency']
)
def test_bids_view_period(self):
self.utility.owner = 'test'
data = {
"awardPeriod": {
"startDate": test_award_period,
},
'owner': 'teser',
'bids': test_bids_valid[0],
}
doc = copy(self.test_data)
doc.update(data)
self.utility.db.save(doc)
data = {
"_id": "10028cddd23540e5b6abb9efd2756d1d",
"awardPeriod": {
"startDate": "2016-11-09T15:00:00+02:00",
},
'owner': 'teser',
'bids': test_bids_valid[1],
}
doc = copy(self.test_data)
doc.update(data)
self.utility.db.save(doc)
data = {
"_id": "00028aasd2isdfsde5b6abb9efd2756d1d",
"awardPeriod": {
"startDate": "2016-11-30T15:00:00+02:00",
},
'owner': 'teser',
'bids': test_bids_valid[2],
}
doc = copy(self.test_data)
doc.update(data)
self.utility.db.save(doc)
self.utility.start_date = ''
self.utility.end_date = ''
self.utility.get_response()
self.assertEqual(3, len(list(self.utility.response)))
self.utility.start_date = "2016-11-10T15:00:00"
self.utility.end_date = ''
self.utility.get_response()
self.assertEqual(1, len(list(self.utility.response)))
self.utility.start_date = "2016-12-01T15:00:00"
self.utility.end_date = ''
self.utility.get_response()
self.assertEqual(0, len(list(self.utility.response)))
self.utility.start_date = "2016-11-01T15:00:00"
self.utility.end_date = "2016-12-01T15:00:00"
self.utility.get_response()
self.assertEqual(2, len(list(self.utility.response)))
def test_bids_view_with_lots(self):
data = {
"enquiryPeriod": {
"startDate": '2016-04-17T13:32:25.774673+02:00',
},
"awardPeriod": {
"startDate": test_award_period,
},
"lots": [
{
"status": "active",
"id": "324d7b2dd7a54df29bad6d0b7c91b2e9",
"value": {
"currency": "UAH",
"amount": 2000,
"valueAddedTaxIncluded": False,
},
}
],
"bids": [
{
"date": "2016-04-07T16:36:58.983102+03:00",
"owner": "test",
"id": "a22ef2b1374b43ddb886821c0582bc7dk",
"lotValues": [
{
"relatedLot": "324d7b2dd7a54df29bad6d0b7c91b2e9",
"date": "2016-04-07T16:36:58.983062+03:00",
}
],
}
],
}
self.assertLen(1, data)
class ReportBidsUtilityTestCase(BaseBidsUtilityTest):
def test_bids_utility_output(self):
data = {
"awardPeriod": {
"startDate": test_award_period,
},
'owner': 'test',
'bids': test_bids_valid[0],
}
mock_csv = mock.mock_open()
doc = copy(self.test_data)
doc.update(data)
self.utility.db.save(doc)
with mock.patch('__builtin__.open', mock_csv):
self.utility.run()
calls = [
mock.call('test/test@---bids.csv', 'w'),
mock.call().__enter__(),
mock.call().write(
str(','.join(self.utility.headers) + '\r\n')),
mock.call().write(
'0006651836f34bcda9a030c0bf3c0e6e,'
'UA-2016-11-12-000150,,1000,UAH,'
'44931d9653034837baff087cfc2fb5ac,,7.0\r\n'
),
mock.call().__exit__(None, None, None),
]
mock_csv.assert_has_calls(calls)
def test_bids_utility_output_with_lots(self):
data = {
"enquiryPeriod": {
"startDate": '2016-04-17T13:32:25.774673+02:00',
},
"awardPeriod": {
"startDate": test_award_period,
},
"lots": [
{
"status": "active",
"id": "324d7b2dd7a54df29bad6d0b7c91b2e9",
"value": {
"currency": "UAH",
"amount": 2000,
"valueAddedTaxIncluded": False,
},
}
],
"bids": [
{
"date": "2016-04-07T16:36:58.983102+03:00",
"owner": "test",
"id": "a22ef2b1374b43ddb886821c0582bc7dk",
"lotValues": [
{
"relatedLot": "324d7b2dd7a54df29bad6d0b7c91b2e9",
"date": "2016-04-07T16:36:58.983062+03:00",
}
],
}
],
}
mock_csv = mock.mock_open()
doc = copy(self.test_data)
doc.update(data)
self.utility.db.save(doc)
with mock.patch('__builtin__.open', mock_csv):
self.utility.run()
calls = [
mock.call('test/test@---bids.csv', 'w'),
mock.call().__enter__(),
mock.call().write(
str(','.join(self.utility.headers) + '\r\n')),
mock.call().write(
'0006651836f34bcda9a030c0bf3c0e6e,'
'UA-2016-11-12-000150,324d7b2dd7a54df29bad6d0b7c91b2e9,'
'2000,UAH,a22ef2b1374b43ddb886821c0582bc7dk,,7.0\r\n'
),
mock.call().__exit__(None, None, None),
]
mock_csv.assert_has_calls(calls)
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(ReportBidsViewTestCase))
suite.addTest(unittest.makeSuite(ReportBidsUtilityTestCase))
return suite
if __name__ == '__main__':
unittest.main(defaultTest='suite')
| yshalenyk/reports | reports/tests/bids_tests.py | Python | apache-2.0 | 9,230 | 0.000108 |
# -*- coding: utf-8 -*-
"""JSON flat file database system."""
from __future__ import unicode_literals, print_function, absolute_import
import codecs
import os
import os.path
import re
from fcntl import flock, LOCK_EX, LOCK_SH, LOCK_UN
import redis
import json
import time
from rophako.settings import Config
from rophako.utils import handle_exception
from rophako.log import logger
redis_client = None
cache_lifetime = 60*60 # 1 hour
def get(document, cache=True):
"""Get a specific document from the DB."""
logger.debug("JsonDB: GET {}".format(document))
# Exists?
if not exists(document):
logger.debug("Requested document doesn't exist")
return None
path = mkpath(document)
stat = os.stat(path)
# Do we have it cached?
data = get_cache(document) if cache else None
if data:
# Check if the cache is fresh.
if stat.st_mtime > get_cache(document+"_mtime"):
del_cache(document)
del_cache(document+"_mtime")
else:
return data
# Get a lock for reading.
lock = lock_cache(document)
# Get the JSON data.
data = read_json(path)
# Unlock!
unlock_cache(lock)
# Cache and return it.
if cache:
set_cache(document, data, expires=cache_lifetime)
set_cache(document+"_mtime", stat.st_mtime, expires=cache_lifetime)
return data
def commit(document, data, cache=True):
"""Insert/update a document in the DB."""
# Only allow one commit at a time.
lock = lock_cache(document)
# Need to create the file?
path = mkpath(document)
if not os.path.isfile(path):
parts = path.split("/")
parts.pop() # Remove the file part
directory = list()
# Create all the folders.
for part in parts:
directory.append(part)
segment = "/".join(directory)
if len(segment) > 0 and not os.path.isdir(segment):
logger.debug("JsonDB: mkdir {}".format(segment))
os.mkdir(segment, 0o755)
# Write the JSON.
write_json(path, data)
# Update the cached document.
if cache:
set_cache(document, data, expires=cache_lifetime)
set_cache(document+"_mtime", time.time(), expires=cache_lifetime)
# Release the lock.
unlock_cache(lock)
def delete(document):
"""Delete a document from the DB."""
path = mkpath(document)
if os.path.isfile(path):
logger.debug("Delete DB document: {}".format(path))
os.unlink(path)
del_cache(document)
def exists(document):
"""Query whether a document exists."""
path = mkpath(document)
return os.path.isfile(path)
def list_docs(path, recursive=False):
"""List all the documents at the path."""
root = os.path.join(Config.db.db_root, path)
docs = list()
if not os.path.isdir(root):
return []
for item in sorted(os.listdir(root)):
target = os.path.join(root, item)
db_path = os.path.join(path, item)
# Descend into subdirectories?
if os.path.isdir(target):
if recursive:
docs += [
os.path.join(item, name) for name in list_docs(db_path)
]
else:
continue
if target.endswith(".json"):
name = re.sub(r'\.json$', '', item)
docs.append(name)
return docs
def mkpath(document):
"""Turn a DB path into a JSON file path."""
if document.endswith(".json"):
# Let's not do that.
raise Exception("mkpath: document path already includes .json extension!")
return "{}/{}.json".format(Config.db.db_root, str(document))
def read_json(path):
"""Slurp, decode and return the data from a JSON document."""
path = str(path)
if not os.path.isfile(path):
raise Exception("Can't read JSON file {}: file not found!".format(path))
# Don't allow any fishy looking paths.
if ".." in path:
logger.error("ERROR: JsonDB tried to read a path with two dots: {}".format(path))
raise Exception()
# Open and lock the file.
fh = codecs.open(path, 'r', 'utf-8')
flock(fh, LOCK_SH)
text = fh.read()
flock(fh, LOCK_UN)
fh.close()
# Decode.
try:
data = json.loads(text)
except:
logger.error("Couldn't decode JSON data from {}".format(path))
handle_exception(Exception("Couldn't decode JSON from {}\n{}".format(
path,
text,
)))
data = None
return data
def write_json(path, data):
"""Write a JSON document."""
path = str(path)
# Don't allow any fishy looking paths.
if ".." in path:
logger.error("ERROR: JsonDB tried to write a path with two dots: {}".format(path))
raise Exception()
logger.debug("JsonDB: WRITE > {}".format(path))
# Open and lock the file.
fh = codecs.open(path, 'w', 'utf-8')
flock(fh, LOCK_EX)
# Write it.
fh.write(json.dumps(data, indent=4, separators=(',', ': ')))
# Unlock and close.
flock(fh, LOCK_UN)
fh.close()
############################################################################
# Redis Caching Functions #
############################################################################
disable_redis = False
def get_redis():
"""Connect to Redis or return the existing connection."""
global redis_client
global disable_redis
if not redis_client and not disable_redis:
try:
redis_client = redis.StrictRedis(
host = Config.db.redis_host,
port = Config.db.redis_port,
db = Config.db.redis_db,
)
redis_client.ping()
except Exception as e:
logger.error("Couldn't connect to Redis; memory caching will be disabled! {}".format(e))
redis_client = None
disable_redis = True
return redis_client
def set_cache(key, value, expires=None):
"""Set a key in the Redis cache."""
key = Config.db.redis_prefix + key
client = get_redis()
if not client:
return
try:
client.set(key, json.dumps(value))
# Expiration date?
if expires:
client.expire(key, expires)
except:
logger.error("Redis exception: couldn't set_cache {}".format(key))
def get_cache(key):
"""Get a cached item."""
key = Config.db.redis_prefix + key
value = None
client = get_redis()
if not client:
return
try:
value = client.get(key)
if value:
value = json.loads(value)
except:
logger.debug("Redis exception: couldn't get_cache {}".format(key))
value = None
return value
def del_cache(key):
"""Delete a cached item."""
key = Config.db.redis_prefix + key
client = get_redis()
if not client:
return
client.delete(key)
def lock_cache(key, timeout=5, expire=20):
"""Cache level 'file locking' implementation.
The `key` will be automatically suffixed with `_lock`.
The `timeout` is the max amount of time to wait for a lock.
The `expire` is how long a lock may exist before it's considered stale.
Returns True on success, None on failure to acquire lock."""
client = get_redis()
if not client:
return
# Take the lock.
lock = client.lock(key, timeout=expire)
lock.acquire()
logger.debug("Cache lock acquired: {}, expires in {}s".format(key, expire))
return lock
def unlock_cache(lock):
"""Release the lock on a cache key."""
if lock:
lock.release()
logger.debug("Cache lock released")
| kirsle/rophako | rophako/jsondb.py | Python | gpl-2.0 | 7,743 | 0.002583 |
"""
Test the memory module.
"""
# Author: Gael Varoquaux <gael dot varoquaux at normalesup dot org>
# Copyright (c) 2009 Gael Varoquaux
# License: BSD Style, 3 clauses.
import shutil
import os
import os.path
import pickle
import sys
import time
import datetime
import pytest
from joblib.memory import Memory
from joblib.memory import MemorizedFunc, NotMemorizedFunc
from joblib.memory import MemorizedResult, NotMemorizedResult
from joblib.memory import _FUNCTION_HASHES
from joblib.memory import register_store_backend, _STORE_BACKENDS
from joblib.memory import _build_func_identifier, _store_backend_factory
from joblib.memory import JobLibCollisionWarning
from joblib.parallel import Parallel, delayed
from joblib._store_backends import StoreBackendBase, FileSystemStoreBackend
from joblib.test.common import with_numpy, np
from joblib.test.common import with_multiprocessing
from joblib.testing import parametrize, raises, warns
from joblib._compat import PY3_OR_LATER
from joblib.hashing import hash
if sys.version_info[:2] >= (3, 4):
import pathlib
###############################################################################
# Module-level variables for the tests
def f(x, y=1):
""" A module-level function for testing purposes.
"""
return x ** 2 + y
###############################################################################
# Helper function for the tests
def check_identity_lazy(func, accumulator, location):
""" Given a function and an accumulator (a list that grows every
time the function is called), check that the function can be
decorated by memory to be a lazy identity.
"""
# Call each function with several arguments, and check that it is
# evaluated only once per argument.
memory = Memory(location=location, verbose=0)
func = memory.cache(func)
for i in range(3):
for _ in range(2):
assert func(i) == i
assert len(accumulator) == i + 1
def corrupt_single_cache_item(memory):
single_cache_item, = memory.store_backend.get_items()
output_filename = os.path.join(single_cache_item.path, 'output.pkl')
with open(output_filename, 'w') as f:
f.write('garbage')
def monkeypatch_cached_func_warn(func, monkeypatch_fixture):
# Need monkeypatch because pytest does not
# capture stdlib logging output (see
# https://github.com/pytest-dev/pytest/issues/2079)
recorded = []
def append_to_record(item):
recorded.append(item)
monkeypatch_fixture.setattr(func, 'warn', append_to_record)
return recorded
###############################################################################
# Tests
def test_memory_integration(tmpdir):
""" Simple test of memory lazy evaluation.
"""
accumulator = list()
# Rmk: this function has the same name than a module-level function,
# thus it serves as a test to see that both are identified
# as different.
def f(l):
accumulator.append(1)
return l
check_identity_lazy(f, accumulator, tmpdir.strpath)
# Now test clearing
for compress in (False, True):
for mmap_mode in ('r', None):
memory = Memory(location=tmpdir.strpath, verbose=10,
mmap_mode=mmap_mode, compress=compress)
# First clear the cache directory, to check that our code can
# handle that
# NOTE: this line would raise an exception, as the database file is
# still open; we ignore the error since we want to test what
# happens if the directory disappears
shutil.rmtree(tmpdir.strpath, ignore_errors=True)
g = memory.cache(f)
g(1)
g.clear(warn=False)
current_accumulator = len(accumulator)
out = g(1)
assert len(accumulator) == current_accumulator + 1
# Also, check that Memory.eval works similarly
assert memory.eval(f, 1) == out
assert len(accumulator) == current_accumulator + 1
# Now do a smoke test with a function defined in __main__, as the name
# mangling rules are more complex
f.__module__ = '__main__'
memory = Memory(location=tmpdir.strpath, verbose=0)
memory.cache(f)(1)
def test_no_memory():
""" Test memory with location=None: no memoize """
accumulator = list()
def ff(l):
accumulator.append(1)
return l
memory = Memory(location=None, verbose=0)
gg = memory.cache(ff)
for _ in range(4):
current_accumulator = len(accumulator)
gg(1)
assert len(accumulator) == current_accumulator + 1
def test_memory_kwarg(tmpdir):
" Test memory with a function with keyword arguments."
accumulator = list()
def g(l=None, m=1):
accumulator.append(1)
return l
check_identity_lazy(g, accumulator, tmpdir.strpath)
memory = Memory(location=tmpdir.strpath, verbose=0)
g = memory.cache(g)
# Smoke test with an explicit keyword argument:
assert g(l=30, m=2) == 30
def test_memory_lambda(tmpdir):
" Test memory with a function with a lambda."
accumulator = list()
def helper(x):
""" A helper function to define l as a lambda.
"""
accumulator.append(1)
return x
l = lambda x: helper(x)
check_identity_lazy(l, accumulator, tmpdir.strpath)
def test_memory_name_collision(tmpdir):
" Check that name collisions with functions will raise warnings"
memory = Memory(location=tmpdir.strpath, verbose=0)
@memory.cache
def name_collision(x):
""" A first function called name_collision
"""
return x
a = name_collision
@memory.cache
def name_collision(x):
""" A second function called name_collision
"""
return x
b = name_collision
with warns(JobLibCollisionWarning) as warninfo:
a(1)
b(1)
assert len(warninfo) == 1
assert "collision" in str(warninfo[0].message)
def test_memory_warning_lambda_collisions(tmpdir):
# Check that multiple use of lambda will raise collisions
memory = Memory(location=tmpdir.strpath, verbose=0)
a = lambda x: x
a = memory.cache(a)
b = lambda x: x + 1
b = memory.cache(b)
with warns(JobLibCollisionWarning) as warninfo:
assert a(0) == 0
assert b(1) == 2
assert a(1) == 1
# In recent Python versions, we can retrieve the code of lambdas,
# thus nothing is raised
assert len(warninfo) == 4
def test_memory_warning_collision_detection(tmpdir):
# Check that collisions impossible to detect will raise appropriate
# warnings.
memory = Memory(location=tmpdir.strpath, verbose=0)
a1 = eval('lambda x: x')
a1 = memory.cache(a1)
b1 = eval('lambda x: x+1')
b1 = memory.cache(b1)
with warns(JobLibCollisionWarning) as warninfo:
a1(1)
b1(1)
a1(0)
assert len(warninfo) == 2
assert "cannot detect" in str(warninfo[0].message).lower()
def test_memory_partial(tmpdir):
" Test memory with functools.partial."
accumulator = list()
def func(x, y):
""" A helper function to define l as a lambda.
"""
accumulator.append(1)
return y
import functools
function = functools.partial(func, 1)
check_identity_lazy(function, accumulator, tmpdir.strpath)
def test_memory_eval(tmpdir):
" Smoke test memory with a function with a function defined in an eval."
memory = Memory(location=tmpdir.strpath, verbose=0)
m = eval('lambda x: x')
mm = memory.cache(m)
assert mm(1) == 1
def count_and_append(x=[]):
""" A function with a side effect in its arguments.
    Return the length of its argument and append one element.
"""
len_x = len(x)
x.append(None)
return len_x
def test_argument_change(tmpdir):
""" Check that if a function has a side effect in its arguments, it
should use the hash of changing arguments.
"""
memory = Memory(location=tmpdir.strpath, verbose=0)
func = memory.cache(count_and_append)
# call the function for the first time, is should cache it with
# argument x=[]
assert func() == 0
# the second time the argument is x=[None], which is not cached
# yet, so the functions should be called a second time
assert func() == 1
@with_numpy
@parametrize('mmap_mode', [None, 'r'])
def test_memory_numpy(tmpdir, mmap_mode):
" Test memory with a function with numpy arrays."
accumulator = list()
def n(l=None):
accumulator.append(1)
return l
memory = Memory(location=tmpdir.strpath, mmap_mode=mmap_mode,
verbose=0)
cached_n = memory.cache(n)
rnd = np.random.RandomState(0)
for i in range(3):
a = rnd.random_sample((10, 10))
for _ in range(3):
assert np.all(cached_n(a) == a)
assert len(accumulator) == i + 1
@with_numpy
def test_memory_numpy_check_mmap_mode(tmpdir, monkeypatch):
"""Check that mmap_mode is respected even at the first call"""
memory = Memory(location=tmpdir.strpath, mmap_mode='r', verbose=0)
@memory.cache()
def twice(a):
return a * 2
a = np.ones(3)
b = twice(a)
c = twice(a)
assert isinstance(c, np.memmap)
assert c.mode == 'r'
assert isinstance(b, np.memmap)
assert b.mode == 'r'
# Corrupts the file, Deleting b and c mmaps
# is necessary to be able edit the file
del b
del c
corrupt_single_cache_item(memory)
# Make sure that corrupting the file causes recomputation and that
# a warning is issued.
recorded_warnings = monkeypatch_cached_func_warn(twice, monkeypatch)
d = twice(a)
assert len(recorded_warnings) == 1
exception_msg = 'Exception while loading results'
assert exception_msg in recorded_warnings[0]
# Asserts that the recomputation returns a mmap
assert isinstance(d, np.memmap)
assert d.mode == 'r'
def test_memory_exception(tmpdir):
""" Smoketest the exception handling of Memory.
"""
memory = Memory(location=tmpdir.strpath, verbose=0)
class MyException(Exception):
pass
@memory.cache
def h(exc=0):
if exc:
raise MyException
# Call once, to initialise the cache
h()
for _ in range(3):
# Call 3 times, to be sure that the Exception is always raised
with raises(MyException):
h(1)
def test_memory_ignore(tmpdir):
" Test the ignore feature of memory "
memory = Memory(location=tmpdir.strpath, verbose=0)
accumulator = list()
@memory.cache(ignore=['y'])
def z(x, y=1):
accumulator.append(1)
assert z.ignore == ['y']
z(0, y=1)
assert len(accumulator) == 1
z(0, y=1)
assert len(accumulator) == 1
z(0, y=2)
assert len(accumulator) == 1
def test_memory_args_as_kwargs(tmpdir):
"""Non-regression test against 0.12.0 changes.
https://github.com/joblib/joblib/pull/751
"""
memory = Memory(location=tmpdir.strpath, verbose=0)
@memory.cache
def plus_one(a):
return a + 1
# It's possible to call a positional arg as a kwarg.
assert plus_one(1) == 2
assert plus_one(a=1) == 2
# However, a positional argument that joblib hadn't seen
# before would cause a failure if it was passed as a kwarg.
assert plus_one(a=2) == 3
@parametrize('ignore, verbose, mmap_mode', [(['x'], 100, 'r'),
([], 10, None)])
def test_partial_decoration(tmpdir, ignore, verbose, mmap_mode):
"Check cache may be called with kwargs before decorating"
memory = Memory(location=tmpdir.strpath, verbose=0)
@memory.cache(ignore=ignore, verbose=verbose, mmap_mode=mmap_mode)
def z(x):
pass
assert z.ignore == ignore
assert z._verbose == verbose
assert z.mmap_mode == mmap_mode
def test_func_dir(tmpdir):
# Test the creation of the memory cache directory for the function.
memory = Memory(location=tmpdir.strpath, verbose=0)
path = __name__.split('.')
path.append('f')
path = tmpdir.join('joblib', *path).strpath
g = memory.cache(f)
# Test that the function directory is created on demand
func_id = _build_func_identifier(f)
location = os.path.join(g.store_backend.location, func_id)
assert location == path
assert os.path.exists(path)
assert memory.location == os.path.dirname(g.store_backend.location)
with warns(DeprecationWarning) as w:
assert memory.cachedir == g.store_backend.location
assert len(w) == 1
assert "The 'cachedir' attribute has been deprecated" in str(w[-1].message)
# Test that the code is stored.
# For the following test to be robust to previous execution, we clear
# the in-memory store
_FUNCTION_HASHES.clear()
assert not g._check_previous_func_code()
assert os.path.exists(os.path.join(path, 'func_code.py'))
assert g._check_previous_func_code()
# Test the robustness to failure of loading previous results.
func_id, args_id = g._get_output_identifiers(1)
output_dir = os.path.join(g.store_backend.location, func_id, args_id)
a = g(1)
assert os.path.exists(output_dir)
os.remove(os.path.join(output_dir, 'output.pkl'))
assert a == g(1)
def test_persistence(tmpdir):
# Test the memorized functions can be pickled and restored.
memory = Memory(location=tmpdir.strpath, verbose=0)
g = memory.cache(f)
output = g(1)
h = pickle.loads(pickle.dumps(g))
func_id, args_id = h._get_output_identifiers(1)
output_dir = os.path.join(h.store_backend.location, func_id, args_id)
assert os.path.exists(output_dir)
assert output == h.store_backend.load_item([func_id, args_id])
memory2 = pickle.loads(pickle.dumps(memory))
assert memory.store_backend.location == memory2.store_backend.location
# Smoke test that pickling a memory with location=None works
memory = Memory(location=None, verbose=0)
pickle.loads(pickle.dumps(memory))
g = memory.cache(f)
gp = pickle.loads(pickle.dumps(g))
gp(1)
def test_call_and_shelve(tmpdir):
# Test MemorizedFunc outputting a reference to cache.
for func, Result in zip((MemorizedFunc(f, tmpdir.strpath),
NotMemorizedFunc(f),
Memory(location=tmpdir.strpath,
verbose=0).cache(f),
Memory(location=None).cache(f),
),
(MemorizedResult, NotMemorizedResult,
MemorizedResult, NotMemorizedResult)):
assert func(2) == 5
result = func.call_and_shelve(2)
assert isinstance(result, Result)
assert result.get() == 5
result.clear()
with raises(KeyError):
result.get()
result.clear() # Do nothing if there is no cache.
def test_call_and_shelve_argument_hash(tmpdir):
# Verify that a warning is raised when accessing arguments_hash
# attribute from MemorizedResult
func = Memory(location=tmpdir.strpath, verbose=0).cache(f)
result = func.call_and_shelve(2)
assert isinstance(result, MemorizedResult)
with warns(DeprecationWarning) as w:
assert result.argument_hash == result.args_id
assert len(w) == 1
assert "The 'argument_hash' attribute has been deprecated" \
in str(w[-1].message)
def test_call_and_shelve_lazily_load_stored_result(tmpdir):
"""Check call_and_shelve only load stored data if needed."""
test_access_time_file = tmpdir.join('test_access')
test_access_time_file.write('test_access')
test_access_time = os.stat(test_access_time_file.strpath).st_atime
# check file system access time stats resolution is lower than test wait
# timings.
time.sleep(0.5)
assert test_access_time_file.read() == 'test_access'
if test_access_time == os.stat(test_access_time_file.strpath).st_atime:
# Skip this test when access time cannot be retrieved with enough
# precision from the file system (e.g. NTFS on windows).
pytest.skip("filesystem does not support fine-grained access time "
"attribute")
memory = Memory(location=tmpdir.strpath, verbose=0)
func = memory.cache(f)
func_id, argument_hash = func._get_output_identifiers(2)
result_path = os.path.join(memory.store_backend.location,
func_id, argument_hash, 'output.pkl')
assert func(2) == 5
first_access_time = os.stat(result_path).st_atime
time.sleep(1)
# Should not access the stored data
result = func.call_and_shelve(2)
assert isinstance(result, MemorizedResult)
assert os.stat(result_path).st_atime == first_access_time
time.sleep(1)
# Read the stored data => last access time is greater than first_access
assert result.get() == 5
assert os.stat(result_path).st_atime > first_access_time
def test_memorized_pickling(tmpdir):
for func in (MemorizedFunc(f, tmpdir.strpath), NotMemorizedFunc(f)):
filename = tmpdir.join('pickling_test.dat').strpath
result = func.call_and_shelve(2)
with open(filename, 'wb') as fp:
pickle.dump(result, fp)
with open(filename, 'rb') as fp:
result2 = pickle.load(fp)
assert result2.get() == result.get()
os.remove(filename)
def test_memorized_repr(tmpdir):
func = MemorizedFunc(f, tmpdir.strpath)
result = func.call_and_shelve(2)
func2 = MemorizedFunc(f, tmpdir.strpath)
result2 = func2.call_and_shelve(2)
assert result.get() == result2.get()
assert repr(func) == repr(func2)
# Smoke test with NotMemorizedFunc
func = NotMemorizedFunc(f)
repr(func)
repr(func.call_and_shelve(2))
# Smoke test for message output (increase code coverage)
func = MemorizedFunc(f, tmpdir.strpath, verbose=11, timestamp=time.time())
result = func.call_and_shelve(11)
result.get()
func = MemorizedFunc(f, tmpdir.strpath, verbose=11)
result = func.call_and_shelve(11)
result.get()
func = MemorizedFunc(f, tmpdir.strpath, verbose=5, timestamp=time.time())
result = func.call_and_shelve(11)
result.get()
func = MemorizedFunc(f, tmpdir.strpath, verbose=5)
result = func.call_and_shelve(11)
result.get()
def test_memory_file_modification(capsys, tmpdir, monkeypatch):
# Test that modifying a Python file after loading it does not lead to
# Recomputation
dir_name = tmpdir.mkdir('tmp_import').strpath
filename = os.path.join(dir_name, 'tmp_joblib_.py')
content = 'def f(x):\n print(x)\n return x\n'
with open(filename, 'w') as module_file:
module_file.write(content)
# Load the module:
monkeypatch.syspath_prepend(dir_name)
import tmp_joblib_ as tmp
memory = Memory(location=tmpdir.strpath, verbose=0)
f = memory.cache(tmp.f)
# First call f a few times
f(1)
f(2)
f(1)
# Now modify the module where f is stored without modifying f
with open(filename, 'w') as module_file:
module_file.write('\n\n' + content)
# And call f a couple more times
f(1)
f(1)
# Flush the .pyc files
shutil.rmtree(dir_name)
os.mkdir(dir_name)
# Now modify the module where f is stored, modifying f
content = 'def f(x):\n print("x=%s" % x)\n return x\n'
with open(filename, 'w') as module_file:
module_file.write(content)
# And call f more times prior to reloading: the cache should not be
# invalidated at this point as the active function definition has not
# changed in memory yet.
f(1)
f(1)
# Now reload
sys.stdout.write('Reloading\n')
sys.modules.pop('tmp_joblib_')
import tmp_joblib_ as tmp
f = memory.cache(tmp.f)
# And call f more times
f(1)
f(1)
out, err = capsys.readouterr()
assert out == '1\n2\nReloading\nx=1\n'
def _function_to_cache(a, b):
# Just a place holder function to be mutated by tests
pass
def _sum(a, b):
return a + b
def _product(a, b):
return a * b
def test_memory_in_memory_function_code_change(tmpdir):
_function_to_cache.__code__ = _sum.__code__
memory = Memory(location=tmpdir.strpath, verbose=0)
f = memory.cache(_function_to_cache)
assert f(1, 2) == 3
assert f(1, 2) == 3
with warns(JobLibCollisionWarning):
# Check that inline function modification triggers a cache invalidation
_function_to_cache.__code__ = _product.__code__
assert f(1, 2) == 2
assert f(1, 2) == 2
def test_clear_memory_with_none_location():
memory = Memory(location=None)
memory.clear()
if PY3_OR_LATER:
# Avoid flake8 F821 "undefined name" warning. func_with_kwonly_args and
# func_with_signature are redefined in the exec statement a few lines below
def func_with_kwonly_args():
pass
def func_with_signature():
pass
# exec is needed to define a function with a keyword-only argument and a
# function with signature while avoiding a SyntaxError on Python 2
exec("""
def func_with_kwonly_args(a, b, *, kw1='kw1', kw2='kw2'):
return a, b, kw1, kw2
def func_with_signature(a: int, b: float) -> float:
return a + b
""")
def test_memory_func_with_kwonly_args(tmpdir):
memory = Memory(location=tmpdir.strpath, verbose=0)
func_cached = memory.cache(func_with_kwonly_args)
assert func_cached(1, 2, kw1=3) == (1, 2, 3, 'kw2')
# Making sure that providing a keyword-only argument by
# position raises an exception
with raises(ValueError) as excinfo:
func_cached(1, 2, 3, kw2=4)
excinfo.match("Keyword-only parameter 'kw1' was passed as positional "
"parameter")
# Keyword-only parameter passed by position with cached call
# should still raise ValueError
func_cached(1, 2, kw1=3, kw2=4)
with raises(ValueError) as excinfo:
func_cached(1, 2, 3, kw2=4)
excinfo.match("Keyword-only parameter 'kw1' was passed as positional "
"parameter")
# Test 'ignore' parameter
func_cached = memory.cache(func_with_kwonly_args, ignore=['kw2'])
assert func_cached(1, 2, kw1=3, kw2=4) == (1, 2, 3, 4)
assert func_cached(1, 2, kw1=3, kw2='ignored') == (1, 2, 3, 4)
def test_memory_func_with_signature(tmpdir):
memory = Memory(location=tmpdir.strpath, verbose=0)
func_cached = memory.cache(func_with_signature)
assert func_cached(1, 2.) == 3.
def _setup_toy_cache(tmpdir, num_inputs=10):
memory = Memory(location=tmpdir.strpath, verbose=0)
@memory.cache()
def get_1000_bytes(arg):
return 'a' * 1000
inputs = list(range(num_inputs))
for arg in inputs:
get_1000_bytes(arg)
func_id = _build_func_identifier(get_1000_bytes)
hash_dirnames = [get_1000_bytes._get_output_identifiers(arg)[1]
for arg in inputs]
full_hashdirs = [os.path.join(get_1000_bytes.store_backend.location,
func_id, dirname)
for dirname in hash_dirnames]
return memory, full_hashdirs, get_1000_bytes
def test__get_items(tmpdir):
memory, expected_hash_dirs, _ = _setup_toy_cache(tmpdir)
items = memory.store_backend.get_items()
hash_dirs = [ci.path for ci in items]
assert set(hash_dirs) == set(expected_hash_dirs)
def get_files_size(directory):
full_paths = [os.path.join(directory, fn)
for fn in os.listdir(directory)]
return sum(os.path.getsize(fp) for fp in full_paths)
expected_hash_cache_sizes = [get_files_size(hash_dir)
for hash_dir in hash_dirs]
hash_cache_sizes = [ci.size for ci in items]
assert hash_cache_sizes == expected_hash_cache_sizes
output_filenames = [os.path.join(hash_dir, 'output.pkl')
for hash_dir in hash_dirs]
expected_last_accesses = [
datetime.datetime.fromtimestamp(os.path.getatime(fn))
for fn in output_filenames]
last_accesses = [ci.last_access for ci in items]
assert last_accesses == expected_last_accesses
def test__get_items_to_delete(tmpdir):
memory, expected_hash_cachedirs, _ = _setup_toy_cache(tmpdir)
items = memory.store_backend.get_items()
# bytes_limit set to keep only one cache item (each hash cache
# folder is about 1000 bytes + metadata)
items_to_delete = memory.store_backend._get_items_to_delete('2K')
nb_hashes = len(expected_hash_cachedirs)
assert set.issubset(set(items_to_delete), set(items))
assert len(items_to_delete) == nb_hashes - 1
# Sanity check bytes_limit=2048 is the same as bytes_limit='2K'
items_to_delete_2048b = memory.store_backend._get_items_to_delete(2048)
assert sorted(items_to_delete) == sorted(items_to_delete_2048b)
# bytes_limit greater than the size of the cache
items_to_delete_empty = memory.store_backend._get_items_to_delete('1M')
assert items_to_delete_empty == []
# All the cache items need to be deleted
bytes_limit_too_small = 500
items_to_delete_500b = memory.store_backend._get_items_to_delete(
bytes_limit_too_small)
    assert set(items_to_delete_500b) == set(items)
# Test LRU property: surviving cache items should all have a more
# recent last_access that the ones that have been deleted
items_to_delete_6000b = memory.store_backend._get_items_to_delete(6000)
surviving_items = set(items).difference(items_to_delete_6000b)
assert (max(ci.last_access for ci in items_to_delete_6000b) <=
min(ci.last_access for ci in surviving_items))
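# Typical end-user pattern for the size cap exercised above (sketch; note the
# cache is only trimmed when reduce_size() is called, e.g. from a cron job):
#
#     memory = Memory(location='/tmp/joblib_cache', bytes_limit='100M')
#     cached = memory.cache(expensive_function)
#     ...
#     memory.reduce_size()  # evicts least-recently-used items over the limit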
def test_memory_reduce_size(tmpdir):
memory, _, _ = _setup_toy_cache(tmpdir)
ref_cache_items = memory.store_backend.get_items()
# By default memory.bytes_limit is None and reduce_size is a noop
memory.reduce_size()
cache_items = memory.store_backend.get_items()
assert sorted(ref_cache_items) == sorted(cache_items)
# No cache items deleted if bytes_limit greater than the size of
# the cache
memory.bytes_limit = '1M'
memory.reduce_size()
cache_items = memory.store_backend.get_items()
assert sorted(ref_cache_items) == sorted(cache_items)
# bytes_limit is set so that only two cache items are kept
memory.bytes_limit = '3K'
memory.reduce_size()
cache_items = memory.store_backend.get_items()
assert set.issubset(set(cache_items), set(ref_cache_items))
assert len(cache_items) == 2
# bytes_limit set so that no cache item is kept
bytes_limit_too_small = 500
memory.bytes_limit = bytes_limit_too_small
memory.reduce_size()
cache_items = memory.store_backend.get_items()
assert cache_items == []
def test_memory_clear(tmpdir):
memory, _, _ = _setup_toy_cache(tmpdir)
memory.clear()
assert os.listdir(memory.store_backend.location) == []
def fast_func_with_complex_output():
complex_obj = ['a' * 1000] * 1000
return complex_obj
def fast_func_with_conditional_complex_output(complex_output=True):
complex_obj = {str(i): i for i in range(int(1e5))}
return complex_obj if complex_output else 'simple output'
@with_multiprocessing
def test_cached_function_race_condition_when_persisting_output(tmpdir, capfd):
# Test race condition where multiple processes are writing into
# the same output.pkl. See
# https://github.com/joblib/joblib/issues/490 for more details.
memory = Memory(location=tmpdir.strpath)
func_cached = memory.cache(fast_func_with_complex_output)
Parallel(n_jobs=2)(delayed(func_cached)() for i in range(3))
stdout, stderr = capfd.readouterr()
# Checking both stdout and stderr (ongoing PR #434 may change
# logging destination) to make sure there is no exception while
# loading the results
exception_msg = 'Exception while loading results'
assert exception_msg not in stdout
assert exception_msg not in stderr
@with_multiprocessing
def test_cached_function_race_condition_when_persisting_output_2(tmpdir,
capfd):
# Test race condition in first attempt at solving
# https://github.com/joblib/joblib/issues/490. The race condition
# was due to the delay between seeing the cache directory created
# (interpreted as the result being cached) and the output.pkl being
# pickled.
memory = Memory(location=tmpdir.strpath)
func_cached = memory.cache(fast_func_with_conditional_complex_output)
Parallel(n_jobs=2)(delayed(func_cached)(True if i % 2 == 0 else False)
for i in range(3))
stdout, stderr = capfd.readouterr()
# Checking both stdout and stderr (ongoing PR #434 may change
# logging destination) to make sure there is no exception while
# loading the results
exception_msg = 'Exception while loading results'
assert exception_msg not in stdout
assert exception_msg not in stderr
def test_memory_recomputes_after_an_error_while_loading_results(
tmpdir, monkeypatch):
memory = Memory(location=tmpdir.strpath)
def func(arg):
# This makes sure that the timestamp returned by two calls of
# func are different. This is needed on Windows where
# time.time resolution may not be accurate enough
time.sleep(0.01)
return arg, time.time()
cached_func = memory.cache(func)
input_arg = 'arg'
arg, timestamp = cached_func(input_arg)
# Make sure the function is correctly cached
assert arg == input_arg
# Corrupting output.pkl to make sure that an error happens when
# loading the cached result
corrupt_single_cache_item(memory)
# Make sure that corrupting the file causes recomputation and that
# a warning is issued.
recorded_warnings = monkeypatch_cached_func_warn(cached_func, monkeypatch)
recomputed_arg, recomputed_timestamp = cached_func(arg)
assert len(recorded_warnings) == 1
exception_msg = 'Exception while loading results'
assert exception_msg in recorded_warnings[0]
assert recomputed_arg == arg
assert recomputed_timestamp > timestamp
# Corrupting output.pkl to make sure that an error happens when
# loading the cached result
corrupt_single_cache_item(memory)
reference = cached_func.call_and_shelve(arg)
try:
reference.get()
raise AssertionError(
"It normally not possible to load a corrupted"
" MemorizedResult"
)
except KeyError as e:
message = "is corrupted"
assert message in str(e.args)
def test_deprecated_cachedir_behaviour(tmpdir):
# verify the right deprecation warnings are raised when using cachedir
# option instead of new location parameter.
with warns(None) as w:
memory = Memory(cachedir=tmpdir.strpath, verbose=0)
assert memory.store_backend.location.startswith(tmpdir.strpath)
assert len(w) == 1
assert "The 'cachedir' parameter has been deprecated" in str(w[-1].message)
with warns(None) as w:
memory = Memory()
assert memory.cachedir is None
assert len(w) == 1
assert "The 'cachedir' attribute has been deprecated" in str(w[-1].message)
error_regex = """You set both "location='.+ and "cachedir='.+"""
with raises(ValueError, match=error_regex):
memory = Memory(location=tmpdir.strpath, cachedir=tmpdir.strpath,
verbose=0)
class IncompleteStoreBackend(StoreBackendBase):
"""This backend cannot be instanciated and should raise a TypeError."""
pass
class DummyStoreBackend(StoreBackendBase):
"""A dummy store backend that does nothing."""
def _open_item(self, *args, **kwargs):
"""Open an item on store."""
"Does nothing"
def _item_exists(self, location):
"""Check if an item location exists."""
"Does nothing"
def _move_item(self, src, dst):
"""Move an item from src to dst in store."""
"Does nothing"
def create_location(self, location):
"""Create location on store."""
"Does nothing"
def exists(self, obj):
"""Check if an object exists in the store"""
return False
def clear_location(self, obj):
"""Clear object on store"""
"Does nothing"
def get_items(self):
"""Returns the whole list of items available in cache."""
return []
def configure(self, location, *args, **kwargs):
"""Configure the store"""
"Does nothing"
@parametrize("invalid_prefix", [None, dict(), list()])
def test_register_invalid_store_backends_key(invalid_prefix):
# verify the right exceptions are raised when passing a wrong backend key.
with raises(ValueError) as excinfo:
register_store_backend(invalid_prefix, None)
excinfo.match(r'Store backend name should be a string*')
def test_register_invalid_store_backends_object():
# verify the right exceptions are raised when passing a wrong backend
# object.
with raises(ValueError) as excinfo:
register_store_backend("fs", None)
excinfo.match(r'Store backend should inherit StoreBackendBase*')
def test_memory_default_store_backend():
# test an unknow backend falls back into a FileSystemStoreBackend
with raises(TypeError) as excinfo:
Memory(location='/tmp/joblib', backend='unknown')
excinfo.match(r"Unknown location*")
def test_warning_on_unknown_location_type():
class NonSupportedLocationClass:
pass
unsupported_location = NonSupportedLocationClass()
with warns(UserWarning) as warninfo:
_store_backend_factory("local", location=unsupported_location)
    # The misspelling below matches the warning text emitted by joblib.
    expected_message = ("Instanciating a backend using a "
                        "NonSupportedLocationClass as a location is not "
                        "supported by joblib")
    assert expected_message in str(warninfo[0].message)
def test_instanciate_incomplete_store_backend():
    # Verify that registering an external incomplete store backend raises an
    # exception when one tries to instantiate it.
backend_name = "isb"
register_store_backend(backend_name, IncompleteStoreBackend)
assert (backend_name, IncompleteStoreBackend) in _STORE_BACKENDS.items()
with raises(TypeError) as excinfo:
_store_backend_factory(backend_name, "fake_location")
excinfo.match(r"Can't instantiate abstract class "
"IncompleteStoreBackend with abstract methods*")
def test_dummy_store_backend():
# Verify that registering an external store backend works.
backend_name = "dsb"
register_store_backend(backend_name, DummyStoreBackend)
assert (backend_name, DummyStoreBackend) in _STORE_BACKENDS.items()
backend_obj = _store_backend_factory(backend_name, "dummy_location")
assert isinstance(backend_obj, DummyStoreBackend)
@pytest.mark.skipif(sys.version_info[:2] < (3, 4),
reason="pathlib is available for python versions >= 3.4")
def test_instanciate_store_backend_with_pathlib_path():
    # Instantiate a FileSystemStoreBackend using a pathlib.Path object
path = pathlib.Path("some_folder")
backend_obj = _store_backend_factory("local", path)
assert backend_obj.location == "some_folder"
def test_filesystem_store_backend_repr(tmpdir):
# Verify string representation of a filesystem store backend.
repr_pattern = 'FileSystemStoreBackend(location="{location}")'
backend = FileSystemStoreBackend()
assert backend.location is None
repr(backend) # Should not raise an exception
assert str(backend) == repr_pattern.format(location=None)
    # backend location is passed explicitly via the configure method (called
    # by the internal _store_backend_factory function)
backend.configure(tmpdir.strpath)
assert str(backend) == repr_pattern.format(location=tmpdir.strpath)
repr(backend) # Should not raise an exception
def test_memory_objects_repr(tmpdir):
# Verify printable reprs of MemorizedResult, MemorizedFunc and Memory.
def my_func(a, b):
return a + b
memory = Memory(location=tmpdir.strpath, verbose=0)
memorized_func = memory.cache(my_func)
memorized_func_repr = 'MemorizedFunc(func={func}, location={location})'
assert str(memorized_func) == memorized_func_repr.format(
func=my_func,
location=memory.store_backend.location)
memorized_result = memorized_func.call_and_shelve(42, 42)
memorized_result_repr = ('MemorizedResult(location="{location}", '
'func="{func}", args_id="{args_id}")')
assert str(memorized_result) == memorized_result_repr.format(
location=memory.store_backend.location,
func=memorized_result.func_id,
args_id=memorized_result.args_id)
assert str(memory) == 'Memory(location={location})'.format(
location=memory.store_backend.location)
def test_memorized_result_pickle(tmpdir):
    # Verify a MemorizedResult object can be pickled/unpickled. Non-regression
    # test introduced following issue
# https://github.com/joblib/joblib/issues/747
memory = Memory(location=tmpdir.strpath)
@memory.cache
def g(x):
return x**2
memorized_result = g.call_and_shelve(4)
memorized_result_pickle = pickle.dumps(memorized_result)
memorized_result_loads = pickle.loads(memorized_result_pickle)
assert memorized_result.store_backend.location == \
memorized_result_loads.store_backend.location
assert memorized_result.func == memorized_result_loads.func
assert memorized_result.args_id == memorized_result_loads.args_id
assert str(memorized_result) == str(memorized_result_loads)
def compare(left, right, ignored_attrs=None):
if ignored_attrs is None:
ignored_attrs = []
left_vars = vars(left)
right_vars = vars(right)
assert set(left_vars.keys()) == set(right_vars.keys())
for attr in left_vars.keys():
if attr in ignored_attrs:
continue
assert left_vars[attr] == right_vars[attr]
@pytest.mark.parametrize('memory_kwargs',
[{'compress': 3, 'verbose': 2},
{'mmap_mode': 'r', 'verbose': 5, 'bytes_limit': 1e6,
'backend_options': {'parameter': 'unused'}}])
def test_memory_pickle_dump_load(tmpdir, memory_kwargs):
memory = Memory(location=tmpdir.strpath, **memory_kwargs)
memory_reloaded = pickle.loads(pickle.dumps(memory))
# Compare Memory instance before and after pickle roundtrip
compare(memory.store_backend, memory_reloaded.store_backend)
compare(memory, memory_reloaded,
ignored_attrs=set(['store_backend', 'timestamp']))
assert hash(memory) == hash(memory_reloaded)
func_cached = memory.cache(f)
func_cached_reloaded = pickle.loads(pickle.dumps(func_cached))
# Compare MemorizedFunc instance before/after pickle roundtrip
compare(func_cached.store_backend, func_cached_reloaded.store_backend)
compare(func_cached, func_cached_reloaded,
ignored_attrs=set(['store_backend', 'timestamp']))
assert hash(func_cached) == hash(func_cached_reloaded)
# Compare MemorizedResult instance before/after pickle roundtrip
memorized_result = func_cached.call_and_shelve(1)
memorized_result_reloaded = pickle.loads(pickle.dumps(memorized_result))
compare(memorized_result.store_backend,
memorized_result_reloaded.store_backend)
compare(memorized_result, memorized_result_reloaded,
ignored_attrs=set(['store_backend', 'timestamp']))
assert hash(memorized_result) == hash(memorized_result_reloaded)
| lesteve/joblib | joblib/test/test_memory.py | Python | bsd-3-clause | 39,966 | 0.0002 |
'''
Created on Jul 9, 2014
@author: viejoemer
How to remove an arbitrary element and retrieve that item at the same time?
pop()
Remove and return an arbitrary element from the set. Raises KeyError
if the set is empty.
'''
#Create a set with values.
s_1 = set([1,2,3])
print("set one", s_1)
s_2 = set()
print("set one", s_2)
# Removing an element
value = s_1.pop()
print("Set after pop:", s_1)
print("Value removed:", value)
# If the set is empty, pop() raises a KeyError
value = s_2.pop() | OxPython/Python_set_pop | src/set_pop.py | Python | epl-1.0 | 574 | 0.017483 |
"""Classes that determine convergence of an algorithm run
based on population stagnation or max raw score reached"""
class Convergence(object):
"""
    Base class that all convergence objects should inherit from.
It is necessary to supply the population instance, to be
able to obtain current and former populations.
"""
def __init__(self, population_instance):
self.pop = population_instance
self.pops = {}
def converged(self):
"""This function is called to find out if the algorithm
run has converged, it should return True or False.
Overwrite this in the inherited class."""
raise NotImplementedError
def populate_pops(self, to_gen):
"""Populate the pops dictionary with how the population
looked after i number of generations."""
for i in range(to_gen):
if i not in self.pops.keys():
self.pops[i] = self.pop.get_population_after_generation(i)
class GenerationRepetitionConvergence(Convergence):
"""Returns True if the latest finished population is stagnated for
number_of_generations.
Parameters:
number_of_generations: int
How many generations need to be equal before convergence.
number_of_individuals: int
How many of the fittest individuals should be included in the
convergence test. Default is -1 meaning all in the population.
max_generations: int
The maximum number of generations the GA is allowed to run.
Default is indefinite.
"""
def __init__(self, population_instance, number_of_generations,
number_of_individuals=-1, max_generations=100000000):
Convergence.__init__(self, population_instance)
self.numgens = number_of_generations
self.numindis = number_of_individuals
self.maxgen = max_generations
def converged(self):
size = self.pop.pop_size
cur_gen_num = self.pop.dc.get_generation_number(size)
if cur_gen_num >= self.maxgen:
return True
if cur_gen_num <= 1:
return False
cur_pop = self.pop.get_current_population()
newest = max([i.info['key_value_pairs']['generation']
for i in cur_pop[:self.numindis]])
if newest + self.numgens > cur_gen_num:
return False
self.populate_pops(cur_gen_num)
duplicate_gens = 1
latest_pop = self.pops[cur_gen_num - 1]
for i in range(cur_gen_num - 2, -1, -1):
test_pop = self.pops[i]
if test_pop[:self.numindis] == latest_pop[:self.numindis]:
duplicate_gens += 1
if duplicate_gens >= self.numgens:
return True
return False
class RawScoreConvergence(Convergence):
"""Returns True if the supplied max_raw_score has been reached"""
def __init__(self, population_instance, max_raw_score, eps=1e-3):
Convergence.__init__(self, population_instance)
self.max_raw_score = max_raw_score
self.eps = eps
def converged(self):
cur_pop = self.pop.get_current_population()
if abs(cur_pop[0].get_raw_score() - self.max_raw_score) <= self.eps:
return True
return False
class NeverConvergence(object):
"""Test class that never converges."""
def __init__(self):
pass
def converged(self):
return False
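# A minimal usage sketch (illustrative only, not part of ase): the convergence
# classes only need a population-like object. For RawScoreConvergence, that
# means get_current_population() returning individuals ordered fittest first,
# each exposing get_raw_score(). The dummy classes below are hypothetical
# stand-ins for a real ase.ga population.
class _DummyIndividual(object):
    def __init__(self, raw_score):
        self._raw_score = raw_score
    def get_raw_score(self):
        return self._raw_score
class _DummyPopulation(object):
    def get_current_population(self):
        return [_DummyIndividual(0.999), _DummyIndividual(0.5)]
if __name__ == '__main__':
    conv = RawScoreConvergence(_DummyPopulation(), max_raw_score=1.0, eps=1e-2)
    print(conv.converged())  # True, since |0.999 - 1.0| <= 0.01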
| suttond/MODOI | ase/ga/convergence.py | Python | lgpl-3.0 | 3,442 | 0.000581 |
#!/usr/bin/python
from k5test import *
for realm in multipass_realms(create_user=False):
# Test kinit with a keytab.
realm.kinit(realm.host_princ, flags=['-k'])
realm = K5Realm(get_creds=False, start_kadmind=True)
# Test kinit with a partial keytab.
pkeytab = realm.keytab + '.partial'
realm.run([ktutil], input=('rkt %s\ndelent 1\nwkt %s\n' %
(realm.keytab, pkeytab)))
realm.kinit(realm.host_princ, flags=['-k', '-t', pkeytab])
# Test kinit with no keys for client in keytab.
realm.kinit(realm.user_princ, flags=['-k'], expected_code=1,
expected_msg='no suitable keys')
# Test kinit and klist with client keytab defaults.
realm.extract_keytab(realm.user_princ, realm.client_keytab);
realm.run([kinit, '-k', '-i'])
realm.klist(realm.user_princ)
realm.run([kdestroy])
realm.kinit(realm.user_princ, flags=['-k', '-i'])
realm.klist(realm.user_princ)
out = realm.run([klist, '-k', '-i'])
if realm.client_keytab not in out or realm.user_princ not in out:
fail('Expected output not seen from klist -k -i')
# Test implicit request for keytab (-i or -t without -k)
realm.run([kdestroy])
realm.kinit(realm.host_princ, flags=['-t', realm.keytab],
expected_msg='keytab specified, forcing -k')
realm.klist(realm.host_princ)
realm.run([kdestroy])
realm.kinit(realm.user_princ, flags=['-i'],
expected_msg='keytab specified, forcing -k')
realm.klist(realm.user_princ)
# Test extracting keys with multiple key versions present.
os.remove(realm.keytab)
realm.run([kadminl, 'cpw', '-randkey', '-keepold', realm.host_princ])
out = realm.run([kadminl, 'ktadd', '-norandkey', realm.host_princ])
if 'with kvno 1,' not in out or 'with kvno 2,' not in out:
fail('Expected output not seen from kadmin.local ktadd -norandkey')
out = realm.run([klist, '-k', '-e'])
if ' 1 host/' not in out or ' 2 host/' not in out:
fail('Expected output not seen from klist -k -e')
# Test again using kadmin over the network.
realm.prep_kadmin()
os.remove(realm.keytab)
out = realm.run_kadmin(['ktadd', '-norandkey', realm.host_princ])
if 'with kvno 1,' not in out or 'with kvno 2,' not in out:
fail('Expected output not seen from kadmin.local ktadd -norandkey')
out = realm.run([klist, '-k', '-e'])
if ' 1 host/' not in out or ' 2 host/' not in out:
fail('Expected output not seen from klist -k -e')
# Test handling of kvno values beyond 255. Use kadmin over the
# network since we used to have an 8-bit limit on kvno marshalling.
# Test one key rotation, verifying that the expected new kvno appears
# in the keytab and in the principal entry.
def test_key_rotate(realm, princ, expected_kvno):
realm.run_kadmin(['ktadd', '-k', realm.keytab, princ])
realm.run([kadminl, 'ktrem', princ, 'old'])
realm.kinit(princ, flags=['-k'])
msg = '%d %s' % (expected_kvno, princ)
out = realm.run([klist, '-k'], expected_msg=msg)
msg = 'Key: vno %d,' % expected_kvno
out = realm.run_kadmin(['getprinc', princ], expected_msg=msg)
princ = 'foo/bar@%s' % realm.realm
realm.addprinc(princ)
os.remove(realm.keytab)
realm.run([kadminl, 'modprinc', '-kvno', '253', princ])
test_key_rotate(realm, princ, 254)
test_key_rotate(realm, princ, 255)
test_key_rotate(realm, princ, 256)
test_key_rotate(realm, princ, 257)
realm.run([kadminl, 'modprinc', '-kvno', '32766', princ])
test_key_rotate(realm, princ, 32767)
test_key_rotate(realm, princ, 32768)
test_key_rotate(realm, princ, 32769)
realm.run([kadminl, 'modprinc', '-kvno', '65534', princ])
test_key_rotate(realm, princ, 65535)
test_key_rotate(realm, princ, 1)
test_key_rotate(realm, princ, 2)
# Test that klist -k can read a keytab entry without a 32-bit kvno and
# reports the 8-bit key version.
record = '\x00\x01' # principal component count
record += '\x00\x0bKRBTEST.COM' # realm
record += '\x00\x04user' # principal component
record += '\x00\x00\x00\x01' # name type (NT-PRINCIPAL)
record += '\x54\xf7\x4d\x35' # timestamp
record += '\x02' # key version
record += '\x00\x12' # enctype
record += '\x00\x20' # key length
record += '\x00' * 32 # key bytes
f = open(realm.keytab, 'w')
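# Keytab wire-format notes for the hand-assembled bytes below: 0x05 0x02 is
# the keytab format-version header (version 2), and each entry is preceded by
# its length as a 32-bit big-endian integer -- written here as three zero
# bytes plus chr(len(record)).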
f.write('\x05\x02\x00\x00\x00' + chr(len(record)))
f.write(record)
f.close()
msg = ' 2 %s' % realm.user_princ
out = realm.run([klist, '-k'], expected_msg=msg)
# Make sure zero-fill isn't treated as a 32-bit kvno.
f = open(realm.keytab, 'w')
f.write('\x05\x02\x00\x00\x00' + chr(len(record) + 4))
f.write(record)
f.write('\x00\x00\x00\x00')
f.close()
msg = ' 2 %s' % realm.user_princ
out = realm.run([klist, '-k'], expected_msg=msg)
# Make sure a hand-crafted 32-bit kvno is recognized.
f = open(realm.keytab, 'w')
f.write('\x05\x02\x00\x00\x00' + chr(len(record) + 4))
f.write(record)
f.write('\x00\x00\x00\x03')
f.close()
msg = ' 3 %s' % realm.user_princ
out = realm.run([klist, '-k'], expected_msg=msg)
# Test parameter expansion in profile variables
realm.stop()
conf = {'libdefaults': {
'default_keytab_name': 'testdir/%{null}abc%{uid}',
'default_client_keytab_name': 'testdir/%{null}xyz%{uid}'}}
realm = K5Realm(krb5_conf=conf, create_kdb=False)
del realm.env['KRB5_KTNAME']
del realm.env['KRB5_CLIENT_KTNAME']
uidstr = str(os.getuid())
msg = 'FILE:testdir/abc%s' % uidstr
out = realm.run([klist, '-k'], expected_code=1, expected_msg=msg)
msg = 'FILE:testdir/xyz%s' % uidstr
out = realm.run([klist, '-ki'], expected_code=1, expected_msg=msg)
success('Keytab-related tests')
| gerritjvv/cryptoplayground | kerberos/kdc/src/krb5-1.16/src/tests/t_keytab.py | Python | apache-2.0 | 5,482 | 0.00073 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron_lib.api.definitions import port
from neutron_lib.tests.unit.api.definitions import base
class PortDefinitionTestCase(base.DefinitionBaseTestCase):
extension_module = port
extension_attributes = ()
| openstack/neutron-lib | neutron_lib/tests/unit/api/definitions/test_port.py | Python | apache-2.0 | 793 | 0 |
import ardurpc
from ardurpc.handler import Handler
class Base(Handler):
"""Handler for the Base Text-LCD type"""
def __init__(self, **kwargs):
Handler.__init__(self, **kwargs)
def getWidth(self):
"""
Get the display width as number of characters.
:return: Width
:rtype: Integer
"""
return self._call(0x01)
def getHeight(self):
"""
Get the display height as number of characters.
:return: Height
:rtype: Integer
"""
return self._call(0x02)
def clear(self):
"""
Clear the LCD screen and set the cursor position to the upper-left corner.
"""
return self._call(0x11)
def home(self):
"""
Set the cursor position to the upper-left corner.
"""
return self._call(0x12)
def setCursor(self, col, row):
"""
Position the cursor.
"""
return self._call(0x13, '>BB', col, row)
def write(self, c):
"""
Print a single character to the LCD.
"""
c = c.encode('ASCII')
return self._call(0x21, '>B', c[0])
def print(self, s):
"""
Print text to the LCD.
"""
s = s.encode('ASCII')
return self._call(0x22, '>B%ds' % len(s), len(s), s)
ardurpc.register(0x0300, Base, mask=8)
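# A minimal usage sketch (illustrative, not part of ArduRPC): overriding _call
# in a subclass shows the handler's command/packing pattern without real
# hardware. It assumes Handler can be constructed without required arguments.
class _FakeLCD(Base):
    def _call(self, command, fmt=None, *args):
        print("RPC command 0x%02x args=%r" % (command, args))
        return 0
if __name__ == '__main__':
    lcd = _FakeLCD()
    lcd.setCursor(0, 1)   # -> RPC command 0x13 args=(0, 1)
    lcd.print("Hi")       # -> RPC command 0x22 args=(2, b'Hi')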
| DinoTools/ArduRPC-python | ardurpc/handler/lcd/__init__.py | Python | lgpl-3.0 | 1,384 | 0.001445 |
# Copyright (c) 2012 NetApp, Inc. All rights reserved.
# Copyright (c) 2014 Ben Swartzlander. All rights reserved.
# Copyright (c) 2014 Navneet Singh. All rights reserved.
# Copyright (c) 2014 Clinton Knight. All rights reserved.
# Copyright (c) 2014 Alex Meade. All rights reserved.
# Copyright (c) 2014 Bob Callaway. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Volume driver for NetApp NFS storage.
"""
import os
import uuid
from oslo_utils import units
import six
from cinder import exception
from cinder.i18n import _, _LE, _LI, _LW
from cinder.image import image_utils
from cinder.openstack.common import log as logging
from cinder import utils
from cinder.volume.drivers.netapp.dataontap.client import api as na_api
from cinder.volume.drivers.netapp.dataontap.client import client_cmode
from cinder.volume.drivers.netapp.dataontap import nfs_base
from cinder.volume.drivers.netapp.dataontap import ssc_cmode
from cinder.volume.drivers.netapp import options as na_opts
from cinder.volume.drivers.netapp import utils as na_utils
from cinder.volume import utils as volume_utils
LOG = logging.getLogger(__name__)
class NetAppCmodeNfsDriver(nfs_base.NetAppNfsDriver):
"""NetApp NFS driver for Data ONTAP (Cluster-mode)."""
REQUIRED_CMODE_FLAGS = ['netapp_vserver']
def __init__(self, *args, **kwargs):
super(NetAppCmodeNfsDriver, self).__init__(*args, **kwargs)
self.configuration.append_config_values(na_opts.netapp_cluster_opts)
def do_setup(self, context):
"""Do the customized set up on client for cluster mode."""
super(NetAppCmodeNfsDriver, self).do_setup(context)
na_utils.check_flags(self.REQUIRED_CMODE_FLAGS, self.configuration)
self.vserver = self.configuration.netapp_vserver
self.zapi_client = client_cmode.Client(
transport_type=self.configuration.netapp_transport_type,
username=self.configuration.netapp_login,
password=self.configuration.netapp_password,
hostname=self.configuration.netapp_server_hostname,
port=self.configuration.netapp_server_port,
vserver=self.vserver)
self.ssc_enabled = True
self.ssc_vols = None
self.stale_vols = set()
def check_for_setup_error(self):
"""Check that the driver is working and can communicate."""
super(NetAppCmodeNfsDriver, self).check_for_setup_error()
ssc_cmode.check_ssc_api_permissions(self.zapi_client)
def create_volume(self, volume):
"""Creates a volume.
:param volume: volume reference
"""
LOG.debug('create_volume on %s' % volume['host'])
self._ensure_shares_mounted()
# get share as pool name
share = volume_utils.extract_host(volume['host'], level='pool')
if share is None:
msg = _("Pool is not available in the volume host field.")
raise exception.InvalidHost(reason=msg)
extra_specs = na_utils.get_volume_extra_specs(volume)
qos_policy_group = extra_specs.pop('netapp:qos_policy_group', None) \
if extra_specs else None
# warn on obsolete extra specs
na_utils.log_extra_spec_warnings(extra_specs)
try:
volume['provider_location'] = share
            LOG.info(_LI('cast to %s') % volume['provider_location'])
self._do_create_volume(volume)
if qos_policy_group:
self._set_qos_policy_group_on_volume(volume, share,
qos_policy_group)
return {'provider_location': volume['provider_location']}
except Exception as ex:
LOG.error(_LW("Exception creating vol %(name)s on "
"share %(share)s. Details: %(ex)s")
% {'name': volume['name'],
'share': volume['provider_location'],
'ex': ex})
volume['provider_location'] = None
finally:
if self.ssc_enabled:
self._update_stale_vols(self._get_vol_for_share(share))
msg = _("Volume %s could not be created on shares.")
raise exception.VolumeBackendAPIException(data=msg % (volume['name']))
def _set_qos_policy_group_on_volume(self, volume, share, qos_policy_group):
target_path = '%s' % (volume['name'])
export_path = share.split(':')[1]
flex_vol_name = self.zapi_client.get_vol_by_junc_vserver(self.vserver,
export_path)
self.zapi_client.file_assign_qos(flex_vol_name,
qos_policy_group,
target_path)
def _check_volume_type(self, volume, share, file_name):
"""Match volume type for share file."""
extra_specs = na_utils.get_volume_extra_specs(volume)
qos_policy_group = extra_specs.pop('netapp:qos_policy_group', None) \
if extra_specs else None
if not self._is_share_vol_type_match(volume, share):
raise exception.ManageExistingVolumeTypeMismatch(
reason=(_("Volume type does not match for share %s."),
share))
if qos_policy_group:
try:
vserver, flex_vol_name = self._get_vserver_and_exp_vol(
share=share)
self.zapi_client.file_assign_qos(flex_vol_name,
qos_policy_group,
file_name)
except na_api.NaApiError as ex:
LOG.exception(_LE('Setting file QoS policy group failed. %s'),
ex)
raise exception.NetAppDriverException(
reason=(_('Setting file QoS policy group failed. %s'), ex))
def _clone_volume(self, volume_name, clone_name,
volume_id, share=None):
"""Clones mounted volume on NetApp Cluster."""
(vserver, exp_volume) = self._get_vserver_and_exp_vol(volume_id, share)
self.zapi_client.clone_file(exp_volume, volume_name, clone_name,
vserver)
share = share if share else self._get_provider_location(volume_id)
self._post_prov_deprov_in_ssc(share)
def _get_vserver_and_exp_vol(self, volume_id=None, share=None):
"""Gets the vserver and export volume for share."""
(host_ip, export_path) = self._get_export_ip_path(volume_id, share)
ifs = self.zapi_client.get_if_info_by_ip(host_ip)
vserver = ifs[0].get_child_content('vserver')
exp_volume = self.zapi_client.get_vol_by_junc_vserver(vserver,
export_path)
return vserver, exp_volume
def _update_volume_stats(self):
"""Retrieve stats info from vserver."""
self._ensure_shares_mounted()
sync = True if self.ssc_vols is None else False
ssc_cmode.refresh_cluster_ssc(self, self.zapi_client.connection,
self.vserver, synchronous=sync)
LOG.debug('Updating volume stats')
data = {}
netapp_backend = 'NetApp_NFS_Cluster_direct'
backend_name = self.configuration.safe_get('volume_backend_name')
data['volume_backend_name'] = backend_name or netapp_backend
data['vendor_name'] = 'NetApp'
data['driver_version'] = self.VERSION
data['storage_protocol'] = 'nfs'
data['pools'] = self._get_pool_stats()
self._spawn_clean_cache_job()
self.zapi_client.provide_ems(self, netapp_backend, self._app_version)
self._stats = data
def _get_pool_stats(self):
"""Retrieve pool (i.e. NFS share) stats info from SSC volumes."""
pools = []
for nfs_share in self._mounted_shares:
capacity = self._get_extended_capacity_info(nfs_share)
pool = dict()
pool['pool_name'] = nfs_share
pool['QoS_support'] = False
pool['reserved_percentage'] = 0
# Report pool as reserved when over the configured used_ratio
if capacity['used_ratio'] > self.configuration.nfs_used_ratio:
pool['reserved_percentage'] = 100
# Report pool as reserved when over the subscribed ratio
if capacity['subscribed_ratio'] >=\
self.configuration.nfs_oversub_ratio:
pool['reserved_percentage'] = 100
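            # Worked example: with nfs_used_ratio=0.95 and
            # nfs_oversub_ratio=1.0, a share that is 96% full, or whose
            # provisioned size exceeds its apparent size, is reported 100%
            # reserved, steering the scheduler away from it.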
# convert sizes to GB
total = float(capacity['apparent_size']) / units.Gi
pool['total_capacity_gb'] = na_utils.round_down(total, '0.01')
free = float(capacity['apparent_available']) / units.Gi
pool['free_capacity_gb'] = na_utils.round_down(free, '0.01')
# add SSC content if available
vol = self._get_vol_for_share(nfs_share)
if vol and self.ssc_vols:
pool['netapp_raid_type'] = vol.aggr['raid_type']
pool['netapp_disk_type'] = vol.aggr['disk_type']
mirrored = vol in self.ssc_vols['mirrored']
pool['netapp_mirrored'] = six.text_type(mirrored).lower()
pool['netapp_unmirrored'] = six.text_type(not mirrored).lower()
dedup = vol in self.ssc_vols['dedup']
pool['netapp_dedup'] = six.text_type(dedup).lower()
pool['netapp_nodedup'] = six.text_type(not dedup).lower()
compression = vol in self.ssc_vols['compression']
pool['netapp_compression'] = six.text_type(compression).lower()
pool['netapp_nocompression'] = six.text_type(
not compression).lower()
thin = vol in self.ssc_vols['thin']
pool['netapp_thin_provisioned'] = six.text_type(thin).lower()
pool['netapp_thick_provisioned'] = six.text_type(
not thin).lower()
pools.append(pool)
return pools
@utils.synchronized('update_stale')
def _update_stale_vols(self, volume=None, reset=False):
"""Populates stale vols with vol and returns set copy."""
if volume:
self.stale_vols.add(volume)
set_copy = self.stale_vols.copy()
if reset:
self.stale_vols.clear()
return set_copy
@utils.synchronized("refresh_ssc_vols")
def refresh_ssc_vols(self, vols):
"""Refreshes ssc_vols with latest entries."""
if not self._mounted_shares:
LOG.warning(_LW("No shares found hence skipping ssc refresh."))
return
mnt_share_vols = set()
vs_ifs = self.zapi_client.get_vserver_ips(self.vserver)
for vol in vols['all']:
for sh in self._mounted_shares:
host = sh.split(':')[0]
junction = sh.split(':')[1]
ip = na_utils.resolve_hostname(host)
if (self._ip_in_ifs(ip, vs_ifs) and
junction == vol.id['junction_path']):
mnt_share_vols.add(vol)
vol.export['path'] = sh
break
for key in vols.keys():
vols[key] = vols[key] & mnt_share_vols
self.ssc_vols = vols
def _ip_in_ifs(self, ip, api_ifs):
"""Checks if ip is listed for ifs in API format."""
if api_ifs is None:
return False
for ifc in api_ifs:
ifc_ip = ifc.get_child_content("address")
if ifc_ip == ip:
return True
return False
def _shortlist_del_eligible_files(self, share, old_files):
"""Prepares list of eligible files to be deleted from cache."""
file_list = []
(vserver, exp_volume) = self._get_vserver_and_exp_vol(
volume_id=None, share=share)
for file in old_files:
path = '/vol/%s/%s' % (exp_volume, file)
u_bytes = self.zapi_client.get_file_usage(path, vserver)
file_list.append((file, u_bytes))
LOG.debug('Shortlisted files eligible for deletion: %s', file_list)
return file_list
def _share_match_for_ip(self, ip, shares):
"""Returns the share that is served by ip.
        Multiple shares can have the same directory path but be served by
        different IPs. This finds the share that is served by the given ip on
        the same NFS server.
"""
ip_vserver = self._get_vserver_for_ip(ip)
if ip_vserver and shares:
for share in shares:
ip_sh = share.split(':')[0]
sh_vserver = self._get_vserver_for_ip(ip_sh)
if sh_vserver == ip_vserver:
LOG.debug('Share match found for ip %s', ip)
return share
LOG.debug('No share match found for ip %s', ip)
return None
def _get_vserver_for_ip(self, ip):
"""Get vserver for the mentioned ip."""
try:
ifs = self.zapi_client.get_if_info_by_ip(ip)
vserver = ifs[0].get_child_content('vserver')
return vserver
except Exception:
return None
def _get_vol_for_share(self, nfs_share):
"""Gets the ssc vol with given share."""
if self.ssc_vols:
for vol in self.ssc_vols['all']:
if vol.export['path'] == nfs_share:
return vol
return None
def _is_share_vol_compatible(self, volume, share):
"""Checks if share is compatible with volume to host it."""
compatible = self._is_share_eligible(share, volume['size'])
if compatible and self.ssc_enabled:
matched = self._is_share_vol_type_match(volume, share)
compatible = compatible and matched
return compatible
def _is_share_vol_type_match(self, volume, share):
"""Checks if share matches volume type."""
netapp_vol = self._get_vol_for_share(share)
LOG.debug("Found volume %(vol)s for share %(share)s."
% {'vol': netapp_vol, 'share': share})
extra_specs = na_utils.get_volume_extra_specs(volume)
vols = ssc_cmode.get_volumes_for_specs(self.ssc_vols, extra_specs)
return netapp_vol in vols
def delete_volume(self, volume):
"""Deletes a logical volume."""
share = volume['provider_location']
super(NetAppCmodeNfsDriver, self).delete_volume(volume)
self._post_prov_deprov_in_ssc(share)
def delete_snapshot(self, snapshot):
"""Deletes a snapshot."""
share = self._get_provider_location(snapshot.volume_id)
super(NetAppCmodeNfsDriver, self).delete_snapshot(snapshot)
self._post_prov_deprov_in_ssc(share)
def _post_prov_deprov_in_ssc(self, share):
if self.ssc_enabled and share:
netapp_vol = self._get_vol_for_share(share)
if netapp_vol:
self._update_stale_vols(volume=netapp_vol)
def copy_image_to_volume(self, context, volume, image_service, image_id):
"""Fetch the image from image_service and write it to the volume."""
copy_success = False
try:
major, minor = self.zapi_client.get_ontapi_version()
col_path = self.configuration.netapp_copyoffload_tool_path
if major == 1 and minor >= 20 and col_path:
self._try_copyoffload(context, volume, image_service, image_id)
copy_success = True
LOG.info(_LI('Copied image %(img)s to volume %(vol)s using '
'copy offload workflow.')
% {'img': image_id, 'vol': volume['id']})
else:
LOG.debug("Copy offload either not configured or"
" unsupported.")
except Exception as e:
LOG.exception(_LE('Copy offload workflow unsuccessful. %s'), e)
finally:
if not copy_success:
super(NetAppCmodeNfsDriver, self).copy_image_to_volume(
context, volume, image_service, image_id)
if self.ssc_enabled:
sh = self._get_provider_location(volume['id'])
self._update_stale_vols(self._get_vol_for_share(sh))
def _try_copyoffload(self, context, volume, image_service, image_id):
"""Tries server side file copy offload."""
copied = False
cache_result = self._find_image_in_cache(image_id)
if cache_result:
copied = self._copy_from_cache(volume, image_id, cache_result)
if not cache_result or not copied:
self._copy_from_img_service(context, volume, image_service,
image_id)
def _get_ip_verify_on_cluster(self, host):
"""Verifies if host on same cluster and returns ip."""
ip = na_utils.resolve_hostname(host)
vserver = self._get_vserver_for_ip(ip)
if not vserver:
raise exception.NotFound(_("Unable to locate an SVM that is "
"managing the IP address '%s'") % ip)
return ip
def _copy_from_cache(self, volume, image_id, cache_result):
"""Try copying image file_name from cached file_name."""
LOG.debug("Trying copy from cache using copy offload.")
copied = False
for res in cache_result:
try:
(share, file_name) = res
LOG.debug("Found cache file_name on share %s.", share)
if share != self._get_provider_location(volume['id']):
col_path = self.configuration.netapp_copyoffload_tool_path
src_ip = self._get_ip_verify_on_cluster(
share.split(':')[0])
src_path = os.path.join(share.split(':')[1], file_name)
dst_ip = self._get_ip_verify_on_cluster(self._get_host_ip(
volume['id']))
dst_path = os.path.join(
self._get_export_path(volume['id']), volume['name'])
self._execute(col_path, src_ip, dst_ip,
src_path, dst_path,
run_as_root=self._execute_as_root,
check_exit_code=0)
self._register_image_in_cache(volume, image_id)
LOG.debug("Copied image from cache to volume %s using"
" copy offload.", volume['id'])
else:
self._clone_file_dst_exists(share, file_name,
volume['name'],
dest_exists=True)
LOG.debug("Copied image from cache to volume %s using"
" cloning.", volume['id'])
self._post_clone_image(volume)
copied = True
break
except Exception as e:
LOG.exception(_LE('Error in workflow copy from cache. %s.'), e)
return copied
def _clone_file_dst_exists(self, share, src_name, dst_name,
dest_exists=False):
"""Clone file even if dest exists."""
(vserver, exp_volume) = self._get_vserver_and_exp_vol(share=share)
self.zapi_client.clone_file(exp_volume, src_name, dst_name, vserver,
dest_exists=dest_exists)
def _copy_from_img_service(self, context, volume, image_service,
image_id):
"""Copies from the image service using copy offload."""
LOG.debug("Trying copy from image service using copy offload.")
image_loc = image_service.get_location(context, image_id)
image_loc = self._construct_image_nfs_url(image_loc)
conn, dr = self._check_get_nfs_path_segs(image_loc)
if conn:
src_ip = self._get_ip_verify_on_cluster(conn.split(':')[0])
else:
raise exception.NotFound(_("Source host details not found."))
(__, ___, img_file) = image_loc.rpartition('/')
src_path = os.path.join(dr, img_file)
dst_ip = self._get_ip_verify_on_cluster(self._get_host_ip(
volume['id']))
# tmp file is required to deal with img formats
tmp_img_file = six.text_type(uuid.uuid4())
col_path = self.configuration.netapp_copyoffload_tool_path
img_info = image_service.show(context, image_id)
dst_share = self._get_provider_location(volume['id'])
self._check_share_can_hold_size(dst_share, img_info['size'])
run_as_root = self._execute_as_root
dst_dir = self._get_mount_point_for_share(dst_share)
dst_img_local = os.path.join(dst_dir, tmp_img_file)
try:
            # If the src and dst shares are not the same
if (('%s:%s' % (src_ip, dr)) !=
('%s:%s' % (dst_ip, self._get_export_path(volume['id'])))):
dst_img_serv_path = os.path.join(
self._get_export_path(volume['id']), tmp_img_file)
self._execute(col_path, src_ip, dst_ip, src_path,
dst_img_serv_path, run_as_root=run_as_root,
check_exit_code=0)
else:
self._clone_file_dst_exists(dst_share, img_file, tmp_img_file)
self._discover_file_till_timeout(dst_img_local, timeout=120)
LOG.debug('Copied image %(img)s to tmp file %(tmp)s.'
% {'img': image_id, 'tmp': tmp_img_file})
dst_img_cache_local = os.path.join(dst_dir,
'img-cache-%s' % image_id)
if img_info['disk_format'] == 'raw':
LOG.debug('Image is raw %s.', image_id)
self._clone_file_dst_exists(dst_share, tmp_img_file,
volume['name'], dest_exists=True)
self._move_nfs_file(dst_img_local, dst_img_cache_local)
LOG.debug('Copied raw image %(img)s to volume %(vol)s.'
% {'img': image_id, 'vol': volume['id']})
else:
LOG.debug('Image will be converted to raw %s.', image_id)
img_conv = six.text_type(uuid.uuid4())
dst_img_conv_local = os.path.join(dst_dir, img_conv)
                # Checking against image size, which is an approximate check
self._check_share_can_hold_size(dst_share, img_info['size'])
try:
image_utils.convert_image(dst_img_local,
dst_img_conv_local, 'raw',
run_as_root=run_as_root)
data = image_utils.qemu_img_info(dst_img_conv_local,
run_as_root=run_as_root)
if data.file_format != "raw":
raise exception.InvalidResults(
_("Converted to raw, but format is now %s.")
% data.file_format)
else:
self._clone_file_dst_exists(dst_share, img_conv,
volume['name'],
dest_exists=True)
self._move_nfs_file(dst_img_conv_local,
dst_img_cache_local)
LOG.debug('Copied locally converted raw image'
' %(img)s to volume %(vol)s.'
% {'img': image_id, 'vol': volume['id']})
finally:
if os.path.exists(dst_img_conv_local):
self._delete_file(dst_img_conv_local)
self._post_clone_image(volume)
finally:
if os.path.exists(dst_img_local):
self._delete_file(dst_img_local)
| Akrog/cinder | cinder/volume/drivers/netapp/dataontap/nfs_cmode.py | Python | apache-2.0 | 24,731 | 0 |
# Copyright 2022 The DDSP Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Library of training functions."""
import inspect
import json
import os
import time
from absl import logging
from ddsp.training import cloud
import gin
import tensorflow.compat.v2 as tf
# ---------------------- Helper Functions --------------------------------------
def get_strategy(tpu='', cluster_config=''):
"""Create a distribution strategy for running on accelerators.
For CPU, single-GPU, or multi-GPU jobs on a single machine, call this function
without args to return a MirroredStrategy.
For TPU jobs, specify an address to the `tpu` argument.
For multi-machine GPU jobs, specify a `cluster_config` argument of the cluster
configuration.
Args:
tpu: Address of the TPU. No TPU if left blank.
cluster_config: Should be specified only for multi-worker jobs.
Task specific dictionary for cluster config dict in the TF_CONFIG format.
https://www.tensorflow.org/guide/distributed_training#setting_up_tf_config_environment_variable
If passed as a string, will be parsed to a dictionary. Two components
should be specified: cluster and task. Cluster provides information about
the training cluster, which is a dict consisting of different types of
jobs such as chief and worker. Task is information about the current task.
For example: "{"cluster": {"worker": ["host1:port", "host2:port"]},
"task": {"type": "worker", "index": 0}}"
Returns:
A distribution strategy. MirroredStrategy by default. TPUStrategy if `tpu`
arg is specified. MultiWorkerMirroredStrategy if `cluster_config` arg is
specified.
"""
if tpu:
logging.info('Use TPU at %s', tpu)
resolver = tf.distribute.cluster_resolver.TPUClusterResolver(tpu=tpu)
tf.config.experimental_connect_to_cluster(resolver)
tf.tpu.experimental.initialize_tpu_system(resolver)
strategy = tf.distribute.TPUStrategy(resolver)
elif cluster_config:
if not isinstance(cluster_config, dict):
cluster_config = json.loads(cluster_config)
cluster_spec = tf.train.ClusterSpec(cluster_config['cluster'])
resolver = tf.distribute.cluster_resolver.SimpleClusterResolver(
cluster_spec=cluster_spec,
task_type=cluster_config['task']['type'],
task_id=cluster_config['task']['index'],
num_accelerators={'GPU': len(tf.config.list_physical_devices('GPU'))},
rpc_layer='grpc')
strategy = tf.distribute.experimental.MultiWorkerMirroredStrategy(
cluster_resolver=resolver)
else:
logging.info('Defaulting to MirroredStrategy')
strategy = tf.distribute.MirroredStrategy()
return strategy
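# A minimal sketch of requesting a multi-worker strategy; the host:port values
# below are placeholders, not real machines:
#
#   cluster_config = ('{"cluster": {"worker": ["10.0.0.1:2222", "10.0.0.2:2222"]},'
#                     ' "task": {"type": "worker", "index": 0}}')
#   strategy = get_strategy(cluster_config=cluster_config)
#   with strategy.scope():
#     ...  # build the model under the strategy scope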
def expand_path(file_path):
return os.path.expanduser(os.path.expandvars(file_path))
def get_latest_file(dir_path, prefix='operative_config-', suffix='.gin'):
"""Returns latest file with pattern '/dir_path/prefix[iteration]suffix'.
Args:
dir_path: Path to the directory.
prefix: Filename prefix, not including directory.
suffix: Filename suffix, including extension.
Returns:
Path to the latest file
Raises:
FileNotFoundError: If no files match the pattern
'/dir_path/prefix[int]suffix'.
"""
dir_path = expand_path(dir_path)
dir_prefix = os.path.join(dir_path, prefix)
search_pattern = dir_prefix + '*' + suffix
file_paths = tf.io.gfile.glob(search_pattern)
if not file_paths:
raise FileNotFoundError(
f'No files found matching the pattern \'{search_pattern}\'.')
try:
# Filter to get highest iteration, no negative iterations.
get_iter = lambda fp: abs(int(fp.split(dir_prefix)[-1].split(suffix)[0]))
latest_file = max(file_paths, key=get_iter)
return latest_file
except ValueError as verror:
raise FileNotFoundError(
f'Files found with pattern \'{search_pattern}\' do not match '
f'the pattern \'{dir_prefix}[iteration_number]{suffix}\'.\n\n'
f'Files found:\n{file_paths}') from verror
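# For example, with files 'ckpt-100.index' and 'ckpt-200.index' in dir_path,
# get_latest_file(dir_path, prefix='ckpt-', suffix='.index') returns the
# 'ckpt-200.index' path, since 200 is the highest iteration number.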
def get_latest_checkpoint(checkpoint_path):
"""Helper function to get path to latest checkpoint.
Args:
checkpoint_path: Path to the directory containing model checkpoints, or
to a specific checkpoint (e.g. `/path/to/model.ckpt-iteration`).
Returns:
Path to latest checkpoint.
Raises:
FileNotFoundError: If no checkpoint is found.
"""
checkpoint_path = expand_path(checkpoint_path)
is_checkpoint = tf.io.gfile.exists(checkpoint_path + '.index')
if is_checkpoint:
# Return the path if it points to a checkpoint.
return checkpoint_path
else:
# Search using 'checkpoints' file.
# Returns None if no 'checkpoints' file, or directory doesn't exist.
ckpt = tf.train.latest_checkpoint(checkpoint_path)
if ckpt:
return ckpt
else:
# Last resort, look for '/path/ckpt-[iter].index' files.
ckpt_f = get_latest_file(checkpoint_path, prefix='ckpt-', suffix='.index')
return ckpt_f.split('.index')[0]
# ---------------------------------- Gin ---------------------------------------
def get_latest_operative_config(restore_dir):
"""Finds the most recently saved operative_config in a directory.
Args:
restore_dir: Path to directory with gin operative_configs. Will also work
if passing a path to a file in that directory such as a checkpoint.
Returns:
Filepath to most recent operative config.
Raises:
FileNotFoundError: If no config is found.
"""
try:
return get_latest_file(
restore_dir, prefix='operative_config-', suffix='.gin')
except FileNotFoundError:
return get_latest_file(
os.path.dirname(restore_dir), prefix='operative_config-', suffix='.gin')
def write_gin_config(summary_writer, save_dir, step):
""""Writes gin operative_config to save_dir and tensorboard."""
config_str = gin.operative_config_str()
# Save the original config string to a file.
base_name = 'operative_config-{}'.format(step)
fname = os.path.join(save_dir, base_name + '.gin')
with tf.io.gfile.GFile(fname, 'w') as f:
f.write(config_str)
# Formatting hack copied from gin.tf.GinConfigSaverHook.
def format_for_tensorboard(line):
"""Convert a single line to markdown format."""
if not line.startswith('#'):
return ' ' + line
line = line[2:]
if line.startswith('===='):
return ''
if line.startswith('None'):
return ' # None.'
if line.endswith(':'):
return '#### ' + line
return line
# Convert config string to markdown.
md_lines = []
for line in config_str.splitlines():
md_line = format_for_tensorboard(line)
if md_line is not None:
md_lines.append(md_line)
md_config_str = '\n'.join(md_lines)
# Add to tensorboard.
with summary_writer.as_default():
text_tensor = tf.convert_to_tensor(md_config_str)
tf.summary.text(name='gin/' + base_name, data=text_tensor, step=step)
summary_writer.flush()
def gin_register_keras_layers():
"""Registers all keras layers and Sequential to be referenceable in gin."""
# Register sequential model.
gin.external_configurable(tf.keras.Sequential, 'tf.keras.Sequential')
# Register all the layers.
for k, v in inspect.getmembers(tf.keras.layers):
# Duck typing for tf.keras.layers.Layer since keras uses metaclasses.
if hasattr(v, 'variables'):
gin.external_configurable(v, f'tf.keras.layers.{k}')
# ------------------------ Training Loop ---------------------------------------
@gin.configurable
def train(data_provider,
trainer,
batch_size=32,
num_steps=1000000,
steps_per_summary=300,
steps_per_save=300,
save_dir='/tmp/ddsp',
restore_dir='/tmp/ddsp',
early_stop_loss_value=None,
report_loss_to_hypertune=False):
"""Main training loop.
Args:
data_provider: DataProvider object for training data.
trainer: Trainer object built with Model to train.
batch_size: Total batch size.
num_steps: Number of training steps.
steps_per_summary: Number of training steps per summary save.
steps_per_save: Number of training steps per checkpoint save.
save_dir: Directory where checkpoints and summaries will be saved.
If empty string, no checkpoints or summaries will be saved.
restore_dir: Directory where latest checkpoints for resuming the training
are stored. If there are no checkpoints in this directory, training will
begin anew.
    early_stop_loss_value: Early stopping. When the total_loss drops below this
      value, training stops. If None, training will run for num_steps steps.
report_loss_to_hypertune: Report loss values to hypertune package for
hyperparameter tuning, such as on Google Cloud AI-Platform.
"""
# Get a distributed dataset iterator.
dataset = data_provider.get_batch(batch_size, shuffle=True, repeats=-1)
dataset = trainer.distribute_dataset(dataset)
dataset_iter = iter(dataset)
# Build model, easiest to just run forward pass.
trainer.build(next(dataset_iter))
# Load latest checkpoint if one exists in load directory.
try:
trainer.restore(restore_dir)
except FileNotFoundError:
logging.info('No existing checkpoint found in %s, skipping '
'checkpoint loading.', restore_dir)
if save_dir:
# Set up the summary writer and metrics.
summary_dir = os.path.join(save_dir, 'summaries', 'train')
summary_writer = tf.summary.create_file_writer(summary_dir)
# Save the gin config.
write_gin_config(summary_writer, save_dir, trainer.step.numpy())
else:
# Need to create a dummy writer, even if no save_dir is provided.
summary_writer = tf.summary.create_noop_writer()
# Train.
with summary_writer.as_default():
tick = time.time()
for iteration in range(num_steps):
step = trainer.step # Step is not iteration if restarting a model.
# Take a step.
losses = trainer.train_step(dataset_iter)
# Create training loss metrics when starting/restarting training.
if iteration == 0:
loss_names = list(losses.keys())
logging.info('Creating metrics for %s', loss_names)
avg_losses = {name: tf.keras.metrics.Mean(name=name, dtype=tf.float32)
for name in loss_names}
# Update metrics.
for k, v in losses.items():
avg_losses[k].update_state(v)
# Log the step.
log_str = 'step: {}\t'.format(int(step.numpy()))
for k, v in losses.items():
log_str += '{}: {:.2f}\t'.format(k, v)
logging.info(log_str)
# Write Summaries.
if step % steps_per_summary == 0 and save_dir:
# Speed.
steps_per_sec = steps_per_summary / (time.time() - tick)
tf.summary.scalar('steps_per_sec', steps_per_sec, step=step)
tick = time.time()
# Metrics.
for k, metric in avg_losses.items():
tf.summary.scalar('losses/{}'.format(k), metric.result(), step=step)
metric.reset_states()
# Report metrics for hyperparameter tuning if enabled.
if report_loss_to_hypertune:
cloud.report_metric_to_hypertune(losses['total_loss'], step.numpy())
# Stop the training when the loss reaches given value
if (early_stop_loss_value is not None and
losses['total_loss'] <= early_stop_loss_value):
logging.info('Total loss reached early stopping value of %s',
early_stop_loss_value)
# Write a final checkpoint.
if save_dir:
trainer.save(save_dir)
summary_writer.flush()
break
# Save Model.
if step % steps_per_save == 0 and save_dir:
trainer.save(save_dir)
summary_writer.flush()
logging.info('Training Finished!')
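# A typical invocation sketch (illustrative; in ddsp the data provider and
# trainer are normally constructed via gin configs rather than by hand):
#
#   data_provider = ...  # e.g. a ddsp.training.data provider
#   trainer = ...        # a trainer wrapping the model
#   train(data_provider, trainer, batch_size=16, num_steps=30000,
#         save_dir='/tmp/ddsp', restore_dir='/tmp/ddsp')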
| magenta/ddsp | ddsp/training/train_util.py | Python | apache-2.0 | 12,295 | 0.008459 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os.path
import sys
import types
import unittest
from contextlib import contextmanager
from django.template import Context, TemplateDoesNotExist
from django.template.engine import Engine
from django.test import SimpleTestCase, override_settings
from django.utils import six
from .utils import TEMPLATE_DIR
try:
import pkg_resources
except ImportError:
pkg_resources = None
class CachedLoaderTests(SimpleTestCase):
def create_engine(self, **kwargs):
return Engine(
loaders=[
('django.template.loaders.cached.Loader', [
'django.template.loaders.filesystem.Loader',
]),
],
)
def test_templatedir_caching(self):
"""
#13573 -- Template directories should be part of the cache key.
"""
engine = self.create_engine()
# Retrieve a template specifying a template directory to check
t1, name = engine.find_template('test.html', (os.path.join(TEMPLATE_DIR, 'first'),))
# Now retrieve the same template name, but from a different directory
t2, name = engine.find_template('test.html', (os.path.join(TEMPLATE_DIR, 'second'),))
# The two templates should not have the same content
self.assertNotEqual(t1.render(Context({})), t2.render(Context({})))
def test_missing_template_is_cached(self):
"""
#19949 -- TemplateDoesNotExist exceptions should be cached.
"""
engine = self.create_engine()
loader = engine.template_loaders[0]
self.assertFalse('missing.html' in loader.template_cache)
with self.assertRaises(TemplateDoesNotExist):
loader.load_template("missing.html")
self.assertEqual(
loader.template_cache["missing.html"],
TemplateDoesNotExist,
"Cached loader failed to cache the TemplateDoesNotExist exception",
)
def test_debug_nodelist_name(self):
template_name = 'index.html'
engine = Engine(dirs=[TEMPLATE_DIR], debug=True)
template = engine.get_template(template_name)
name = template.nodelist[0].source[0].name
self.assertTrue(
name.endswith(template_name),
'Template loaded through cached loader has incorrect name for debug page: %s' % template_name,
)
template = engine.get_template(template_name)
name = template.nodelist[0].source[0].name
self.assertTrue(
name.endswith(template_name),
'Cached template loaded through cached loader has incorrect name for debug page: %s' % template_name,
)
@unittest.skipUnless(pkg_resources, 'setuptools is not installed')
class EggLoaderTests(SimpleTestCase):
@contextmanager
def create_egg(self, name, resources):
"""
Creates a mock egg with a list of resources.
name: The name of the module.
resources: A dictionary of template names mapped to file-like objects.
"""
if six.PY2:
name = name.encode('utf-8')
class MockLoader(object):
pass
class MockProvider(pkg_resources.NullProvider):
def __init__(self, module):
pkg_resources.NullProvider.__init__(self, module)
self.module = module
def _has(self, path):
return path in self.module._resources
def _isdir(self, path):
return False
def get_resource_stream(self, manager, resource_name):
return self.module._resources[resource_name]
def _get(self, path):
return self.module._resources[path].read()
def _fn(self, base, resource_name):
return os.path.normcase(resource_name)
egg = types.ModuleType(name)
egg.__loader__ = MockLoader()
egg.__path__ = ['/some/bogus/path/']
egg.__file__ = '/some/bogus/path/__init__.pyc'
egg._resources = resources
sys.modules[name] = egg
pkg_resources._provider_factories[MockLoader] = MockProvider
try:
yield
finally:
del sys.modules[name]
del pkg_resources._provider_factories[MockLoader]
def setUp(self):
engine = Engine(loaders=[
'django.template.loaders.eggs.Loader',
])
self.loader = engine.template_loaders[0]
def test_existing(self):
templates = {
os.path.normcase('templates/y.html'): six.StringIO("y"),
}
with self.create_egg('egg', templates):
with override_settings(INSTALLED_APPS=['egg']):
contents, template_name = self.loader.load_template_source("y.html")
self.assertEqual(contents, "y")
self.assertEqual(template_name, "egg:egg:templates/y.html")
def test_non_existing(self):
"""
Template loading fails if the template is not in the egg.
"""
with self.create_egg('egg', {}):
with override_settings(INSTALLED_APPS=['egg']):
with self.assertRaises(TemplateDoesNotExist):
self.loader.load_template_source("not-existing.html")
def test_not_installed(self):
"""
Template loading fails if the egg is not in INSTALLED_APPS.
"""
templates = {
os.path.normcase('templates/y.html'): six.StringIO("y"),
}
with self.create_egg('egg', templates):
with self.assertRaises(TemplateDoesNotExist):
self.loader.load_template_source("y.html")
class FileSystemLoaderTests(SimpleTestCase):
def setUp(self):
self.engine = Engine()
@contextmanager
def source_checker(self, dirs):
loader = self.engine.template_loaders[0]
def check_sources(path, expected_sources):
expected_sources = [os.path.abspath(s) for s in expected_sources]
self.assertEqual(
list(loader.get_template_sources(path, dirs)),
expected_sources,
)
yield check_sources
def test_directory_security(self):
with self.source_checker(['/dir1', '/dir2']) as check_sources:
check_sources('index.html', ['/dir1/index.html', '/dir2/index.html'])
check_sources('/etc/passwd', [])
check_sources('etc/passwd', ['/dir1/etc/passwd', '/dir2/etc/passwd'])
check_sources('../etc/passwd', [])
check_sources('../../../etc/passwd', [])
check_sources('/dir1/index.html', ['/dir1/index.html'])
check_sources('../dir2/index.html', ['/dir2/index.html'])
check_sources('/dir1blah', [])
check_sources('../dir1blah', [])
def test_unicode_template_name(self):
with self.source_checker(['/dir1', '/dir2']) as check_sources:
# UTF-8 bytestrings are permitted.
check_sources(b'\xc3\x85ngstr\xc3\xb6m', ['/dir1/Ångström', '/dir2/Ångström'])
# Unicode strings are permitted.
check_sources('Ångström', ['/dir1/Ångström', '/dir2/Ångström'])
def test_utf8_bytestring(self):
"""
Invalid UTF-8 encoding in bytestrings should raise a useful error
"""
engine = Engine()
loader = engine.template_loaders[0]
with self.assertRaises(UnicodeDecodeError):
list(loader.get_template_sources(b'\xc3\xc3', ['/dir1']))
def test_unicode_dir_name(self):
with self.source_checker([b'/Stra\xc3\x9fe']) as check_sources:
check_sources('Ångström', ['/Straße/Ångström'])
check_sources(b'\xc3\x85ngstr\xc3\xb6m', ['/Straße/Ångström'])
@unittest.skipUnless(
os.path.normcase('/TEST') == os.path.normpath('/test'),
"This test only runs on case-sensitive file systems.",
)
def test_case_sensitivity(self):
with self.source_checker(['/dir1', '/DIR2']) as check_sources:
check_sources('index.html', ['/dir1/index.html', '/DIR2/index.html'])
check_sources('/DIR1/index.HTML', ['/DIR1/index.HTML'])
class AppDirectoriesLoaderTest(FileSystemLoaderTests):
def setUp(self):
self.engine = Engine(
loaders=['django.template.loaders.app_directories.Loader'],
)
| abhattad4/Digi-Menu | tests/template_tests/test_loaders.py | Python | bsd-3-clause | 8,433 | 0.00107 |
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A Transformed Distribution class."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops.distributions import distribution as distribution_lib
from tensorflow.python.ops.distributions import identity_bijector
from tensorflow.python.ops.distributions import util as distribution_util
from tensorflow.python.util import deprecation
__all__ = [
"TransformedDistribution",
]
# The following helper functions attempt to statically perform a TF operation.
# These functions make debugging easier since we can do more validation during
# graph construction.
def _static_value(x):
"""Returns the static value of a `Tensor` or `None`."""
return tensor_util.constant_value(ops.convert_to_tensor(x))
def _logical_and(*args):
"""Convenience function which attempts to statically `reduce_all`."""
args_ = [_static_value(x) for x in args]
if any(x is not None and not bool(x) for x in args_):
return constant_op.constant(False)
if all(x is not None and bool(x) for x in args_):
return constant_op.constant(True)
if len(args) == 2:
return math_ops.logical_and(*args)
return math_ops.reduce_all(args)
def _logical_equal(x, y):
"""Convenience function which attempts to statically compute `x == y`."""
x_ = _static_value(x)
y_ = _static_value(y)
if x_ is None or y_ is None:
return math_ops.equal(x, y)
return constant_op.constant(np.array_equal(x_, y_))
def _logical_not(x):
"""Convenience function which attempts to statically apply `logical_not`."""
x_ = _static_value(x)
if x_ is None:
return math_ops.logical_not(x)
return constant_op.constant(np.logical_not(x_))
def _concat_vectors(*args):
"""Convenience function which concatenates input vectors."""
args_ = [_static_value(x) for x in args]
if any(x_ is None for x_ in args_):
return array_ops.concat(args, 0)
return constant_op.constant([x_ for vec_ in args_ for x_ in vec_])
def _pick_scalar_condition(pred, cond_true, cond_false):
"""Convenience function which chooses the condition based on the predicate."""
# Note: This function is only valid if all of pred, cond_true, and cond_false
# are scalars. This means its semantics are arguably more like tf.cond than
# tf.select even though we use tf.select to implement it.
pred_ = _static_value(pred)
if pred_ is None:
return array_ops.where(pred, cond_true, cond_false)
return cond_true if pred_ else cond_false
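# For example, _pick_scalar_condition(constant_op.constant(True), 1., 0.)
# folds to 1. at graph-construction time, while a predicate whose value is
# unknown statically falls back to array_ops.where.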
def _ones_like(x):
"""Convenience function attempts to statically construct `ones_like`."""
# Should only be used for small vectors.
if x.get_shape().is_fully_defined():
return array_ops.ones(x.get_shape().as_list(), dtype=x.dtype)
return array_ops.ones_like(x)
def _ndims_from_shape(shape):
"""Returns `Tensor`'s `rank` implied by a `Tensor` shape."""
if shape.get_shape().ndims not in (None, 1):
raise ValueError("input is not a valid shape: not 1D")
if not shape.dtype.is_integer:
raise TypeError("input is not a valid shape: wrong dtype")
if shape.get_shape().is_fully_defined():
return constant_op.constant(shape.get_shape().as_list()[0])
return array_ops.shape(shape)[0]
def _is_scalar_from_shape(shape):
"""Returns `True` `Tensor` if `Tensor` shape implies a scalar."""
return _logical_equal(_ndims_from_shape(shape), 0)
class TransformedDistribution(distribution_lib.Distribution):
"""A Transformed Distribution.
A `TransformedDistribution` models `p(y)` given a base distribution `p(x)`,
and a deterministic, invertible, differentiable transform, `Y = g(X)`. The
transform is typically an instance of the `Bijector` class and the base
distribution is typically an instance of the `Distribution` class.
A `Bijector` is expected to implement the following functions:
- `forward`,
- `inverse`,
- `inverse_log_det_jacobian`.
The semantics of these functions are outlined in the `Bijector` documentation.
We now describe how a `TransformedDistribution` alters the input/outputs of a
`Distribution` associated with a random variable (rv) `X`.
Write `cdf(Y=y)` for an absolutely continuous cumulative distribution function
of random variable `Y`; write the probability density function `pdf(Y=y) :=
  d^k / (dy_1,...,dy_k) cdf(Y=y)` for its derivative with respect to `Y` evaluated at
`y`. Assume that `Y = g(X)` where `g` is a deterministic diffeomorphism,
i.e., a non-random, continuous, differentiable, and invertible function.
Write the inverse of `g` as `X = g^{-1}(Y)` and `(J o g)(x)` for the Jacobian
of `g` evaluated at `x`.
A `TransformedDistribution` implements the following operations:
* `sample`
Mathematically: `Y = g(X)`
Programmatically: `bijector.forward(distribution.sample(...))`
* `log_prob`
Mathematically: `(log o pdf)(Y=y) = (log o pdf o g^{-1})(y)
+ (log o abs o det o J o g^{-1})(y)`
Programmatically: `(distribution.log_prob(bijector.inverse(y))
+ bijector.inverse_log_det_jacobian(y))`
* `log_cdf`
Mathematically: `(log o cdf)(Y=y) = (log o cdf o g^{-1})(y)`
Programmatically: `distribution.log_cdf(bijector.inverse(x))`
* and similarly for: `cdf`, `prob`, `log_survival_function`,
`survival_function`.
A simple example constructing a Log-Normal distribution from a Normal
distribution:
```python
ds = tfp.distributions
log_normal = ds.TransformedDistribution(
distribution=ds.Normal(loc=0., scale=1.),
bijector=ds.bijectors.Exp(),
name="LogNormalTransformedDistribution")
```
A `LogNormal` made from callables:
```python
ds = tfp.distributions
log_normal = ds.TransformedDistribution(
distribution=ds.Normal(loc=0., scale=1.),
      bijector=ds.bijectors.Inline(
          forward_fn=tf.exp,
          inverse_fn=tf.log,
          inverse_log_det_jacobian_fn=(
              lambda y: -tf.reduce_sum(tf.log(y), axis=-1))),
      name="LogNormalTransformedDistribution")
```
Another example constructing a Normal from a StandardNormal:
```python
ds = tfp.distributions
normal = ds.TransformedDistribution(
distribution=ds.Normal(loc=0., scale=1.),
bijector=ds.bijectors.Affine(
shift=-1.,
          scale_identity_multiplier=2.),
      name="NormalTransformedDistribution")
```
A `TransformedDistribution`'s batch- and event-shape are implied by the base
distribution unless explicitly overridden by `batch_shape` or `event_shape`
arguments. Specifying an overriding `batch_shape` (`event_shape`) is
permitted only if the base distribution has scalar batch-shape (event-shape).
The bijector is applied to the distribution as if the distribution possessed
the overridden shape(s). The following example demonstrates how to construct a
multivariate Normal as a `TransformedDistribution`.
```python
ds = tfp.distributions
# We will create two MVNs with batch_shape = event_shape = 2.
mean = [[-1., 0], # batch:0
[0., 1]] # batch:1
chol_cov = [[[1., 0],
[0, 1]], # batch:0
[[1, 0],
[2, 2]]] # batch:1
mvn1 = ds.TransformedDistribution(
distribution=ds.Normal(loc=0., scale=1.),
bijector=ds.bijectors.Affine(shift=mean, scale_tril=chol_cov),
batch_shape=[2], # Valid because base_distribution.batch_shape == [].
event_shape=[2]) # Valid because base_distribution.event_shape == [].
mvn2 = ds.MultivariateNormalTriL(loc=mean, scale_tril=chol_cov)
# mvn1.log_prob(x) == mvn2.log_prob(x)
```
"""
@deprecation.deprecated(
"2019-01-01",
"The TensorFlow Distributions library has moved to "
"TensorFlow Probability "
"(https://github.com/tensorflow/probability). You "
"should update all references to use `tfp.distributions` "
"instead of `tf.distributions`.",
warn_once=True)
def __init__(self,
distribution,
bijector=None,
batch_shape=None,
event_shape=None,
validate_args=False,
name=None):
"""Construct a Transformed Distribution.
Args:
distribution: The base distribution instance to transform. Typically an
instance of `Distribution`.
bijector: The object responsible for calculating the transformation.
Typically an instance of `Bijector`. `None` means `Identity()`.
batch_shape: `integer` vector `Tensor` which overrides `distribution`
`batch_shape`; valid only if `distribution.is_scalar_batch()`.
event_shape: `integer` vector `Tensor` which overrides `distribution`
`event_shape`; valid only if `distribution.is_scalar_event()`.
validate_args: Python `bool`, default `False`. When `True` distribution
parameters are checked for validity despite possibly degrading runtime
performance. When `False` invalid inputs may silently render incorrect
outputs.
name: Python `str` name prefixed to Ops created by this class. Default:
`bijector.name + distribution.name`.
"""
parameters = dict(locals())
name = name or (("" if bijector is None else bijector.name) +
distribution.name)
with ops.name_scope(name, values=[event_shape, batch_shape]) as name:
# For convenience we define some handy constants.
self._zero = constant_op.constant(0, dtype=dtypes.int32, name="zero")
self._empty = constant_op.constant([], dtype=dtypes.int32, name="empty")
if bijector is None:
bijector = identity_bijector.Identity(validate_args=validate_args)
# We will keep track of a static and dynamic version of
# self._is_{batch,event}_override. This way we can do more prior to graph
# execution, including possibly raising Python exceptions.
self._override_batch_shape = self._maybe_validate_shape_override(
batch_shape, distribution.is_scalar_batch(), validate_args,
"batch_shape")
self._is_batch_override = _logical_not(_logical_equal(
_ndims_from_shape(self._override_batch_shape), self._zero))
self._is_maybe_batch_override = bool(
tensor_util.constant_value(self._override_batch_shape) is None or
tensor_util.constant_value(self._override_batch_shape).size != 0)
self._override_event_shape = self._maybe_validate_shape_override(
event_shape, distribution.is_scalar_event(), validate_args,
"event_shape")
self._is_event_override = _logical_not(_logical_equal(
_ndims_from_shape(self._override_event_shape), self._zero))
self._is_maybe_event_override = bool(
tensor_util.constant_value(self._override_event_shape) is None or
tensor_util.constant_value(self._override_event_shape).size != 0)
# To convert a scalar distribution into a multivariate distribution we
# will draw dims from the sample dims, which are otherwise iid. This is
# easy to do except in the case that the base distribution has batch dims
# and we're overriding event shape. When that case happens the event dims
# will incorrectly be to the left of the batch dims. In this case we'll
# cyclically permute left the new dims.
self._needs_rotation = _logical_and(
self._is_event_override,
_logical_not(self._is_batch_override),
_logical_not(distribution.is_scalar_batch()))
override_event_ndims = _ndims_from_shape(self._override_event_shape)
self._rotate_ndims = _pick_scalar_condition(
self._needs_rotation, override_event_ndims, 0)
# We'll be reducing the head dims (if at all), i.e., this will be []
# if we don't need to reduce.
self._reduce_event_indices = math_ops.range(
self._rotate_ndims - override_event_ndims, self._rotate_ndims)
self._distribution = distribution
self._bijector = bijector
super(TransformedDistribution, self).__init__(
dtype=self._distribution.dtype,
reparameterization_type=self._distribution.reparameterization_type,
validate_args=validate_args,
allow_nan_stats=self._distribution.allow_nan_stats,
parameters=parameters,
# We let TransformedDistribution access _graph_parents since this class
# is more like a baseclass than derived.
graph_parents=(distribution._graph_parents + # pylint: disable=protected-access
bijector.graph_parents),
name=name)
@property
def distribution(self):
"""Base distribution, p(x)."""
return self._distribution
@property
def bijector(self):
"""Function transforming x => y."""
return self._bijector
def _event_shape_tensor(self):
return self.bijector.forward_event_shape_tensor(
distribution_util.pick_vector(
self._is_event_override,
self._override_event_shape,
self.distribution.event_shape_tensor()))
def _event_shape(self):
# If there's a chance that the event_shape has been overridden, we return
# what we statically know about the `event_shape_override`. This works
# because: `_is_maybe_event_override` means `static_override` is `None` or a
# non-empty list, i.e., we don't statically know the `event_shape` or we do.
#
# Since the `bijector` may change the `event_shape`, we then forward what we
# know to the bijector. This allows the `bijector` to have final say in the
# `event_shape`.
static_override = tensor_util.constant_value_as_shape(
self._override_event_shape)
return self.bijector.forward_event_shape(
static_override
if self._is_maybe_event_override
else self.distribution.event_shape)
def _batch_shape_tensor(self):
return distribution_util.pick_vector(
self._is_batch_override,
self._override_batch_shape,
self.distribution.batch_shape_tensor())
def _batch_shape(self):
# If there's a chance that the batch_shape has been overridden, we return
# what we statically know about the `batch_shape_override`. This works
# because: `_is_maybe_batch_override` means `static_override` is `None` or a
# non-empty list, i.e., we don't statically know the `batch_shape` or we do.
#
# Notice that this implementation parallels the `_event_shape` except that
# the `bijector` doesn't get to alter the `batch_shape`. Recall that
# `batch_shape` is a property of a distribution while `event_shape` is
# shared between both the `distribution` instance and the `bijector`.
static_override = tensor_util.constant_value_as_shape(
self._override_batch_shape)
return (static_override
if self._is_maybe_batch_override
else self.distribution.batch_shape)
def _sample_n(self, n, seed=None):
sample_shape = _concat_vectors(
distribution_util.pick_vector(self._needs_rotation, self._empty, [n]),
self._override_batch_shape,
self._override_event_shape,
distribution_util.pick_vector(self._needs_rotation, [n], self._empty))
x = self.distribution.sample(sample_shape=sample_shape, seed=seed)
x = self._maybe_rotate_dims(x)
# We'll apply the bijector in the `_call_sample_n` function.
return x
def _call_sample_n(self, sample_shape, seed, name, **kwargs):
# We override `_call_sample_n` rather than `_sample_n` so we can ensure that
# the result of `self.bijector.forward` is not modified (and thus caching
# works).
with self._name_scope(name, values=[sample_shape]):
sample_shape = ops.convert_to_tensor(
sample_shape, dtype=dtypes.int32, name="sample_shape")
sample_shape, n = self._expand_sample_shape_to_vector(
sample_shape, "sample_shape")
# First, generate samples. We will possibly generate extra samples in the
# event that we need to reinterpret the samples as part of the
# event_shape.
x = self._sample_n(n, seed, **kwargs)
# Next, we reshape `x` into its final form. We do this prior to the call
# to the bijector to ensure that the bijector caching works.
batch_event_shape = array_ops.shape(x)[1:]
final_shape = array_ops.concat([sample_shape, batch_event_shape], 0)
x = array_ops.reshape(x, final_shape)
# Finally, we apply the bijector's forward transformation. For caching to
# work, it is imperative that this is the last modification to the
# returned result.
y = self.bijector.forward(x, **kwargs)
y = self._set_sample_static_shape(y, sample_shape)
return y
def _log_prob(self, y):
# For caching to work, it is imperative that the bijector is the first to
# modify the input.
x = self.bijector.inverse(y)
event_ndims = self._maybe_get_static_event_ndims()
ildj = self.bijector.inverse_log_det_jacobian(y, event_ndims=event_ndims)
if self.bijector._is_injective: # pylint: disable=protected-access
return self._finish_log_prob_for_one_fiber(y, x, ildj, event_ndims)
lp_on_fibers = [
self._finish_log_prob_for_one_fiber(y, x_i, ildj_i, event_ndims)
for x_i, ildj_i in zip(x, ildj)]
return math_ops.reduce_logsumexp(array_ops.stack(lp_on_fibers), axis=0)
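  # Sketch of the non-injective branch above (the bijector named here is an
  # assumption): for a bijector like
  # tf.contrib.distributions.bijectors.AbsoluteValue, `inverse(y)` yields the
  # tuple of fibers (-y, y); the per-fiber log-probs are combined via
  # reduce_logsumexp, i.e. log p(y) = logsumexp_i(log p_X(x_i) + ildj_i).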
def _finish_log_prob_for_one_fiber(self, y, x, ildj, event_ndims):
"""Finish computation of log_prob on one element of the inverse image."""
x = self._maybe_rotate_dims(x, rotate_right=True)
log_prob = self.distribution.log_prob(x)
if self._is_maybe_event_override:
log_prob = math_ops.reduce_sum(log_prob, self._reduce_event_indices)
log_prob += math_ops.cast(ildj, log_prob.dtype)
if self._is_maybe_event_override and isinstance(event_ndims, int):
log_prob.set_shape(
array_ops.broadcast_static_shape(
y.get_shape().with_rank_at_least(1)[:-event_ndims],
self.batch_shape))
return log_prob
def _prob(self, y):
x = self.bijector.inverse(y)
event_ndims = self._maybe_get_static_event_ndims()
ildj = self.bijector.inverse_log_det_jacobian(y, event_ndims=event_ndims)
if self.bijector._is_injective: # pylint: disable=protected-access
return self._finish_prob_for_one_fiber(y, x, ildj, event_ndims)
prob_on_fibers = [
self._finish_prob_for_one_fiber(y, x_i, ildj_i, event_ndims)
for x_i, ildj_i in zip(x, ildj)]
return sum(prob_on_fibers)
def _finish_prob_for_one_fiber(self, y, x, ildj, event_ndims):
"""Finish computation of prob on one element of the inverse image."""
x = self._maybe_rotate_dims(x, rotate_right=True)
prob = self.distribution.prob(x)
if self._is_maybe_event_override:
prob = math_ops.reduce_prod(prob, self._reduce_event_indices)
prob *= math_ops.exp(math_ops.cast(ildj, prob.dtype))
if self._is_maybe_event_override and isinstance(event_ndims, int):
prob.set_shape(
array_ops.broadcast_static_shape(
y.get_shape().with_rank_at_least(1)[:-event_ndims],
self.batch_shape))
return prob
def _log_cdf(self, y):
if self._is_maybe_event_override:
raise NotImplementedError("log_cdf is not implemented when overriding "
"event_shape")
if not self.bijector._is_injective: # pylint: disable=protected-access
raise NotImplementedError("log_cdf is not implemented when "
"bijector is not injective.")
x = self.bijector.inverse(y)
return self.distribution.log_cdf(x)
def _cdf(self, y):
if self._is_maybe_event_override:
raise NotImplementedError("cdf is not implemented when overriding "
"event_shape")
if not self.bijector._is_injective: # pylint: disable=protected-access
raise NotImplementedError("cdf is not implemented when "
"bijector is not injective.")
x = self.bijector.inverse(y)
return self.distribution.cdf(x)
def _log_survival_function(self, y):
if self._is_maybe_event_override:
raise NotImplementedError("log_survival_function is not implemented when "
"overriding event_shape")
if not self.bijector._is_injective: # pylint: disable=protected-access
raise NotImplementedError("log_survival_function is not implemented when "
"bijector is not injective.")
x = self.bijector.inverse(y)
return self.distribution.log_survival_function(x)
def _survival_function(self, y):
if self._is_maybe_event_override:
raise NotImplementedError("survival_function is not implemented when "
"overriding event_shape")
if not self.bijector._is_injective: # pylint: disable=protected-access
raise NotImplementedError("survival_function is not implemented when "
"bijector is not injective.")
x = self.bijector.inverse(y)
return self.distribution.survival_function(x)
def _quantile(self, value):
if self._is_maybe_event_override:
raise NotImplementedError("quantile is not implemented when overriding "
"event_shape")
if not self.bijector._is_injective: # pylint: disable=protected-access
raise NotImplementedError("quantile is not implemented when "
"bijector is not injective.")
# x_q is the "qth quantile" of X iff q = P[X <= x_q]. Now, since X =
# g^{-1}(Y), q = P[X <= x_q] = P[g^{-1}(Y) <= x_q] = P[Y <= g(x_q)],
# implies the qth quantile of Y is g(x_q).
inv_cdf = self.distribution.quantile(value)
return self.bijector.forward(inv_cdf)
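  # Worked sketch (names assumed; requires a monotone-increasing bijector):
  # for log_normal = TransformedDistribution(Normal(0., 1.), bijectors.Exp()),
  # the identity above gives
  #   log_normal.quantile(q) == tf.exp(Normal(0., 1.).quantile(q))
  # because a monotone g maps the q-th quantile of X to the q-th quantile of Y.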
def _entropy(self):
if not self.bijector.is_constant_jacobian:
raise NotImplementedError("entropy is not implemented")
if not self.bijector._is_injective: # pylint: disable=protected-access
raise NotImplementedError("entropy is not implemented when "
"bijector is not injective.")
# Suppose Y = g(X) where g is a diffeomorphism and X is a continuous rv. It
# can be shown that:
# H[Y] = H[X] + E_X[(log o abs o det o J o g)(X)].
# If is_constant_jacobian then:
# E_X[(log o abs o det o J o g)(X)] = (log o abs o det o J o g)(c)
    # where c can be anything.
entropy = self.distribution.entropy()
if self._is_maybe_event_override:
# H[X] = sum_i H[X_i] if X_i are mutually independent.
# This means that a reduce_sum is a simple rescaling.
entropy *= math_ops.cast(math_ops.reduce_prod(self._override_event_shape),
dtype=entropy.dtype.base_dtype)
if self._is_maybe_batch_override:
new_shape = array_ops.concat([
_ones_like(self._override_batch_shape),
self.distribution.batch_shape_tensor()
], 0)
entropy = array_ops.reshape(entropy, new_shape)
multiples = array_ops.concat([
self._override_batch_shape,
_ones_like(self.distribution.batch_shape_tensor())
], 0)
entropy = array_ops.tile(entropy, multiples)
dummy = array_ops.zeros(
shape=array_ops.concat(
[self.batch_shape_tensor(), self.event_shape_tensor()],
0),
dtype=self.dtype)
event_ndims = (self.event_shape.ndims if self.event_shape.ndims is not None
else array_ops.size(self.event_shape_tensor()))
ildj = self.bijector.inverse_log_det_jacobian(
dummy, event_ndims=event_ndims)
entropy -= math_ops.cast(ildj, entropy.dtype)
entropy.set_shape(self.batch_shape)
return entropy
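  # Numeric sketch of the identity above (affine bijector assumed): for
  # Y = 2 * X + 1 with X ~ Normal(0., 1.), H[Y] = H[X] + log(2); the
  # constant-Jacobian correction is just log|scale|, which the code obtains
  # as -inverse_log_det_jacobian evaluated at a dummy point.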
def _maybe_validate_shape_override(self, override_shape, base_is_scalar,
validate_args, name):
"""Helper to __init__ which ensures override batch/event_shape are valid."""
if override_shape is None:
override_shape = []
override_shape = ops.convert_to_tensor(override_shape, dtype=dtypes.int32,
name=name)
if not override_shape.dtype.is_integer:
raise TypeError("shape override must be an integer")
override_is_scalar = _is_scalar_from_shape(override_shape)
if tensor_util.constant_value(override_is_scalar):
return self._empty
dynamic_assertions = []
if override_shape.get_shape().ndims is not None:
if override_shape.get_shape().ndims != 1:
raise ValueError("shape override must be a vector")
elif validate_args:
dynamic_assertions += [check_ops.assert_rank(
override_shape, 1,
message="shape override must be a vector")]
if tensor_util.constant_value(override_shape) is not None:
if any(s <= 0 for s in tensor_util.constant_value(override_shape)):
raise ValueError("shape override must have positive elements")
elif validate_args:
dynamic_assertions += [check_ops.assert_positive(
override_shape,
message="shape override must have positive elements")]
is_both_nonscalar = _logical_and(_logical_not(base_is_scalar),
_logical_not(override_is_scalar))
if tensor_util.constant_value(is_both_nonscalar) is not None:
if tensor_util.constant_value(is_both_nonscalar):
raise ValueError("base distribution not scalar")
elif validate_args:
dynamic_assertions += [check_ops.assert_equal(
is_both_nonscalar, False,
message="base distribution not scalar")]
if not dynamic_assertions:
return override_shape
return control_flow_ops.with_dependencies(
dynamic_assertions, override_shape)
def _maybe_rotate_dims(self, x, rotate_right=False):
"""Helper which rolls left event_dims left or right event_dims right."""
needs_rotation_const = tensor_util.constant_value(self._needs_rotation)
if needs_rotation_const is not None and not needs_rotation_const:
return x
ndims = array_ops.rank(x)
n = (ndims - self._rotate_ndims) if rotate_right else self._rotate_ndims
return array_ops.transpose(
x, _concat_vectors(math_ops.range(n, ndims), math_ops.range(0, n)))
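  # Shape sketch (ranks assumed): with self._rotate_ndims == 1 and x of shape
  # [n, b, e], the left rotation produces shape [b, e, n]; calling again with
  # rotate_right=True restores [n, b, e].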
def _maybe_get_static_event_ndims(self):
if self.event_shape.ndims is not None:
return self.event_shape.ndims
event_ndims = array_ops.size(self.event_shape_tensor())
event_ndims_ = distribution_util.maybe_get_static_value(event_ndims)
if event_ndims_ is not None:
return event_ndims_
return event_ndims
| jbedorf/tensorflow | tensorflow/python/ops/distributions/transformed_distribution.py | Python | apache-2.0 | 27,637 | 0.005826 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import contextlib
import os
import subprocess
from .tester import Tester
from .util import run_cmd, log
# FIXME(sbinet): revisit for Go modules
_HOME = os.getenv("HOME", "~")
_GOPATH = os.getenv("GOPATH", os.path.join(_HOME, "go"))
_GOBIN = os.environ.get("GOBIN", os.path.join(_GOPATH, "bin"))
_GO_INTEGRATION_EXE = os.path.join(_GOBIN, "arrow-json-integration-test")
_STREAM_TO_FILE = os.path.join(_GOBIN, "arrow-stream-to-file")
_FILE_TO_STREAM = os.path.join(_GOBIN, "arrow-file-to-stream")
_FLIGHT_SERVER_CMD = [os.path.join(_GOBIN, "arrow-flight-integration-server")]
_FLIGHT_CLIENT_CMD = [
os.path.join(_GOBIN, "arrow-flight-integration-client"),
"-host",
"localhost",
]
class GoTester(Tester):
PRODUCER = True
CONSUMER = True
FLIGHT_SERVER = True
FLIGHT_CLIENT = True
name = 'Go'
def _run(self, arrow_path=None, json_path=None, command='VALIDATE'):
cmd = [_GO_INTEGRATION_EXE]
if arrow_path is not None:
cmd.extend(['-arrow', arrow_path])
if json_path is not None:
cmd.extend(['-json', json_path])
cmd.extend(['-mode', command])
if self.debug:
log(' '.join(cmd))
run_cmd(cmd)
def validate(self, json_path, arrow_path, quirks=None):
return self._run(arrow_path, json_path, 'VALIDATE')
def json_to_file(self, json_path, arrow_path):
return self._run(arrow_path, json_path, 'JSON_TO_ARROW')
def stream_to_file(self, stream_path, file_path):
cmd = [_STREAM_TO_FILE, '<', stream_path, '>', file_path]
self.run_shell_command(cmd)
def file_to_stream(self, file_path, stream_path):
cmd = [_FILE_TO_STREAM, file_path, '>', stream_path]
self.run_shell_command(cmd)
@contextlib.contextmanager
def flight_server(self, scenario_name=None):
cmd = _FLIGHT_SERVER_CMD + ['-port=0']
if scenario_name:
cmd = cmd + ['-scenario', scenario_name]
if self.debug:
log(' '.join(cmd))
server = subprocess.Popen(
cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
try:
output = server.stdout.readline().decode()
if not output.startswith('Server listening on localhost:'):
server.kill()
out, err = server.communicate()
raise RuntimeError(
'Flight-Go server did not start properly, '
'stdout: \n{}\n\nstderr:\n{}\n'.format(
output + out.decode(), err.decode()
)
)
port = int(output.split(':')[1])
yield port
finally:
server.kill()
server.wait(5)
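    # Usage sketch (constructor arguments and scenario name are assumptions):
    #
    #   tester = GoTester(...)
    #   with tester.flight_server(scenario_name='middleware') as port:
    #       tester.flight_request(port, scenario_name='middleware')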
def flight_request(self, port, json_path=None, scenario_name=None):
cmd = _FLIGHT_CLIENT_CMD + [
'-port=' + str(port),
]
if json_path:
cmd.extend(('-path', json_path))
elif scenario_name:
cmd.extend(('-scenario', scenario_name))
else:
raise TypeError('Must provide one of json_path or scenario_name')
if self.debug:
log(' '.join(cmd))
run_cmd(cmd)
| apache/arrow | dev/archery/archery/integration/tester_go.py | Python | apache-2.0 | 3,999 | 0 |
"""
This module implements connections for MySQLdb. Presently there is
only one class: Connection. Others are unlikely. However, you might
want to make your own subclasses. In most cases, you will probably
override Connection.default_cursor with a non-standard Cursor class.
"""
from MySQLdb import cursors
from _mysql_exceptions import Warning, Error, InterfaceError, DataError, \
DatabaseError, OperationalError, IntegrityError, InternalError, \
NotSupportedError, ProgrammingError
import types, _mysql
import re
def defaulterrorhandler(connection, cursor, errorclass, errorvalue):
"""
If cursor is not None, (errorclass, errorvalue) is appended to
cursor.messages; otherwise it is appended to
connection.messages. Then errorclass is raised with errorvalue as
the value.
You can override this with your own error handler by assigning it
to the instance.
"""
error = errorclass, errorvalue
if cursor:
cursor.messages.append(error)
else:
connection.messages.append(error)
del cursor
del connection
raise errorclass, errorvalue
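# Sketch of installing a custom handler on an instance (`log` is a
# hypothetical logger, not part of this module):
#
#   def lenient_errorhandler(connection, cursor, errorclass, errorvalue):
#       log.warning("MySQL error suppressed: %s", errorvalue)
#
#   conn.errorhandler = lenient_errorhandler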
re_numeric_part = re.compile(r"^(\d+)")
def numeric_part(s):
"""Returns the leading numeric part of a string.
>>> numeric_part("20-alpha")
20
>>> numeric_part("foo")
>>> numeric_part("16b")
16
"""
m = re_numeric_part.match(s)
if m:
return int(m.group(1))
return None
class Connection(_mysql.connection):
"""MySQL Database Connection Object"""
default_cursor = cursors.Cursor
def __init__(self, *args, **kwargs):
"""
Create a connection to the database. It is strongly recommended
that you only use keyword parameters. Consult the MySQL C API
documentation for more information.
host
string, host to connect
user
string, user to connect as
passwd
string, password to use
db
string, database to use
port
integer, TCP/IP port to connect to
unix_socket
string, location of unix_socket to use
conv
conversion dictionary, see MySQLdb.converters
connect_timeout
number of seconds to wait before the connection attempt
fails.
compress
if set, compression is enabled
named_pipe
if set, a named pipe is used to connect (Windows only)
init_command
command which is run once the connection is created
read_default_file
file from which default client values are read
read_default_group
configuration group to use from the default file
cursorclass
class object, used to create cursors (keyword only)
use_unicode
            If True, text-like columns are returned as unicode objects
            using the connection's character set. Otherwise, text-like
            columns are returned as strings. Unicode objects will always
            be encoded to the connection's character set regardless of
            this setting.
charset
If supplied, the connection character set will be changed
to this character set (MySQL-4.1 and newer). This implies
use_unicode=True.
sql_mode
If supplied, the session SQL mode will be changed to this
setting (MySQL-4.1 and newer). For more details and legal
values, see the MySQL documentation.
client_flag
integer, flags to use or 0
(see MySQL docs or constants/CLIENTS.py)
ssl
dictionary or mapping, contains SSL connection parameters;
see the MySQL documentation for more details
(mysql_ssl_set()). If this is set, and the client does not
support SSL, NotSupportedError will be raised.
local_infile
integer, non-zero enables LOAD LOCAL INFILE; zero disables
autocommit
If False (default), autocommit is disabled.
If True, autocommit is enabled.
If None, autocommit isn't set and server default is used.
There are a number of undocumented, non-standard methods. See the
documentation for the MySQL C API for some hints on what they do.
"""
from MySQLdb.constants import CLIENT, FIELD_TYPE
from MySQLdb.converters import conversions
from weakref import proxy
kwargs2 = kwargs.copy()
if 'conv' in kwargs:
conv = kwargs['conv']
else:
conv = conversions
conv2 = {}
for k, v in conv.items():
if isinstance(k, int) and isinstance(v, list):
conv2[k] = v[:]
else:
conv2[k] = v
kwargs2['conv'] = conv2
cursorclass = kwargs2.pop('cursorclass', self.default_cursor)
charset = kwargs2.pop('charset', '')
if charset:
use_unicode = True
else:
use_unicode = False
use_unicode = kwargs2.pop('use_unicode', use_unicode)
sql_mode = kwargs2.pop('sql_mode', '')
client_flag = kwargs.get('client_flag', 0)
client_version = tuple([ numeric_part(n) for n in _mysql.get_client_info().split('.')[:2] ])
if client_version >= (4, 1):
client_flag |= CLIENT.MULTI_STATEMENTS
if client_version >= (5, 0):
client_flag |= CLIENT.MULTI_RESULTS
kwargs2['client_flag'] = client_flag
# PEP-249 requires autocommit to be initially off
autocommit = kwargs2.pop('autocommit', False)
super(Connection, self).__init__(*args, **kwargs2)
self.cursorclass = cursorclass
self.encoders = dict([ (k, v) for k, v in conv.items()
if type(k) is not int ])
self._server_version = tuple([ numeric_part(n) for n in self.get_server_info().split('.')[:2] ])
db = proxy(self)
def _get_string_literal():
def string_literal(obj, dummy=None):
return db.string_literal(obj)
return string_literal
def _get_unicode_literal():
def unicode_literal(u, dummy=None):
return db.literal(u.encode(unicode_literal.charset))
return unicode_literal
def _get_string_decoder():
def string_decoder(s):
return s.decode(string_decoder.charset)
return string_decoder
string_literal = _get_string_literal()
self.unicode_literal = unicode_literal = _get_unicode_literal()
self.string_decoder = string_decoder = _get_string_decoder()
if not charset:
charset = self.character_set_name()
self.set_character_set(charset)
if sql_mode:
self.set_sql_mode(sql_mode)
if use_unicode:
self.converter[FIELD_TYPE.STRING].append((None, string_decoder))
self.converter[FIELD_TYPE.VAR_STRING].append((None, string_decoder))
self.converter[FIELD_TYPE.VARCHAR].append((None, string_decoder))
self.converter[FIELD_TYPE.BLOB].append((None, string_decoder))
self.encoders[types.StringType] = string_literal
self.encoders[types.UnicodeType] = unicode_literal
self._transactional = self.server_capabilities & CLIENT.TRANSACTIONS
if self._transactional:
if autocommit is not None:
self.autocommit(autocommit)
self.messages = []
def autocommit(self, on):
on = bool(on)
if self.get_autocommit() != on:
_mysql.connection.autocommit(self, on)
def cursor(self, cursorclass=None):
"""
Create a cursor on which queries may be performed. The
optional cursorclass parameter is used to create the
Cursor. By default, self.cursorclass=cursors.Cursor is
used.
"""
return (cursorclass or self.cursorclass)(self)
def __enter__(self):
if self.get_autocommit():
self.query("BEGIN")
return self.cursor()
def __exit__(self, exc, value, tb):
if exc:
self.rollback()
else:
self.commit()
def literal(self, o):
"""
If o is a single object, returns an SQL literal as a string.
If o is a non-string sequence, the items of the sequence are
converted and returned as a sequence.
Non-standard. For internal use; do not use this in your
applications.
"""
return self.escape(o, self.encoders)
def begin(self):
"""Explicitly begin a connection. Non-standard.
DEPRECATED: Will be removed in 1.3.
Use an SQL BEGIN statement instead."""
from warnings import warn
warn("begin() is non-standard and will be removed in 1.3",
DeprecationWarning, 2)
self.query("BEGIN")
if not hasattr(_mysql.connection, 'warning_count'):
def warning_count(self):
"""Return the number of warnings generated from the
last query. This is derived from the info() method."""
            info = self.info()
            if info:
                return int(info.split()[-1])
else:
return 0
def set_character_set(self, charset):
"""Set the connection character set to charset. The character
set can only be changed in MySQL-4.1 and newer. If you try
to change the character set from the current value in an
older version, NotSupportedError will be raised."""
if charset == "utf8mb4":
py_charset = "utf8"
else:
py_charset = charset
if self.character_set_name() != charset:
try:
super(Connection, self).set_character_set(charset)
except AttributeError:
if self._server_version < (4, 1):
raise NotSupportedError("server is too old to set charset")
self.query('SET NAMES %s' % charset)
self.store_result()
self.string_decoder.charset = py_charset
self.unicode_literal.charset = py_charset
def set_sql_mode(self, sql_mode):
"""Set the connection sql_mode. See MySQL documentation for
legal values."""
if self._server_version < (4, 1):
raise NotSupportedError("server is too old to set sql_mode")
self.query("SET SESSION sql_mode='%s'" % sql_mode)
self.store_result()
def show_warnings(self):
"""Return detailed information about warnings as a
sequence of tuples of (Level, Code, Message). This
is only supported in MySQL-4.1 and up. If your server
is an earlier version, an empty sequence is returned."""
if self._server_version < (4,1): return ()
self.query("SHOW WARNINGS")
r = self.store_result()
warnings = r.fetch_row(0)
return warnings
Warning = Warning
Error = Error
InterfaceError = InterfaceError
DatabaseError = DatabaseError
DataError = DataError
OperationalError = OperationalError
IntegrityError = IntegrityError
InternalError = InternalError
ProgrammingError = ProgrammingError
NotSupportedError = NotSupportedError
errorhandler = defaulterrorhandler
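# Minimal usage sketch (host and credentials are placeholders):
#
#   conn = Connection(host="localhost", user="app", passwd="secret",
#                     db="test", charset="utf8", use_unicode=True)
#   with conn as cursor:        # issues BEGIN when autocommit is on
#       cursor.execute("SELECT VERSION()")
#       print cursor.fetchone()
#   conn.commit()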
| skycucumber/Messaging-Gateway | webapp/venv/lib/python2.7/site-packages/MySQLdb/connections.py | Python | gpl-2.0 | 11,777 | 0.001359 |
import phidl.geometry as pg
import gdsfactory as gf
from gdsfactory.component import Component
@gf.cell
def outline(elements, **kwargs) -> Component:
"""
Returns Component containing the outlined polygon(s).
wraps phidl.geometry.outline
Creates an outline around all the polygons passed in the `elements`
argument. `elements` may be a Device, Polygon, or list of Devices.
Args:
elements: Device(/Reference), list of Device(/Reference), or Polygon
Polygons to outline or Device containing polygons to outline.
Keyword Args:
distance: int or float
Distance to offset polygons. Positive values expand, negative shrink.
precision: float
Desired precision for rounding vertex coordinates.
num_divisions: array-like[2] of int
The number of divisions with which the geometry is divided into
multiple rectangular regions. This allows for each region to be
processed sequentially, which is more computationally efficient.
join: {'miter', 'bevel', 'round'}
Type of join used to create the offset polygon.
tolerance: int or float
For miter joints, this number must be at least 2 and it represents the
maximal distance in multiples of offset between new vertices and their
original position before beveling to avoid spikes at acute joints. For
round joints, it indicates the curvature resolution in number of
points per full circle.
join_first: bool
Join all paths before offsetting to avoid unnecessary joins in
adjacent polygon sides.
max_points: int
The maximum number of vertices within the resulting polygon.
open_ports: bool or float
If not False, holes will be cut in the outline such that the Ports are
not covered. If True, the holes will have the same width as the Ports.
            If a float, the holes will be widened by that value (useful for fully
            clearing the outline around the Ports for positive-tone processes).
        layer: int, array-like[2], or set
            Specific layer(s) to put polygon geometry on.
"""
return gf.read.from_phidl(component=pg.outline(elements, **kwargs))
def test_outline():
e1 = gf.components.ellipse(radii=(6, 6))
e2 = gf.components.ellipse(radii=(10, 4))
c = outline([e1, e2])
assert int(c.area()) == 52
if __name__ == "__main__":
e1 = gf.components.ellipse(radii=(6, 6))
e2 = gf.components.ellipse(radii=(10, 4))
c = outline([e1, e2])
c.show()
| gdsfactory/gdsfactory | gdsfactory/geometry/outline.py | Python | mit | 2,655 | 0.002637 |
"""
Test grade calculation.
"""
from django.http import Http404
from django.test import TestCase
from django.test.client import RequestFactory
from mock import patch, MagicMock
from nose.plugins.attrib import attr
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from opaque_keys.edx.locator import CourseLocator, BlockUsageLocator
from courseware.grades import (
grade,
iterate_grades_for,
ProgressSummary,
get_module_score
)
from courseware.module_render import get_module
from courseware.model_data import FieldDataCache, set_score
from courseware.tests.helpers import (
LoginEnrollmentTestCase,
get_request_for_user
)
from capa.tests.response_xml_factory import MultipleChoiceResponseXMLFactory
from student.tests.factories import UserFactory
from student.models import CourseEnrollment
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase
def _grade_with_errors(student, course, keep_raw_scores=False):
"""This fake grade method will throw exceptions for student3 and
student4, but allow any other students to go through normal grading.
It's meant to simulate when something goes really wrong while trying to
grade a particular student, so we can test that we won't kill the entire
course grading run.
"""
if student.username in ['student3', 'student4']:
raise Exception("I don't like {}".format(student.username))
return grade(student, course, keep_raw_scores=keep_raw_scores)
@attr('shard_1')
class TestGradeIteration(SharedModuleStoreTestCase):
"""
Test iteration through student gradesets.
"""
COURSE_NUM = "1000"
COURSE_NAME = "grading_test_course"
@classmethod
def setUpClass(cls):
super(TestGradeIteration, cls).setUpClass()
cls.course = CourseFactory.create(
display_name=cls.COURSE_NAME,
number=cls.COURSE_NUM
)
def setUp(self):
"""
Create a course and a handful of users to assign grades
"""
super(TestGradeIteration, self).setUp()
self.students = [
UserFactory.create(username='student1'),
UserFactory.create(username='student2'),
UserFactory.create(username='student3'),
UserFactory.create(username='student4'),
UserFactory.create(username='student5'),
]
def test_empty_student_list(self):
"""If we don't pass in any students, it should return a zero-length
iterator, but it shouldn't error."""
gradeset_results = list(iterate_grades_for(self.course.id, []))
self.assertEqual(gradeset_results, [])
def test_nonexistent_course(self):
"""If the course we want to get grades for does not exist, a `Http404`
should be raised. This is a horrible crossing of abstraction boundaries
and should be fixed, but for now we're just testing the behavior. :-("""
with self.assertRaises(Http404):
gradeset_results = iterate_grades_for(SlashSeparatedCourseKey("I", "dont", "exist"), [])
gradeset_results.next()
def test_all_empty_grades(self):
"""No students have grade entries"""
all_gradesets, all_errors = self._gradesets_and_errors_for(self.course.id, self.students)
self.assertEqual(len(all_errors), 0)
for gradeset in all_gradesets.values():
self.assertIsNone(gradeset['grade'])
self.assertEqual(gradeset['percent'], 0.0)
@patch('courseware.grades.grade', _grade_with_errors)
def test_grading_exception(self):
"""Test that we correctly capture exception messages that bubble up from
grading. Note that we only see errors at this level if the grading
process for this student fails entirely due to an unexpected event --
having errors in the problem sets will not trigger this.
We patch the grade() method with our own, which will generate the errors
for student3 and student4.
"""
all_gradesets, all_errors = self._gradesets_and_errors_for(self.course.id, self.students)
student1, student2, student3, student4, student5 = self.students
self.assertEqual(
all_errors,
{
student3: "I don't like student3",
student4: "I don't like student4"
}
)
# But we should still have five gradesets
self.assertEqual(len(all_gradesets), 5)
# Even though two will simply be empty
self.assertFalse(all_gradesets[student3])
self.assertFalse(all_gradesets[student4])
# The rest will have grade information in them
self.assertTrue(all_gradesets[student1])
self.assertTrue(all_gradesets[student2])
self.assertTrue(all_gradesets[student5])
################################# Helpers #################################
def _gradesets_and_errors_for(self, course_id, students):
"""Simple helper method to iterate through student grades and give us
two dictionaries -- one that has all students and their respective
gradesets, and one that has only students that could not be graded and
their respective error messages."""
students_to_gradesets = {}
students_to_errors = {}
for student, gradeset, err_msg in iterate_grades_for(course_id, students):
students_to_gradesets[student] = gradeset
if err_msg:
students_to_errors[student] = err_msg
return students_to_gradesets, students_to_errors
class TestFieldDataCacheScorableLocations(SharedModuleStoreTestCase):
"""
Make sure we can filter the locations we pull back student state for via
the FieldDataCache.
"""
@classmethod
def setUpClass(cls):
super(TestFieldDataCacheScorableLocations, cls).setUpClass()
cls.course = CourseFactory.create()
chapter = ItemFactory.create(category='chapter', parent=cls.course)
sequential = ItemFactory.create(category='sequential', parent=chapter)
vertical = ItemFactory.create(category='vertical', parent=sequential)
ItemFactory.create(category='video', parent=vertical)
ItemFactory.create(category='html', parent=vertical)
ItemFactory.create(category='discussion', parent=vertical)
ItemFactory.create(category='problem', parent=vertical)
def setUp(self):
super(TestFieldDataCacheScorableLocations, self).setUp()
self.student = UserFactory.create()
CourseEnrollment.enroll(self.student, self.course.id)
class TestProgressSummary(TestCase):
"""
Test the method that calculates the score for a given block based on the
cumulative scores of its children. This test class uses a hard-coded block
hierarchy with scores as follows:
a
+--------+--------+
b c
+--------------+-----------+ |
d e f g
+-----+ +-----+-----+ | |
h i j k l m n
(2/5) (3/5) (0/1) - (1/3) - (3/10)
"""
# Tell Django to clean out all databases, not just default
multi_db = True
def setUp(self):
super(TestProgressSummary, self).setUp()
self.course_key = CourseLocator(
org='some_org',
course='some_course',
run='some_run'
)
self.loc_a = self.create_location('chapter', 'a')
self.loc_b = self.create_location('section', 'b')
self.loc_c = self.create_location('section', 'c')
self.loc_d = self.create_location('vertical', 'd')
self.loc_e = self.create_location('vertical', 'e')
self.loc_f = self.create_location('vertical', 'f')
self.loc_g = self.create_location('vertical', 'g')
self.loc_h = self.create_location('problem', 'h')
self.loc_i = self.create_location('problem', 'i')
self.loc_j = self.create_location('problem', 'j')
self.loc_k = self.create_location('html', 'k')
self.loc_l = self.create_location('problem', 'l')
self.loc_m = self.create_location('html', 'm')
self.loc_n = self.create_location('problem', 'n')
weighted_scores = {
self.loc_h: self.create_score(2, 5),
self.loc_i: self.create_score(3, 5),
self.loc_j: self.create_score(0, 1),
self.loc_l: self.create_score(1, 3),
self.loc_n: self.create_score(3, 10),
}
locations_to_scored_children = {
self.loc_a: [self.loc_h, self.loc_i, self.loc_j, self.loc_l, self.loc_n],
self.loc_b: [self.loc_h, self.loc_i, self.loc_j, self.loc_l],
self.loc_c: [self.loc_n],
self.loc_d: [self.loc_h, self.loc_i],
self.loc_e: [self.loc_j, self.loc_l],
self.loc_f: [],
self.loc_g: [self.loc_n],
self.loc_k: [],
self.loc_m: [],
}
self.progress_summary = ProgressSummary(
None, weighted_scores, locations_to_scored_children
)
def create_score(self, earned, possible):
"""
Create a new mock Score object with specified earned and possible values
"""
score = MagicMock()
score.possible = possible
score.earned = earned
return score
def create_location(self, block_type, block_id):
"""
Create a new BlockUsageLocation with the given type and ID.
"""
return BlockUsageLocator(
course_key=self.course_key, block_type=block_type, block_id=block_id
)
def test_score_chapter(self):
earned, possible = self.progress_summary.score_for_module(self.loc_a)
self.assertEqual(earned, 9)
self.assertEqual(possible, 24)
def test_score_section_many_leaves(self):
earned, possible = self.progress_summary.score_for_module(self.loc_b)
self.assertEqual(earned, 6)
self.assertEqual(possible, 14)
def test_score_section_one_leaf(self):
earned, possible = self.progress_summary.score_for_module(self.loc_c)
self.assertEqual(earned, 3)
self.assertEqual(possible, 10)
def test_score_vertical_two_leaves(self):
earned, possible = self.progress_summary.score_for_module(self.loc_d)
self.assertEqual(earned, 5)
self.assertEqual(possible, 10)
def test_score_vertical_two_leaves_one_unscored(self):
earned, possible = self.progress_summary.score_for_module(self.loc_e)
self.assertEqual(earned, 1)
self.assertEqual(possible, 4)
def test_score_vertical_no_score(self):
earned, possible = self.progress_summary.score_for_module(self.loc_f)
self.assertEqual(earned, 0)
self.assertEqual(possible, 0)
def test_score_vertical_one_leaf(self):
earned, possible = self.progress_summary.score_for_module(self.loc_g)
self.assertEqual(earned, 3)
self.assertEqual(possible, 10)
def test_score_leaf(self):
earned, possible = self.progress_summary.score_for_module(self.loc_h)
self.assertEqual(earned, 2)
self.assertEqual(possible, 5)
def test_score_leaf_no_score(self):
earned, possible = self.progress_summary.score_for_module(self.loc_m)
self.assertEqual(earned, 0)
self.assertEqual(possible, 0)
class TestGetModuleScore(LoginEnrollmentTestCase, SharedModuleStoreTestCase):
"""
Test get_module_score
"""
@classmethod
def setUpClass(cls):
super(TestGetModuleScore, cls).setUpClass()
cls.course = CourseFactory.create()
cls.chapter = ItemFactory.create(
parent=cls.course,
category="chapter",
display_name="Test Chapter"
)
cls.seq1 = ItemFactory.create(
parent=cls.chapter,
category='sequential',
display_name="Test Sequential 1",
graded=True
)
cls.seq2 = ItemFactory.create(
parent=cls.chapter,
category='sequential',
display_name="Test Sequential 2",
graded=True
)
cls.seq3 = ItemFactory.create(
parent=cls.chapter,
category='sequential',
display_name="Test Sequential 3",
graded=True
)
cls.vert1 = ItemFactory.create(
parent=cls.seq1,
category='vertical',
display_name='Test Vertical 1'
)
cls.vert2 = ItemFactory.create(
parent=cls.seq2,
category='vertical',
display_name='Test Vertical 2'
)
cls.vert3 = ItemFactory.create(
parent=cls.seq3,
category='vertical',
display_name='Test Vertical 3'
)
cls.randomize = ItemFactory.create(
parent=cls.vert2,
category='randomize',
display_name='Test Randomize'
)
cls.library_content = ItemFactory.create(
parent=cls.vert3,
category='library_content',
display_name='Test Library Content'
)
problem_xml = MultipleChoiceResponseXMLFactory().build_xml(
question_text='The correct answer is Choice 3',
choices=[False, False, True, False],
choice_names=['choice_0', 'choice_1', 'choice_2', 'choice_3']
)
cls.problem1 = ItemFactory.create(
parent=cls.vert1,
category="problem",
display_name="Test Problem 1",
data=problem_xml
)
cls.problem2 = ItemFactory.create(
parent=cls.vert1,
category="problem",
display_name="Test Problem 2",
data=problem_xml
)
cls.problem3 = ItemFactory.create(
parent=cls.randomize,
category="problem",
display_name="Test Problem 3",
data=problem_xml
)
cls.problem4 = ItemFactory.create(
parent=cls.randomize,
category="problem",
display_name="Test Problem 4",
data=problem_xml
)
cls.problem5 = ItemFactory.create(
parent=cls.library_content,
category="problem",
display_name="Test Problem 5",
data=problem_xml
)
cls.problem6 = ItemFactory.create(
parent=cls.library_content,
category="problem",
display_name="Test Problem 6",
data=problem_xml
)
def setUp(self):
"""
Set up test course
"""
super(TestGetModuleScore, self).setUp()
self.request = get_request_for_user(UserFactory())
self.client.login(username=self.request.user.username, password="test")
CourseEnrollment.enroll(self.request.user, self.course.id)
def test_get_module_score(self):
"""
Test test_get_module_score
"""
with self.assertNumQueries(1):
score = get_module_score(self.request.user, self.course, self.seq1)
self.assertEqual(score, 0)
answer_problem(self.course, self.request, self.problem1)
answer_problem(self.course, self.request, self.problem2)
with self.assertNumQueries(1):
score = get_module_score(self.request.user, self.course, self.seq1)
self.assertEqual(score, 1.0)
answer_problem(self.course, self.request, self.problem1)
answer_problem(self.course, self.request, self.problem2, 0)
with self.assertNumQueries(1):
score = get_module_score(self.request.user, self.course, self.seq1)
self.assertEqual(score, .5)
def test_get_module_score_with_empty_score(self):
"""
Test test_get_module_score_with_empty_score
"""
set_score(self.request.user.id, self.problem1.location, None, None) # pylint: disable=no-member
set_score(self.request.user.id, self.problem2.location, None, None) # pylint: disable=no-member
with self.assertNumQueries(1):
score = get_module_score(self.request.user, self.course, self.seq1)
self.assertEqual(score, 0)
answer_problem(self.course, self.request, self.problem1)
with self.assertNumQueries(1):
score = get_module_score(self.request.user, self.course, self.seq1)
self.assertEqual(score, 0.5)
answer_problem(self.course, self.request, self.problem2)
with self.assertNumQueries(1):
score = get_module_score(self.request.user, self.course, self.seq1)
self.assertEqual(score, 1.0)
def test_get_module_score_with_randomize(self):
"""
Test test_get_module_score_with_randomize
"""
answer_problem(self.course, self.request, self.problem3)
answer_problem(self.course, self.request, self.problem4)
score = get_module_score(self.request.user, self.course, self.seq2)
self.assertEqual(score, 1.0)
def test_get_module_score_with_library_content(self):
"""
Test test_get_module_score_with_library_content
"""
answer_problem(self.course, self.request, self.problem5)
answer_problem(self.course, self.request, self.problem6)
score = get_module_score(self.request.user, self.course, self.seq3)
self.assertEqual(score, 1.0)
def answer_problem(course, request, problem, score=1):
"""
    Records a score for the given problem (full credit by default).
    Arguments:
        course (Course): Course object, the course the required problem is in
        request (Request): request Object
        problem (xblock): xblock object, the problem to be answered
        score (int): the score to record, out of a maximum of 1
            (defaults to 1, i.e. a fully correct answer)
"""
user = request.user
grade_dict = {'value': score, 'max_value': 1, 'user_id': user.id}
field_data_cache = FieldDataCache.cache_for_descriptor_descendents(
course.id,
user,
course,
depth=2
)
# pylint: disable=protected-access
module = get_module(
user,
request,
problem.scope_ids.usage_id,
field_data_cache,
)._xmodule
module.system.publish(problem, 'grade', grade_dict)
| shabab12/edx-platform | lms/djangoapps/courseware/tests/test_grades.py | Python | agpl-3.0 | 18,523 | 0.000702 |
from opensfm.actions import create_submodels
from . import command
import argparse
from opensfm.dataset import DataSet
class Command(command.CommandBase):
name = "create_submodels"
help = "Split the dataset into smaller submodels"
def run_impl(self, dataset: DataSet, args: argparse.Namespace) -> None:
create_submodels.run_dataset(dataset)
def add_arguments_impl(self, parser: argparse.ArgumentParser) -> None:
pass
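# Typical invocation sketch (dataset path is a placeholder; assumes the
# standard OpenSfM command-line entry point):
#
#   bin/opensfm create_submodels path/to/dataset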
| mapillary/OpenSfM | opensfm/commands/create_submodels.py | Python | bsd-2-clause | 454 | 0 |
'''
Created on Dec 13, 2015
@author: Shannon Litwin
'''
import Adafruit_BBIO.GPIO as GPIO
import Adafruit_BBIO.PWM as PWM
import Lib_LCD as LCD
import Lib_Main as BBB
import sys
import signal
import time
leftForward = "P8_46"
leftBackward = "P8_45"
rightForward = "P9_14"
rightBackward = "P9_16"
def Control_C_Exit(signal, frame):
GPIO.cleanup()
PWM.cleanup()
print("\nProgram halted! Exiting program!")
sys.exit()
signal.signal(signal.SIGINT, Control_C_Exit) # For cleaning up mid run
'''Keep to show Dr. Berry'''
LCD.init()
time.sleep(1)
LCD.backlight("on")
time.sleep(2)
LCD.backlight("off")
time.sleep(1)
line_message = "Hi Dr. Berry."
LCD.write_line(line_message)
time.sleep(5)
LCD.cursor_home()
long_message = "This is 35 chars and needs 2 lines."
LCD.write_screen(long_message)
time.sleep(5)
LCD.cursor_home()
long_message = "Which is fine because the screen can hold up to 80 characters."
LCD.write_screen(long_message)
time.sleep(5)
LCD.cursor_home()
long_message = "However, if the message is too long it will truncate. That is why you cannot read this entire message."
LCD.write_screen(long_message)
time.sleep(5)
LCD.clear()
m1 = "It works 1"
m2 = "It works 2"
m3 = "It works 3"
m4 = "It works 4"
time.sleep(1)
LCD.goto_line(4)
LCD.write_line(m4)
time.sleep(1)
LCD.goto_line(3)
LCD.write_line(m3)
time.sleep(1)
LCD.goto_line(2)
LCD.write_line(m2)
time.sleep(1)
LCD.goto_line(1)
LCD.write_line(m1)
LCD.clear()
# Example: pause for 3 seconds with a busy-wait loop
# start = time.time()
# end = time.time()
# while (end - start) < 3:
#     end = time.time()
BBB.cleanup_all()
| ValRose/Rose_Bone | PythonLibraries/lcd_demo.py | Python | mit | 1,617 | 0.006184 |
from lib.flowchart.nodes.generalNode import NodeWithCtrlWidget
class myNode(NodeWithCtrlWidget):
    '''This is a test docstring.'''
nodeName = 'myTestNode'
uiTemplate = [{'name': 'HNO3', 'type': 'list', 'value': 'Closest Time'},
{'name': 'C2H5OH', 'type': 'bool', 'value': 0},
{'name': 'H20', 'type': 'str', 'value': '?/?'}]
def __init__(self, name, **kwargs):
super(myNode, self).__init__(name, terminals={'In': {'io': 'in'}, 'Out': {'io': 'out'}}, **kwargs)
    def process(self, In):
        print ('processing')
        # Pass the input through; flowchart nodes return a dict keyed by
        # output-terminal name so the 'Out' terminal actually emits data.
        return {'Out': In}
| cdd1969/pygwa | lib/flowchart/nodes/n000_testnode/myNode.py | Python | gpl-2.0 | 578 | 0.00519 |
#!/usr/bin/env python3
# Copyright (c) 2014-2016 The Sarielsaz Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test behavior of headers messages to announce blocks.
Setup:
- Two nodes, two p2p connections to node0. One p2p connection should only ever
receive inv's (omitted from testing description below, this is our control).
Second node is used for creating reorgs.
Part 1: No headers announcements before "sendheaders"
a. node mines a block [expect: inv]
send getdata for the block [expect: block]
b. node mines another block [expect: inv]
send getheaders and getdata [expect: headers, then block]
c. node mines another block [expect: inv]
peer mines a block, announces with header [expect: getdata]
d. node mines another block [expect: inv]
Part 2: After "sendheaders", headers announcements should generally work.
a. peer sends sendheaders [expect: no response]
peer sends getheaders with current tip [expect: no response]
b. node mines a block [expect: tip header]
c. for N in 1, ..., 10:
* for announce-type in {inv, header}
- peer mines N blocks, announces with announce-type
[ expect: getheaders/getdata or getdata, deliver block(s) ]
- node mines a block [ expect: 1 header ]
Part 3: Headers announcements stop after large reorg and resume after getheaders or inv from peer.
- For response-type in {inv, getheaders}
* node mines a 7 block reorg [ expect: headers announcement of 8 blocks ]
* node mines an 8-block reorg [ expect: inv at tip ]
* peer responds with getblocks/getdata [expect: inv, blocks ]
* node mines another block [ expect: inv at tip, peer sends getdata, expect: block ]
* node mines another block at tip [ expect: inv ]
* peer responds with getheaders with an old hashstop more than 8 blocks back [expect: headers]
* peer requests block [ expect: block ]
* node mines another block at tip [ expect: inv, peer sends getdata, expect: block ]
* peer sends response-type [expect headers if getheaders, getheaders/getdata if mining new block]
* node mines 1 block [expect: 1 header, peer responds with getdata]
Part 4: Test direct fetch behavior
a. Announce 2 old block headers.
Expect: no getdata requests.
b. Announce 3 new blocks via 1 headers message.
Expect: one getdata request for all 3 blocks.
(Send blocks.)
c. Announce 1 header that forks off the last two blocks.
Expect: no response.
d. Announce 1 more header that builds on that fork.
Expect: one getdata request for two blocks.
e. Announce 16 more headers that build on that fork.
Expect: getdata request for 14 more blocks.
f. Announce 1 more header that builds on that fork.
Expect: no response.
Part 5: Test handling of headers that don't connect.
a. Repeat 10 times:
1. Announce a header that doesn't connect.
Expect: getheaders message
2. Send headers chain.
Expect: getdata for the missing blocks, tip update.
b. Then send 9 more headers that don't connect.
Expect: getheaders message each time.
c. Announce a header that does connect.
Expect: no response.
d. Announce 49 headers that don't connect.
Expect: getheaders message each time.
e. Announce one more that doesn't connect.
Expect: disconnect.
"""
from test_framework.mininode import *
from test_framework.test_framework import SarielsazTestFramework
from test_framework.util import *
from test_framework.blocktools import create_block, create_coinbase
direct_fetch_response_time = 0.05
class TestNode(NodeConnCB):
def __init__(self):
super().__init__()
self.block_announced = False
self.last_blockhash_announced = None
def clear_last_announcement(self):
with mininode_lock:
self.block_announced = False
self.last_message.pop("inv", None)
self.last_message.pop("headers", None)
# Request data for a list of block hashes
def get_data(self, block_hashes):
msg = msg_getdata()
for x in block_hashes:
msg.inv.append(CInv(2, x))
self.connection.send_message(msg)
def get_headers(self, locator, hashstop):
msg = msg_getheaders()
msg.locator.vHave = locator
msg.hashstop = hashstop
self.connection.send_message(msg)
def send_block_inv(self, blockhash):
msg = msg_inv()
msg.inv = [CInv(2, blockhash)]
self.connection.send_message(msg)
def on_inv(self, conn, message):
self.block_announced = True
self.last_blockhash_announced = message.inv[-1].hash
def on_headers(self, conn, message):
if len(message.headers):
self.block_announced = True
message.headers[-1].calc_sha256()
self.last_blockhash_announced = message.headers[-1].sha256
# Test whether the last announcement we received had the
# right header or the right inv
# inv and headers should be lists of block hashes
def check_last_announcement(self, headers=None, inv=None):
        expect_headers = headers if headers is not None else []
        expect_inv = inv if inv is not None else []
test_function = lambda: self.block_announced
wait_until(test_function, timeout=60, lock=mininode_lock)
with mininode_lock:
self.block_announced = False
success = True
compare_inv = []
if "inv" in self.last_message:
compare_inv = [x.hash for x in self.last_message["inv"].inv]
if compare_inv != expect_inv:
success = False
hash_headers = []
if "headers" in self.last_message:
# treat headers as a list of block hashes
hash_headers = [ x.sha256 for x in self.last_message["headers"].headers ]
if hash_headers != expect_headers:
success = False
self.last_message.pop("inv", None)
self.last_message.pop("headers", None)
return success
def wait_for_getdata(self, hash_list, timeout=60):
if hash_list == []:
return
test_function = lambda: "getdata" in self.last_message and [x.hash for x in self.last_message["getdata"].inv] == hash_list
wait_until(test_function, timeout=timeout, lock=mininode_lock)
return
def wait_for_block_announcement(self, block_hash, timeout=60):
test_function = lambda: self.last_blockhash_announced == block_hash
wait_until(test_function, timeout=timeout, lock=mininode_lock)
return
def send_header_for_blocks(self, new_blocks):
headers_message = msg_headers()
headers_message.headers = [ CBlockHeader(b) for b in new_blocks ]
self.send_message(headers_message)
def send_getblocks(self, locator):
getblocks_message = msg_getblocks()
getblocks_message.locator.vHave = locator
self.send_message(getblocks_message)
class SendHeadersTest(SarielsazTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 2
# mine count blocks and return the new tip
def mine_blocks(self, count):
# Clear out last block announcement from each p2p listener
[ x.clear_last_announcement() for x in self.p2p_connections ]
self.nodes[0].generate(count)
return int(self.nodes[0].getbestblockhash(), 16)
# mine a reorg that invalidates length blocks (replacing them with
# length+1 blocks).
# Note: we clear the state of our p2p connections after the
# to-be-reorged-out blocks are mined, so that we don't break later tests.
# return the list of block hashes newly mined
def mine_reorg(self, length):
self.nodes[0].generate(length) # make sure all invalidated blocks are node0's
sync_blocks(self.nodes, wait=0.1)
for x in self.p2p_connections:
x.wait_for_block_announcement(int(self.nodes[0].getbestblockhash(), 16))
x.clear_last_announcement()
tip_height = self.nodes[1].getblockcount()
hash_to_invalidate = self.nodes[1].getblockhash(tip_height-(length-1))
self.nodes[1].invalidateblock(hash_to_invalidate)
all_hashes = self.nodes[1].generate(length+1) # Must be longer than the orig chain
sync_blocks(self.nodes, wait=0.1)
return [int(x, 16) for x in all_hashes]
def run_test(self):
# Setup the p2p connections and start up the network thread.
inv_node = TestNode()
test_node = TestNode()
self.p2p_connections = [inv_node, test_node]
connections = []
connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], inv_node))
# Set nServices to 0 for test_node, so no block download will occur outside of
# direct fetching
connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], test_node, services=0))
inv_node.add_connection(connections[0])
test_node.add_connection(connections[1])
NetworkThread().start() # Start up network handling in another thread
# Test logic begins here
inv_node.wait_for_verack()
test_node.wait_for_verack()
tip = int(self.nodes[0].getbestblockhash(), 16)
# PART 1
# 1. Mine a block; expect inv announcements each time
self.log.info("Part 1: headers don't start before sendheaders message...")
for i in range(4):
old_tip = tip
tip = self.mine_blocks(1)
assert_equal(inv_node.check_last_announcement(inv=[tip]), True)
assert_equal(test_node.check_last_announcement(inv=[tip]), True)
# Try a few different responses; none should affect next announcement
if i == 0:
# first request the block
test_node.get_data([tip])
test_node.wait_for_block(tip)
elif i == 1:
# next try requesting header and block
test_node.get_headers(locator=[old_tip], hashstop=tip)
test_node.get_data([tip])
test_node.wait_for_block(tip)
test_node.clear_last_announcement() # since we requested headers...
elif i == 2:
# this time announce own block via headers
height = self.nodes[0].getblockcount()
last_time = self.nodes[0].getblock(self.nodes[0].getbestblockhash())['time']
block_time = last_time + 1
new_block = create_block(tip, create_coinbase(height+1), block_time)
new_block.solve()
test_node.send_header_for_blocks([new_block])
test_node.wait_for_getdata([new_block.sha256])
test_node.send_message(msg_block(new_block))
test_node.sync_with_ping() # make sure this block is processed
inv_node.clear_last_announcement()
test_node.clear_last_announcement()
self.log.info("Part 1: success!")
self.log.info("Part 2: announce blocks with headers after sendheaders message...")
# PART 2
# 2. Send a sendheaders message and test that headers announcements
# commence and keep working.
test_node.send_message(msg_sendheaders())
prev_tip = int(self.nodes[0].getbestblockhash(), 16)
test_node.get_headers(locator=[prev_tip], hashstop=0)
test_node.sync_with_ping()
# Now that we've synced headers, headers announcements should work
tip = self.mine_blocks(1)
assert_equal(inv_node.check_last_announcement(inv=[tip]), True)
assert_equal(test_node.check_last_announcement(headers=[tip]), True)
height = self.nodes[0].getblockcount()+1
block_time += 10 # Advance far enough ahead
for i in range(10):
# Mine i blocks, and alternate announcing either via
# inv (of tip) or via headers. After each, new blocks
# mined by the node should successfully be announced
# with block header, even though the blocks are never requested
for j in range(2):
blocks = []
for b in range(i+1):
blocks.append(create_block(tip, create_coinbase(height), block_time))
blocks[-1].solve()
tip = blocks[-1].sha256
block_time += 1
height += 1
if j == 0:
# Announce via inv
test_node.send_block_inv(tip)
test_node.wait_for_getheaders()
# Should have received a getheaders now
test_node.send_header_for_blocks(blocks)
# Test that duplicate inv's won't result in duplicate
# getdata requests, or duplicate headers announcements
[ inv_node.send_block_inv(x.sha256) for x in blocks ]
test_node.wait_for_getdata([x.sha256 for x in blocks])
inv_node.sync_with_ping()
else:
# Announce via headers
test_node.send_header_for_blocks(blocks)
test_node.wait_for_getdata([x.sha256 for x in blocks])
# Test that duplicate headers won't result in duplicate
# getdata requests (the check is further down)
inv_node.send_header_for_blocks(blocks)
inv_node.sync_with_ping()
[ test_node.send_message(msg_block(x)) for x in blocks ]
test_node.sync_with_ping()
inv_node.sync_with_ping()
# This block should not be announced to the inv node (since it also
# broadcast it)
assert "inv" not in inv_node.last_message
assert "headers" not in inv_node.last_message
tip = self.mine_blocks(1)
assert_equal(inv_node.check_last_announcement(inv=[tip]), True)
assert_equal(test_node.check_last_announcement(headers=[tip]), True)
height += 1
block_time += 1
self.log.info("Part 2: success!")
self.log.info("Part 3: headers announcements can stop after large reorg, and resume after headers/inv from peer...")
# PART 3. Headers announcements can stop after large reorg, and resume after
# getheaders or inv from peer.
for j in range(2):
# First try mining a reorg that can propagate with header announcement
new_block_hashes = self.mine_reorg(length=7)
tip = new_block_hashes[-1]
assert_equal(inv_node.check_last_announcement(inv=[tip]), True)
assert_equal(test_node.check_last_announcement(headers=new_block_hashes), True)
block_time += 8
# Mine a too-large reorg, which should be announced with a single inv
new_block_hashes = self.mine_reorg(length=8)
tip = new_block_hashes[-1]
assert_equal(inv_node.check_last_announcement(inv=[tip]), True)
assert_equal(test_node.check_last_announcement(inv=[tip]), True)
block_time += 9
            # Block hashes must be zero-padded to 64 hex characters ("%02x" would drop leading zeros).
            fork_point = self.nodes[0].getblock("%064x" % new_block_hashes[0])["previousblockhash"]
fork_point = int(fork_point, 16)
# Use getblocks/getdata
test_node.send_getblocks(locator = [fork_point])
assert_equal(test_node.check_last_announcement(inv=new_block_hashes), True)
test_node.get_data(new_block_hashes)
test_node.wait_for_block(new_block_hashes[-1])
for i in range(3):
# Mine another block, still should get only an inv
tip = self.mine_blocks(1)
assert_equal(inv_node.check_last_announcement(inv=[tip]), True)
assert_equal(test_node.check_last_announcement(inv=[tip]), True)
if i == 0:
# Just get the data -- shouldn't cause headers announcements to resume
test_node.get_data([tip])
test_node.wait_for_block(tip)
elif i == 1:
# Send a getheaders message that shouldn't trigger headers announcements
# to resume (best header sent will be too old)
test_node.get_headers(locator=[fork_point], hashstop=new_block_hashes[1])
test_node.get_data([tip])
test_node.wait_for_block(tip)
elif i == 2:
test_node.get_data([tip])
test_node.wait_for_block(tip)
# This time, try sending either a getheaders to trigger resumption
# of headers announcements, or mine a new block and inv it, also
# triggering resumption of headers announcements.
if j == 0:
test_node.get_headers(locator=[tip], hashstop=0)
test_node.sync_with_ping()
else:
test_node.send_block_inv(tip)
test_node.sync_with_ping()
# New blocks should now be announced with header
tip = self.mine_blocks(1)
assert_equal(inv_node.check_last_announcement(inv=[tip]), True)
assert_equal(test_node.check_last_announcement(headers=[tip]), True)
self.log.info("Part 3: success!")
self.log.info("Part 4: Testing direct fetch behavior...")
tip = self.mine_blocks(1)
height = self.nodes[0].getblockcount() + 1
last_time = self.nodes[0].getblock(self.nodes[0].getbestblockhash())['time']
block_time = last_time + 1
# Create 2 blocks. Send the blocks, then send the headers.
blocks = []
for b in range(2):
blocks.append(create_block(tip, create_coinbase(height), block_time))
blocks[-1].solve()
tip = blocks[-1].sha256
block_time += 1
height += 1
inv_node.send_message(msg_block(blocks[-1]))
inv_node.sync_with_ping() # Make sure blocks are processed
test_node.last_message.pop("getdata", None)
test_node.send_header_for_blocks(blocks)
test_node.sync_with_ping()
# should not have received any getdata messages
with mininode_lock:
assert "getdata" not in test_node.last_message
# This time, direct fetch should work
blocks = []
for b in range(3):
blocks.append(create_block(tip, create_coinbase(height), block_time))
blocks[-1].solve()
tip = blocks[-1].sha256
block_time += 1
height += 1
test_node.send_header_for_blocks(blocks)
test_node.sync_with_ping()
test_node.wait_for_getdata([x.sha256 for x in blocks], timeout=direct_fetch_response_time)
[ test_node.send_message(msg_block(x)) for x in blocks ]
test_node.sync_with_ping()
# Now announce a header that forks the last two blocks
tip = blocks[0].sha256
height -= 1
blocks = []
# Create extra blocks for later
for b in range(20):
blocks.append(create_block(tip, create_coinbase(height), block_time))
blocks[-1].solve()
tip = blocks[-1].sha256
block_time += 1
height += 1
# Announcing one block on fork should not trigger direct fetch
# (less work than tip)
test_node.last_message.pop("getdata", None)
test_node.send_header_for_blocks(blocks[0:1])
test_node.sync_with_ping()
with mininode_lock:
assert "getdata" not in test_node.last_message
# Announcing one more block on fork should trigger direct fetch for
# both blocks (same work as tip)
test_node.send_header_for_blocks(blocks[1:2])
test_node.sync_with_ping()
test_node.wait_for_getdata([x.sha256 for x in blocks[0:2]], timeout=direct_fetch_response_time)
# Announcing 16 more headers should trigger direct fetch for 14 more
# blocks
test_node.send_header_for_blocks(blocks[2:18])
test_node.sync_with_ping()
test_node.wait_for_getdata([x.sha256 for x in blocks[2:16]], timeout=direct_fetch_response_time)
# Announcing 1 more header should not trigger any response
test_node.last_message.pop("getdata", None)
test_node.send_header_for_blocks(blocks[18:19])
test_node.sync_with_ping()
with mininode_lock:
assert "getdata" not in test_node.last_message
self.log.info("Part 4: success!")
# Now deliver all those blocks we announced.
[ test_node.send_message(msg_block(x)) for x in blocks ]
self.log.info("Part 5: Testing handling of unconnecting headers")
# First we test that receipt of an unconnecting header doesn't prevent
# chain sync.
for i in range(10):
test_node.last_message.pop("getdata", None)
blocks = []
# Create two more blocks.
for j in range(2):
blocks.append(create_block(tip, create_coinbase(height), block_time))
blocks[-1].solve()
tip = blocks[-1].sha256
block_time += 1
height += 1
# Send the header of the second block -> this won't connect.
with mininode_lock:
test_node.last_message.pop("getheaders", None)
test_node.send_header_for_blocks([blocks[1]])
test_node.wait_for_getheaders()
test_node.send_header_for_blocks(blocks)
test_node.wait_for_getdata([x.sha256 for x in blocks])
[ test_node.send_message(msg_block(x)) for x in blocks ]
test_node.sync_with_ping()
assert_equal(int(self.nodes[0].getbestblockhash(), 16), blocks[1].sha256)
blocks = []
# Now we test that if we repeatedly don't send connecting headers, we
# don't go into an infinite loop trying to get them to connect.
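        # This presumably mirrors the node-side MAX_UNCONNECTING_HEADERS constant
        # that bounds how many unconnecting headers messages are tolerated.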
MAX_UNCONNECTING_HEADERS = 10
for j in range(MAX_UNCONNECTING_HEADERS+1):
blocks.append(create_block(tip, create_coinbase(height), block_time))
blocks[-1].solve()
tip = blocks[-1].sha256
block_time += 1
height += 1
for i in range(1, MAX_UNCONNECTING_HEADERS):
# Send a header that doesn't connect, check that we get a getheaders.
with mininode_lock:
test_node.last_message.pop("getheaders", None)
test_node.send_header_for_blocks([blocks[i]])
test_node.wait_for_getheaders()
# Next header will connect, should re-set our count:
test_node.send_header_for_blocks([blocks[0]])
# Remove the first two entries (blocks[1] would connect):
blocks = blocks[2:]
# Now try to see how many unconnecting headers we can send
# before we get disconnected. Should be 5*MAX_UNCONNECTING_HEADERS
for i in range(5*MAX_UNCONNECTING_HEADERS - 1):
# Send a header that doesn't connect, check that we get a getheaders.
with mininode_lock:
test_node.last_message.pop("getheaders", None)
test_node.send_header_for_blocks([blocks[i%len(blocks)]])
test_node.wait_for_getheaders()
# Eventually this stops working.
test_node.send_header_for_blocks([blocks[-1]])
# Should get disconnected
test_node.wait_for_disconnect()
self.log.info("Part 5: success!")
# Finally, check that the inv node never received a getdata request,
# throughout the test
assert "getdata" not in inv_node.last_message
if __name__ == '__main__':
SendHeadersTest().main()
| sarielsaz/sarielsaz | test/functional/sendheaders.py | Python | mit | 24,054 | 0.003492 |
#!/usr/bin/env python
#------------------------------------------------------------------------------
# Copyright 2014 Esri
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#==============================================================================
#Name: ListServiceWorkspaces.py
#
#Purpose: Output service workspace information for each service in
# specified ArcGIS Server site.
#
#==============================================================================
import sys, os, traceback, datetime, ast, copy, json
sys.path.append(os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'SupportFiles'))
from AGSRestFunctions import getServiceList
from AGSRestFunctions import getServiceManifest
scriptName = os.path.basename(sys.argv[0])
exitErrCode = 1
debug = False
sectionBreak = '=' * 175
sectionBreak1 = '-' * 175
def check_args():
# ---------------------------------------------------------------------
# Check arguments
# ---------------------------------------------------------------------
    if len(sys.argv) != 6:
print '\n' + scriptName + ' <Server_FullyQualifiedDomainName> <Server_Port> <User_Name> <Password> <Use_SSL: Yes|No>'
print '\nWhere:'
print '\n\t<Server_FullyQualifiedDomainName> (required): the fully qualified domain name of the ArcGIS Server machine.'
print '\n\t<Server_Port> (required): the port number of the ArcGIS Server (specify # if no port).'
print '\n\t<User_Name> (required): ArcGIS Server for ArcGIS site administrator.'
print '\n\t<Password> (required): Password for ArcGIS Server for ArcGIS site administrator user.'
print '\n\t<Use_SSL: Yes|No> (required) Flag indicating if ArcGIS Server requires HTTPS.\n'
return None
else:
# Set variables from parameter values
server = sys.argv[1]
port = sys.argv[2]
adminuser = sys.argv[3]
password = sys.argv[4]
useSSL = sys.argv[5]
if port.strip() == '#':
port = None
if useSSL.strip().lower() in ['yes', 'ye', 'y']:
useSSL = True
else:
useSSL = False
return server, port, adminuser, password, useSSL
def parseService(service):
# Parse folder and service nameType
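    # e.g. "Utilities//Geometry.GeometryServer" -> ("Utilities", "Geometry.GeometryServer")
    # and "SampleWorldCities.MapServer" -> (None, "SampleWorldCities.MapServer")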
folder = None
serviceNameType = None
parsedService = service.split('//')
if len(parsedService) == 1:
serviceNameType = parsedService[0]
else:
folder = parsedService[0]
serviceNameType = parsedService[1]
return folder, serviceNameType
def main():
totalSuccess = True
# -------------------------------------------------
# Check arguments
# -------------------------------------------------
results = check_args()
if not results:
sys.exit(exitErrCode)
server, port, adminuser, password, useSSL = results
if debug:
print server, port, adminuser, password, useSSL
try:
# -------------------------------------------------
# Get all services that exist on server
# -------------------------------------------------
if useSSL:
protocol = 'https'
else:
protocol = 'http'
allServices = getServiceList(server, port, adminuser, password)
# Remove certain services from collection
excludeServices = ['SampleWorldCities.MapServer']
services = [service for service in allServices if service not in excludeServices]
if len(services) == 0:
raise Exception('ERROR: There are no user published ArcGIS Server services. Have you published the ArcGIS Server services?')
# -------------------------------------------------
# List service workspaces
# -------------------------------------------------
numServices = len(services)
i = 0
# Print header
        print 'ArcGIS Server|Service|On-Server Connection String or Path'
for service in services:
onServerStr = ''
folder, serviceNameType = parseService(service)
serviceManifest = getServiceManifest(server, port, adminuser, password, folder, serviceNameType)
databases = serviceManifest.get('databases')
if databases:
onServerConnStr = databases[0].get('onServerConnectionString')
if onServerConnStr:
if onServerConnStr.find('DB_CONNECTION_PROPERTIES') > 0:
# It's an enterprise geodatabase
onServerStr = onServerConnStr.split('DB_CONNECTION_PROPERTIES=')[1]
else:
onServerStr = onServerConnStr.replace('DATABASE=','')
else:
if serviceNameType.find('.GeocodeServer') > 0 or serviceNameType.find('.GPServer') > 0:
onServerStr = serviceManifest.get('resources')[0].get('serverPath')
if len(onServerStr) == 0:
onServerStr = str(serviceManifest)
print '{}|{}|{}'.format(server, service, onServerStr)
except:
totalSuccess = False
# Get the traceback object
tb = sys.exc_info()[2]
tbinfo = traceback.format_tb(tb)[0]
# Concatenate information together concerning the error into a message string
pymsg = "PYTHON ERRORS:\nTraceback info:\n" + tbinfo + "\nError Info:\n" + str(sys.exc_info()[1])
# Print Python error messages for use in Python / Python Window
print
print "***** ERROR ENCOUNTERED *****"
print pymsg + "\n"
finally:
if totalSuccess:
sys.exit(0)
else:
sys.exit(1)
if __name__ == "__main__":
main()
| Esri/ops-server-config | Utilities/ListServiceWorkspaces.py | Python | apache-2.0 | 6,504 | 0.009533 |
import math
import RingGameGlobals
import RingAction
import RingTracks
import RingTrack
import RingTrackGroup
from direct.showbase import PythonUtil
STATIC = 0
SIMPLE = 1
COMPLEX = 2
def getRandomRingTrackGroup(type, numRings, rng):
global trackListGenFuncs
funcTable = trackListGenFuncs[type][numRings - 1]
func = rng.choice(funcTable)
tracks, tOffsets, period = func(numRings, rng)
tracks, tOffsets = __scramble(tracks, tOffsets, rng)
trackGroup = RingTrackGroup.RingTrackGroup(tracks, period, trackTOffsets=tOffsets, reverseFlag=rng.choice([0, 1]), tOffset=rng.random())
return trackGroup
def __scramble(tracks, tOffsets, rng):
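    # Randomly permute the tracks (keeping tOffsets aligned with them) by
    # repeatedly drawing unused indices.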
newTracks = []
if tOffsets == None:
newTOffsets = None
else:
newTOffsets = []
used = [0] * len(tracks)
count = 0
while count < len(tracks):
i = rng.randint(0, len(tracks) - 1)
if not used[i]:
used[i] = 1
count += 1
newTracks.append(tracks[i])
if newTOffsets != None:
newTOffsets.append(tOffsets[i])
return (newTracks, newTOffsets)
def angleToXY(angle, radius = 1.0):
return [radius * math.sin(angle), radius * math.cos(angle)]
def getTightCircleStaticPositions(numRings):
positions = []
if numRings == 1:
positions.append([0, 0])
else:
radius = RingGameGlobals.RING_RADIUS * 1.5 / RingGameGlobals.MAX_TOONXZ
step = 2.0 * math.pi / float(numRings)
for i in range(0, numRings):
angle = i * step + step / 2.0
positions.append(angleToXY(angle, 1.0 / 3.0))
return positions
def get_keypad(numRings, rng):
positions = (RingTracks.center,
RingTracks.up,
RingTracks.down,
RingTracks.left,
RingTracks.right,
RingTracks.ul,
RingTracks.ur,
RingTracks.lr,
RingTracks.ll)
tracks = []
usedPositions = [None]
posScale = 0.7 + rng.random() * 0.2
for i in range(0, numRings):
pos = None
while pos in usedPositions:
pos = rng.choice(positions)
usedPositions.append(pos)
scaledPos = [0, 0]
scaledPos[0] = pos[0] * posScale
scaledPos[1] = pos[1] * posScale
action = RingAction.RingActionStaticPos(scaledPos)
track = RingTrack.RingTrack([action], [1.0])
tracks.append(track)
return (tracks, None, 1.0)
fullCirclePeriod = 6.0
plusPeriod = 4.0
def get_evenCircle(numRings, rng):
tracks = []
tOffsets = []
for i in range(0, numRings):
actions, durations = RingTracks.getCircleRingActions()
track = RingTrack.RingTrack(actions, durations)
tracks.append(track)
tOffsets.append(float(i) / numRings)
return (tracks, tOffsets, fullCirclePeriod)
def get_followCircle(numRings, rng):
tracks = []
tOffsets = []
for i in range(0, numRings):
actions, durations = RingTracks.getCircleRingActions()
track = RingTrack.RingTrack(actions, durations)
delay = 0.12
tracks.append(track)
tOffsets.append(float(i) * delay)
return (tracks, tOffsets, fullCirclePeriod)
def get_evenCircle_withStationaryCenterRings(numRings, rng):
tracks = []
tOffsets = []
numCenterRings = rng.randint(1, numRings - 1)
positions = getTightCircleStaticPositions(numCenterRings)
for i in range(0, numCenterRings):
action = RingAction.RingActionStaticPos(positions[i])
track = RingTrack.RingTrack([action])
tracks.append(track)
tOffsets.append(0)
numOuterRings = numRings - numCenterRings
for i in range(0, numOuterRings):
actions, durations = RingTracks.getCircleRingActions()
track = RingTrack.RingTrack(actions, durations)
tracks.append(track)
tOffsets.append(float(i) / numOuterRings)
return (tracks, tOffsets, fullCirclePeriod)
def __get_Slots(numRings, rng, vertical = 1):
tracks = []
tOffsets = []
fpTab = []
for i in range(numRings):
fpTab.append(PythonUtil.lineupPos(i, numRings, 2.0 / 3))
offset = 1 - fpTab[-1]
offset = rng.random() * (offset * 2) - offset
fpTab = map(lambda x: x + offset, fpTab)
for i in range(0, numRings):
if vertical:
getActionsFunc = RingTracks.getVerticalSlotActions
else:
getActionsFunc = RingTracks.getHorizontalSlotActions
actions, durations = getActionsFunc(fpTab[i])
track = RingTrack.RingTrack(actions, durations)
tracks.append(track)
tOffsets.append(float(i) / numRings * 0.5)
return (tracks, tOffsets, fullCirclePeriod)
def get_verticalSlots(numRings, rng):
return __get_Slots(numRings, rng, vertical=1)
def get_horizontalSlots(numRings, rng):
return __get_Slots(numRings, rng, vertical=0)
def get_plus(numRings, rng):
up = RingTracks.getPlusUpRingActions
down = RingTracks.getPlusDownRingActions
left = RingTracks.getPlusLeftRingActions
right = RingTracks.getPlusRightRingActions
actionSets = {2: [[up, down], [left, right]],
3: [[up, left, right],
[left, up, down],
[down, left, right],
[right, up, down]],
4: [[up,
down,
left,
right]]}
tracks = []
actionSet = rng.choice(actionSets[numRings])
for i in range(0, numRings):
actions, durations = actionSet[i]()
track = RingTrack.RingTrack(actions, durations)
tracks.append(track)
return (tracks, [0] * numRings, plusPeriod)
infinityPeriod = 5.0
fullCirclePeriodFaster = 5.0
plusPeriodFaster = 2.5
infinityTOffsets = []
def __initInfinityTOffsets():
global infinityTOffsets
offsets = [[],
[],
[],
[]]
offsets[0] = [0.0]
offsets[1] = [0.0, 3.0 / 4.0]
offsets[2] = [0.0, 1.0 / 3.0, 2.0 / 3.0]
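    # 14/23 approximates an irrational step, so successive offsets taken mod 1.0
    # spread around the cycle instead of clustering.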
inc = 14.0 / 23.0
for numRings in range(4, 5):
o = [0] * numRings
accum = 0.0
for i in range(0, numRings):
o[i] = accum % 1.0
accum += inc
offsets[numRings - 1] = o
infinityTOffsets = offsets
__initInfinityTOffsets()
def get_vertInfinity(numRings, rng):
tracks = []
for i in range(0, numRings):
actions, durations = RingTracks.getVerticalInfinityRingActions()
track = RingTrack.RingTrack(actions, durations)
tracks.append(track)
return (tracks, infinityTOffsets[numRings - 1], infinityPeriod)
def get_horizInfinity(numRings, rng):
tracks = []
for i in range(0, numRings):
actions, durations = RingTracks.getHorizontalInfinityRingActions()
track = RingTrack.RingTrack(actions, durations)
tracks.append(track)
return (tracks, infinityTOffsets[numRings - 1], infinityPeriod)
def get_evenCircle_withStationaryCenterRings_FASTER(numRings, rng):
tracks, tOffsets, period = get_evenCircle_withStationaryCenterRings(numRings, rng)
return (tracks, tOffsets, fullCirclePeriodFaster)
def get_plus_FASTER(numRings, rng):
tracks, tOffsets, period = get_plus(numRings, rng)
return (tracks, tOffsets, plusPeriodFaster)
allFuncs = [[get_keypad], [get_evenCircle,
get_followCircle,
get_evenCircle_withStationaryCenterRings,
get_verticalSlots,
get_horizontalSlots,
get_plus], [get_vertInfinity,
get_horizInfinity,
get_evenCircle_withStationaryCenterRings_FASTER,
get_plus_FASTER]]
dontUseFuncs = [[get_followCircle,
get_evenCircle_withStationaryCenterRings,
get_evenCircle_withStationaryCenterRings_FASTER,
get_plus,
get_plus_FASTER],
[],
[],
[]]
trackListGenFuncs = []
def __listComplement(list1, list2):
result = []
for item in list1:
if item not in list2:
result.append(item)
return result
def __initFuncTables():
global trackListGenFuncs
table = [[], [], []]
for diff in range(0, len(table)):
table[diff] = [[],
[],
[],
[]]
for numRings in range(0, len(table[diff])):
table[diff][numRings] = __listComplement(allFuncs[diff], dontUseFuncs[numRings])
trackListGenFuncs = table
__initFuncTables()
| ksmit799/Toontown-Source | toontown/minigame/RingTrackGroups.py | Python | mit | 8,136 | 0.004671 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
package/module TEST
Description of the test.

Author: PABLO PIZARRO @ github.com/ppizarror
Date: AUGUST 2016
License: GPLv2
"""
__author__ = "ppizarror"
# Library imports
# noinspection PyUnresolvedReferences
from _testpath import * # @UnusedWildImport
import unittest
# Test constants
DISABLE_HEAVY_TESTS = True
DISABLE_HEAVY_TESTS_MSG = "Heavy tests are disabled"
VERBOSE = False
# Load command-line arguments
if __name__ == '__main__':
from bin.arguments import argument_parser_factory
argparser = argument_parser_factory("Template Test", verbose=True, version=True,
enable_skipped_test=True).parse_args()
DISABLE_HEAVY_TESTS = argparser.enableHeavyTest
VERBOSE = argparser.verbose
# UnitTest class
class ModuleTest(unittest.TestCase):
def setUp(self):
"""
        Set up the tests.
:return: void
:rtype: None
"""
pass
# noinspection PyMethodMayBeStatic
def testA(self):
"""
        Example test.
:return: void
:rtype: None
"""
pass
@unittest.skipIf(DISABLE_HEAVY_TESTS, DISABLE_HEAVY_TESTS_MSG)
def testSkipped(self):
"""
        Example of a skipped test.
:return: void
:rtype: None
"""
pass
# Main test
if __name__ == '__main__':
runner = unittest.TextTestRunner()
itersuite = unittest.TestLoader().loadTestsFromTestCase(ModuleTest)
runner.run(itersuite)
| ppizarror/korektor | test/_template.py | Python | gpl-2.0 | 1,642 | 0.00061 |
import os
import re
from setuptools import setup, find_packages
THIS_DIR = os.path.dirname(os.path.realpath(__name__))
def read(*parts):
with open(os.path.join(THIS_DIR, *parts)) as f:
return f.read()
def get_version():
    return re.findall(r"__version__ = '([\d\.]+)'",
read('marionette', '__init__.py'), re.M)[0]
setup(name='marionette_client',
version=get_version(),
description="Marionette test automation client",
long_description='See http://marionette-client.readthedocs.org/',
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='mozilla',
author='Jonathan Griffin',
author_email='jgriffin@mozilla.com',
url='https://wiki.mozilla.org/Auto-tools/Projects/Marionette',
license='MPL',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
package_data={'marionette': ['touch/*.js']},
include_package_data=True,
zip_safe=False,
entry_points="""
# -*- Entry points: -*-
[console_scripts]
marionette = marionette.runtests:cli
""",
install_requires=read('requirements.txt').splitlines(),
)
| cstipkovic/spidermonkey-research | testing/marionette/harness/setup.py | Python | mpl-2.0 | 1,214 | 0.002471 |
import oauth2 as oauth
import sher.settings as settings
import cgi
import urlparse
import urllib
import gdata.youtube
import gdata.youtube.service
import twitter
class TwitterService(object):
def __init__(self, consumer_key, consumer_secret):
self.consumer_key = consumer_key
self.consumer_secret = consumer_secret
self.consumer = oauth.Consumer(self.consumer_key, self.consumer_secret)
self.client = oauth.Client(self.consumer)
self.access_token_url = "https://api.twitter.com/oauth/access_token"
self.request_token_url = "https://api.twitter.com/oauth/request_token"
self.authorize_url = "https://api.twitter.com/oauth/authorize"
def get_request_token(self):
request_token_url = self.request_token_url
resp, content = self.client.request(request_token_url, "POST")
if resp['status'] != '200':
raise Exception("Invalid Response from Twitter")
request_token = dict(cgi.parse_qsl(content))
self.request_token = request_token['oauth_token']
self.request_token_secret = request_token['oauth_token_secret']
return self.request_token
def get_access_token(self, oauth_verifier):
access_token_url = self.access_token_url
token = oauth.Token(self.request_token, self.request_token_secret)
token.set_verifier(oauth_verifier)
client = oauth.Client(self.consumer, token)
resp, content = client.request(access_token_url, "POST")
if resp['status'] != '200':
raise Exception("Invalid Response from Twitter")
access_token = dict(cgi.parse_qsl(content))
self.access_token = access_token['oauth_token']
self.access_token_secret = access_token['oauth_token_secret']
return access_token
def get_oauth_url(self, request_token):
return "%s?oauth_token=%s" % (self.authorize_url, request_token)
def authenticated(self, account):
"""Return an authenticated twitter API instance (python-twitter)"""
return twitter.Api(consumer_key=self.consumer_key,
consumer_secret=self.consumer_secret,
access_token_key=account.oauth_token,
access_token_secret=account.oauth_secret)
twitter_service = TwitterService(settings.TWITTER_CONSUMER_KEY, settings.TWITTER_CONSUMER_SECRET)
class YouTubeService(object):
def __init__(self, developer_key, client_id):
self.developer_key = developer_key
self.client_id = client_id
self.yt_service = gdata.youtube.service.YouTubeService()
def get_authsub_url(self, callback):
next = callback
scope = "http://gdata.youtube.com"
secure = False
session = True
return self.yt_service.GenerateAuthSubURL(next, scope, secure, session)
def upgrade_to_session(self, token):
"""
Takes an authsub token and upgrades to session token then returns that token for storing.
"""
self.yt_service.SetAuthSubToken(token)
self.yt_service.UpgradeToSessionToken()
return self.yt_service.GetAuthSubToken()
def authenticated(self, account):
self.yt_service.SetAuthSubToken(account.authsub_token)
self.yt_service.developer_key = self.developer_key
self.yt_service.client_id = self.client_id
return self.yt_service
youtube_service = YouTubeService(settings.YOUTUBE_DEVELOPER_KEY, settings.YOUTUBE_CLIENT_ID)
class FacebookService(object):
def __init__(self, app_id, app_key, app_secret):
self.app_id = app_id
self.app_key = app_key
self.app_secret = app_secret
def get_oauth_url(self):
"""Offline access gets a long-lasting token."""
return "https://www.facebook.com/dialog/oauth?client_id=%s&redirect_uri=%s&scope=read_stream,publish_stream,offline_access"
def get_access_token_url(self, callback, code):
self.access_token_url = "https://graph.facebook.com/oauth/access_token?client_id=%s&redirect_uri=%s&client_secret=%s&code=%s" % (self.app_id, callback, self.app_secret, code)
return self.access_token_url
def authenticated(self, account):
from apis import facebook
graph = facebook.GraphAPI(account.oauth_token)
return graph
facebook_service = FacebookService(settings.FACEBOOK_APP_ID, settings.FACEBOOK_APP_KEY, settings.FACEBOOK_APP_SECRET)
class FlickrService(object):
def __init__(self, api_key, secret):
self.api_key = api_key
self.secret = secret
self.auth_url = "http://flickr.com/services/auth/?"
self.rest_url = "http://flickr.com/services/rest/?"
def gen_sig(self, base_url, **kwargs):
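        # Flickr request signing: api_sig = md5(secret + k1 + v1 + k2 + v2 + ...)
        # with parameter keys sorted alphabetically, appended to base_url below.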
        from hashlib import md5
params = {}
for kwarg in kwargs:
params.update({kwarg: kwargs[kwarg]})
pkeys = params.keys()
pkeys.sort()
sigstring = self.secret + ""
for k in pkeys:
sigstring += k + str(params[k])
params['api_sig'] = md5(sigstring).hexdigest()
return base_url + urllib.urlencode(params)
def get_oauth_url(self):
"""Generates oauth url with 'delete' permission which provides both read and write permissions."""
url = self.gen_sig(self.auth_url, api_key=self.api_key, perms="delete")
return url
def get_auth_token(self, token):
"""Calls flickrs getToken to obtain a persistent auth token."""
url = self.gen_sig(self.rest_url, api_key=self.api_key, method="flickr.auth.getToken", frob=token)
return url
def authenticated(self, account, format="etree"):
import flickrapi
return flickrapi.FlickrAPI(settings.FLICKR_KEY, secret=settings.FLICKR_SECRET, token=account.oauth_token, format=format)
flickr_service = FlickrService(settings.FLICKR_KEY, settings.FLICKR_SECRET)
| viswimmer1/PythonGenerator | data/python_files/28964260/services.py | Python | gpl-2.0 | 5,901 | 0.002542 |
# PyVision License
#
# Copyright (c) 2006-2008 David S. Bolme
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither name of copyright holders nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import pyvision as pv
import numpy as np
class DetectorROI:
'''
This class defines an interface to a Region Of Interest (ROI) detector.
'''
def __init__(self,n=250,selector='bins',bin_size=50):
'''
        n - the approximate number of points requested.
        bin_size - the width and height of each bin, in pixels.
        selector - ('all', 'bins', or 'best') strategy for point selection.
        When selector is set to 'bins', the image is subdivided into bins of
        size <bin_size>X<bin_size> pixels and an equal number of points is taken
        from each of those bins. This ensures that points are found in all parts
        of the image, not just where the corners are strongest.
'''
self.n = n
self.selector = selector
self.bin_size = bin_size
pass
def detect(self,image,**kwargs):
'''
        Returns a list of regions of interest. Each element in the list is a
        list of [score, centerpoint, radius]. A radius of "None" is used for
        point detectors. Higher scores are better, and a score of "None"
        indicates that no score is available.
'''
# TODO: Call subclass
A = None
if isinstance(image,pv.Image):
A = image.asMatrix2D()
        elif isinstance(image, np.ndarray) and len(image.shape) == 2:
A = image
else:
raise TypeError("ERROR Unknown Type (%s) - Only arrays and pyvision images supported."%type(image))
L = self._detect(image,**kwargs)
L.sort()
L.reverse()
if self.selector == 'best':
L=L[:self.n]
elif self.selector == 'bins':
nbins = A.shape[0]/self.bin_size*A.shape[1]/self.bin_size
npts = self.n / nbins + 1
corners = []
for xmin in range(0,A.shape[0],self.bin_size):
xmax = xmin + self.bin_size
for ymin in range(0,A.shape[1],self.bin_size):
bin_data = []
ymax = ymin + self.bin_size
for each in L:
#print each
if xmin <= each[1] and each[1] < xmax and ymin <= each[2] and each[2] < ymax:
bin_data.append(each)
if len(bin_data) >= npts:
break
corners += bin_data
L = corners
else: # TODO: assume all
pass
roi = []
for each in L:
roi.append([each[0],pv.Point(each[1],each[2]),each[3]])
#L = concatenate((L.transpose,ones((1,L.shape[0]))))
return roi
def _detect(self):
raise NotImplementedError("This method should be overridden in a sub class.")
| svohara/pyvision | src/pyvision/point/DetectorROI.py | Python | bsd-3-clause | 4,461 | 0.011432 |
# -*- encoding: utf-8 -*-
"""Test class for Architecture UI"""
from fauxfactory import gen_string
from nailgun import entities
from robottelo.datafactory import generate_strings_list, invalid_values_list
from robottelo.decorators import run_only_on, tier1
from robottelo.test import UITestCase
from robottelo.ui.factory import make_arch
from robottelo.ui.locators import common_locators
from robottelo.ui.session import Session
def valid_arch_os_names():
"""Returns a tuple of arch/os names for creation tests"""
return(
{u'name': gen_string('alpha'), u'os_name': gen_string('alpha')},
{u'name': gen_string('html'), u'os_name': gen_string('html')},
{u'name': gen_string('utf8'), u'os_name': gen_string('utf8')},
{u'name': gen_string('alphanumeric'),
u'os_name': gen_string('alphanumeric')}
)
class ArchitectureTestCase(UITestCase):
"""Implements Architecture tests from UI"""
@run_only_on('sat')
@tier1
def test_positive_create_with_os(self):
"""@Test: Create a new Architecture with OS
@Feature: Architecture - Positive Create
@Assert: Architecture is created
"""
with Session(self.browser) as session:
for test_data in valid_arch_os_names():
with self.subTest(test_data):
entities.OperatingSystem(
name=test_data['os_name']).create()
make_arch(session, name=test_data['name'],
os_names=[test_data['os_name']])
self.assertIsNotNone(
self.architecture.search(test_data['name']))
@run_only_on('sat')
@tier1
def test_positive_create_with_name(self):
"""@Test: Create a new Architecture with different data
@Feature: Architecture - Positive Create
@Assert: Architecture is created
"""
with Session(self.browser) as session:
for name in generate_strings_list():
with self.subTest(name):
make_arch(session, name=name)
self.assertIsNotNone(self.architecture.search(name))
@run_only_on('sat')
@tier1
def test_negative_create_with_invalid_name(self):
"""@Test: Try to create architecture and use whitespace, blank, tab
symbol or too long string of different types as its name value
@Feature: Architecture - Negative Create
@Assert: Architecture is not created
"""
with Session(self.browser) as session:
for invalid_name in invalid_values_list(interface='ui'):
with self.subTest(invalid_name):
make_arch(session, name=invalid_name)
self.assertIsNotNone(self.architecture.wait_until_element(
common_locators['name_haserror']))
@run_only_on('sat')
@tier1
def test_negative_create_with_same_name(self):
"""@Test: Create a new Architecture with same name
@Feature: Architecture - Negative Create
@Assert: Architecture is not created
"""
with Session(self.browser) as session:
for name in generate_strings_list():
with self.subTest(name):
make_arch(session, name=name)
self.assertIsNotNone(self.architecture.search(name))
make_arch(session, name=name)
self.assertIsNotNone(self.architecture.wait_until_element(
common_locators['name_haserror']))
@run_only_on('sat')
@tier1
def test_positive_delete(self):
"""@Test: Delete an existing Architecture
@Feature: Architecture - Delete
@Assert: Architecture is deleted
"""
os = entities.OperatingSystem(name=gen_string('alpha')).create()
with Session(self.browser) as session:
for name in generate_strings_list():
with self.subTest(name):
entities.Architecture(
name=name, operatingsystem=[os]).create()
session.nav.go_to_architectures()
self.architecture.delete(name)
@run_only_on('sat')
@tier1
def test_positive_update_name_and_os(self):
"""@Test: Update Architecture with new name and OS
@Feature: Architecture - Update
@Assert: Architecture is updated
"""
old_name = gen_string('alpha')
with Session(self.browser) as session:
make_arch(session, name=old_name)
self.assertIsNotNone(self.architecture.search(old_name))
for new_name in generate_strings_list():
with self.subTest(new_name):
os_name = gen_string('alpha')
entities.OperatingSystem(name=os_name).create()
self.architecture.update(
old_name, new_name, new_os_names=[os_name])
self.assertIsNotNone(self.architecture.search(new_name))
old_name = new_name # for next iteration
| tkolhar/robottelo | tests/foreman/ui/test_architecture.py | Python | gpl-3.0 | 5,128 | 0 |
from Tools.Profile import profile
profile("LOAD:ElementTree")
import xml.etree.cElementTree
import os
profile("LOAD:enigma_skin")
from enigma import eSize, ePoint, eRect, gFont, eWindow, eLabel, ePixmap, eWindowStyleManager, addFont, gRGB, eWindowStyleSkinned, getDesktop
from Components.config import ConfigSubsection, ConfigText, config
from Components.Converter.Converter import Converter
from Components.Sources.Source import Source, ObsoleteSource
from Tools.Directories import resolveFilename, SCOPE_SKIN, SCOPE_FONTS, SCOPE_CURRENT_SKIN, SCOPE_CONFIG, fileExists, SCOPE_SKIN_IMAGE
from Tools.Import import my_import
from Tools.LoadPixmap import LoadPixmap
from Components.RcModel import rc_model
from Components.SystemInfo import SystemInfo
colorNames = {}
# Predefined fonts, typically used in built-in screens and for components like
# the movie list and so on.
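# Each entry is (family, point size, line height, character width); the last two
# values drive the 'h' and 'w' multiplier tokens in parseCoordinate() below.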
fonts = {
"Body": ("Regular", 18, 22, 16),
"ChoiceList": ("Regular", 20, 24, 18),
}
parameters = {}
def dump(x, i=0):
print " " * i + str(x)
try:
for n in x.childNodes:
dump(n, i + 1)
	except:
		pass
class SkinError(Exception):
def __init__(self, message):
self.msg = message
def __str__(self):
return "{%s}: %s. Please contact the skin's author!" % (config.skin.primary_skin.value, self.msg)
dom_skins = [ ]
def addSkin(name, scope = SCOPE_SKIN):
# read the skin
filename = resolveFilename(scope, name)
if fileExists(filename):
mpath = os.path.dirname(filename) + "/"
try:
dom_skins.append((mpath, xml.etree.cElementTree.parse(filename).getroot()))
except:
print "[SKIN ERROR] error in %s" % filename
return False
else:
return True
return False
# get own skin_user_skinname.xml file, if exist
def skin_user_skinname():
name = "skin_user_" + config.skin.primary_skin.value[:config.skin.primary_skin.value.rfind('/')] + ".xml"
filename = resolveFilename(SCOPE_CONFIG, name)
if fileExists(filename):
return name
return None
# we do our best to always select the "right" value
# skins are loaded in order of priority: skin with
# highest priority is loaded last, usually the user-provided
# skin.
# currently, loadSingleSkinData (colors, bordersets etc.)
# are applied one-after-each, in order of ascending priority.
# the dom_skin will keep all screens in descending priority,
# so the first screen found will be used.
# example: loadSkin("nemesis_greenline/skin.xml")
config.skin = ConfigSubsection()
DEFAULT_SKIN = SystemInfo["HasFullHDSkinSupport"] and "PLi-FullNightHD/skin.xml" or "PLi-HD/skin.xml"
# on SD hardware, PLi-HD will not be available
if not fileExists(resolveFilename(SCOPE_SKIN, DEFAULT_SKIN)):
# in that case, fallback to Magic (which is an SD skin)
DEFAULT_SKIN = "Magic/skin.xml"
if not fileExists(resolveFilename(SCOPE_SKIN, DEFAULT_SKIN)):
DEFAULT_SKIN = "skin.xml"
config.skin.primary_skin = ConfigText(default=DEFAULT_SKIN)
profile("LoadSkin")
res = None
name = skin_user_skinname()
if name:
res = addSkin(name, SCOPE_CONFIG)
if not name or not res:
addSkin('skin_user.xml', SCOPE_CONFIG)
# some boxes lie about their dimensions
addSkin('skin_box.xml')
# add optional discrete second infobar
addSkin('skin_second_infobar.xml')
display_skin_id = 1
addSkin('skin_display.xml')
addSkin('skin_text.xml')
addSkin('skin_subtitles.xml')
try:
if not addSkin(config.skin.primary_skin.value):
raise SkinError, "primary skin not found"
except Exception, err:
print "SKIN ERROR:", err
skin = DEFAULT_SKIN
if config.skin.primary_skin.value == skin:
skin = 'skin.xml'
print "defaulting to standard skin...", skin
config.skin.primary_skin.value = skin
addSkin(skin)
del skin
addSkin('skin_default.xml')
profile("LoadSkinDefaultDone")
#
# Convert a string into a number. Used to convert object position and size attributes into a number
# s is the input string.
# e is the the parent object size to do relative calculations on parent
# size is the size of the object size (e.g. width or height)
# font is a font object to calculate relative to font sizes
# Note some constructs for speeding # up simple cases that are very common.
# Can do things like: 10+center-10w+4%
# To center the widget on the parent widget,
# but move forward 10 pixels and 4% of parent width
# and 10 character widths backward
# Multiplication, division and subexprsssions are also allowed: 3*(e-c/2)
#
# Usage: center : center the object on parent based on parent size and object size
# e : take the parent size/width
# c : take the center point of parent size/width
# % : take given percentag of parent size/width
# w : multiply by current font width
# h : multiply by current font height
#
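# For example, with a 720-pixel parent: parseCoordinate("center", 720, 100) -> 310,
# parseCoordinate("10%", 720) -> 72, and parseCoordinate("e-40", 720) -> 680.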
def parseCoordinate(s, e, size=0, font=None):
s = s.strip()
if s == "center": # for speed, can be common case
val = (e - size)/2
elif s == '*':
return None
else:
try:
val = int(s) # for speed
except:
			if 't' in s:  # 't' only occurs in the "center" token
s = s.replace("center", str((e-size)/2.0))
if 'e' in s:
s = s.replace("e", str(e))
if 'c' in s:
s = s.replace("c", str(e/2.0))
if 'w' in s:
s = s.replace("w", "*" + str(fonts[font][3]))
if 'h' in s:
s = s.replace("h", "*" + str(fonts[font][2]))
if '%' in s:
s = s.replace("%", "*" + str(e/100.0))
try:
val = int(s) # for speed
except:
val = eval(s)
if val < 0:
return 0
return int(val) # make sure an integer value is returned
def getParentSize(object, desktop):
size = eSize()
if object:
parent = object.getParent()
# For some widgets (e.g. ScrollLabel) the skin attributes are applied to
# a child widget, instead of to the widget itself. In that case, the parent
# we have here is not the real parent, but it is the main widget.
# We have to go one level higher to get the actual parent.
# We can detect this because the 'parent' will not have a size yet
# (the main widget's size will be calculated internally, as soon as the child
# widget has parsed the skin attributes)
if parent and parent.size().isEmpty():
parent = parent.getParent()
if parent:
size = parent.size()
elif desktop:
#widget has no parent, use desktop size instead for relative coordinates
size = desktop.size()
return size
def parseValuePair(s, scale, object = None, desktop = None, size = None):
x, y = s.split(',')
parentsize = eSize()
if object and ('c' in x or 'c' in y or 'e' in x or 'e' in y or
'%' in x or '%' in y): # need parent size for ce%
parentsize = getParentSize(object, desktop)
xval = parseCoordinate(x, parentsize.width(), size and size.width() or 0)
yval = parseCoordinate(y, parentsize.height(), size and size.height() or 0)
return (xval * scale[0][0] / scale[0][1], yval * scale[1][0] / scale[1][1])
def parsePosition(s, scale, object = None, desktop = None, size = None):
(x, y) = parseValuePair(s, scale, object, desktop, size)
return ePoint(x, y)
def parseSize(s, scale, object = None, desktop = None):
(x, y) = parseValuePair(s, scale, object, desktop)
return eSize(x, y)
def parseFont(s, scale):
try:
f = fonts[s]
name = f[0]
size = f[1]
except:
name, size = s.split(';')
return gFont(name, int(size) * scale[0][0] / scale[0][1])
def parseColor(s):
if s[0] != '#':
try:
return colorNames[s]
except:
raise SkinError("color '%s' must be #aarrggbb or valid named color" % s)
return gRGB(int(s[1:], 0x10))
def collectAttributes(skinAttributes, node, context, skin_path_prefix=None, ignore=(), filenames=frozenset(("pixmap", "pointer", "seek_pointer", "backgroundPixmap", "selectionPixmap", "sliderPixmap", "scrollbarbackgroundPixmap"))):
# walk all attributes
size = None
pos = None
font = None
for attrib, value in node.items():
if attrib not in ignore:
if attrib in filenames:
value = resolveFilename(SCOPE_CURRENT_SKIN, value, path_prefix=skin_path_prefix)
# Bit of a hack this, really. When a window has a flag (e.g. wfNoBorder)
# it needs to be set at least before the size is set, in order for the
# window dimensions to be calculated correctly in all situations.
# If wfNoBorder is applied after the size has been set, the window will fail to clear the title area.
# Similar situation for a scrollbar in a listbox; when the scrollbar setting is applied after
# the size, a scrollbar will not be shown until the selection moves for the first time
if attrib == 'size':
size = value.encode("utf-8")
elif attrib == 'position':
pos = value.encode("utf-8")
elif attrib == 'font':
font = value.encode("utf-8")
skinAttributes.append((attrib, font))
else:
skinAttributes.append((attrib, value.encode("utf-8")))
if pos is not None:
pos, size = context.parse(pos, size, font)
skinAttributes.append(('position', pos))
if size is not None:
skinAttributes.append(('size', size))
def morphRcImagePath(value):
if rc_model.rcIsDefault() is False:
if value == '/usr/share/enigma2/skin_default/rc.png' or value == '/usr/share/enigma2/skin_default/rcold.png':
value = rc_model.getRcImg()
return value
def loadPixmap(path, desktop):
option = path.find("#")
if option != -1:
path = path[:option]
ptr = LoadPixmap(morphRcImagePath(path), desktop)
if ptr is None:
raise SkinError("pixmap file %s not found!" % path)
return ptr
class AttributeParser:
def __init__(self, guiObject, desktop, scale=((1,1),(1,1))):
self.guiObject = guiObject
self.desktop = desktop
self.scaleTuple = scale
def applyOne(self, attrib, value):
try:
getattr(self, attrib)(value)
except AttributeError:
print "[Skin] Attribute not implemented:", attrib, "value:", value
except SkinError, ex:
print "[Skin] Error:", ex
def applyAll(self, attrs):
for attrib, value in attrs:
self.applyOne(attrib, value)
def conditional(self, value):
pass
def position(self, value):
if isinstance(value, tuple):
self.guiObject.move(ePoint(*value))
else:
self.guiObject.move(parsePosition(value, self.scaleTuple, self.guiObject, self.desktop, self.guiObject.csize()))
def size(self, value):
if isinstance(value, tuple):
self.guiObject.resize(eSize(*value))
else:
self.guiObject.resize(parseSize(value, self.scaleTuple, self.guiObject, self.desktop))
def title(self, value):
self.guiObject.setTitle(_(value))
def text(self, value):
self.guiObject.setText(_(value))
def font(self, value):
self.guiObject.setFont(parseFont(value, self.scaleTuple))
def zPosition(self, value):
self.guiObject.setZPosition(int(value))
def itemHeight(self, value):
self.guiObject.setItemHeight(int(value))
def pixmap(self, value):
ptr = loadPixmap(value, self.desktop)
self.guiObject.setPixmap(ptr)
def backgroundPixmap(self, value):
ptr = loadPixmap(value, self.desktop)
self.guiObject.setBackgroundPicture(ptr)
def selectionPixmap(self, value):
ptr = loadPixmap(value, self.desktop)
self.guiObject.setSelectionPicture(ptr)
def sliderPixmap(self, value):
ptr = loadPixmap(value, self.desktop)
self.guiObject.setSliderPicture(ptr)
def scrollbarbackgroundPixmap(self, value):
ptr = loadPixmap(value, self.desktop)
self.guiObject.setScrollbarBackgroundPicture(ptr)
def alphatest(self, value):
self.guiObject.setAlphatest(
{ "on": 1,
"off": 0,
"blend": 2,
}[value])
def scale(self, value):
self.guiObject.setScale(1)
def orientation(self, value): # used by eSlider
try:
self.guiObject.setOrientation(*
{ "orVertical": (self.guiObject.orVertical, False),
"orTopToBottom": (self.guiObject.orVertical, False),
"orBottomToTop": (self.guiObject.orVertical, True),
"orHorizontal": (self.guiObject.orHorizontal, False),
"orLeftToRight": (self.guiObject.orHorizontal, False),
"orRightToLeft": (self.guiObject.orHorizontal, True),
}[value])
except KeyError:
print "oprientation must be either orVertical or orHorizontal!"
def valign(self, value):
try:
self.guiObject.setVAlign(
{ "top": self.guiObject.alignTop,
"center": self.guiObject.alignCenter,
"bottom": self.guiObject.alignBottom
}[value])
except KeyError:
print "valign must be either top, center or bottom!"
def halign(self, value):
try:
self.guiObject.setHAlign(
{ "left": self.guiObject.alignLeft,
"center": self.guiObject.alignCenter,
"right": self.guiObject.alignRight,
"block": self.guiObject.alignBlock
}[value])
except KeyError:
print "halign must be either left, center, right or block!"
def textOffset(self, value):
x, y = value.split(',')
self.guiObject.setTextOffset(ePoint(int(x) * self.scaleTuple[0][0] / self.scaleTuple[0][1], int(y) * self.scaleTuple[1][0] / self.scaleTuple[1][1]))
def flags(self, value):
flags = value.split(',')
for f in flags:
try:
fv = eWindow.__dict__[f]
self.guiObject.setFlag(fv)
except KeyError:
print "illegal flag %s!" % f
def backgroundColor(self, value):
self.guiObject.setBackgroundColor(parseColor(value))
def backgroundColorSelected(self, value):
self.guiObject.setBackgroundColorSelected(parseColor(value))
def foregroundColor(self, value):
self.guiObject.setForegroundColor(parseColor(value))
def foregroundColorSelected(self, value):
self.guiObject.setForegroundColorSelected(parseColor(value))
def shadowColor(self, value):
self.guiObject.setShadowColor(parseColor(value))
def selectionDisabled(self, value):
self.guiObject.setSelectionEnable(0)
def transparent(self, value):
self.guiObject.setTransparent(int(value))
def borderColor(self, value):
self.guiObject.setBorderColor(parseColor(value))
def borderWidth(self, value):
self.guiObject.setBorderWidth(int(value))
def scrollbarMode(self, value):
self.guiObject.setScrollbarMode(getattr(self.guiObject, value))
# { "showOnDemand": self.guiObject.showOnDemand,
# "showAlways": self.guiObject.showAlways,
# "showNever": self.guiObject.showNever,
# "showLeft": self.guiObject.showLeft
# }[value])
def enableWrapAround(self, value):
self.guiObject.setWrapAround(True)
def pointer(self, value):
(name, pos) = value.split(':')
pos = parsePosition(pos, self.scaleTuple)
ptr = loadPixmap(name, self.desktop)
self.guiObject.setPointer(0, ptr, pos)
def seek_pointer(self, value):
(name, pos) = value.split(':')
pos = parsePosition(pos, self.scaleTuple)
ptr = loadPixmap(name, self.desktop)
self.guiObject.setPointer(1, ptr, pos)
def shadowOffset(self, value):
self.guiObject.setShadowOffset(parsePosition(value, self.scaleTuple))
def noWrap(self, value):
self.guiObject.setNoWrap(1)
def applySingleAttribute(guiObject, desktop, attrib, value, scale = ((1,1),(1,1))):
# Someone still using applySingleAttribute?
AttributeParser(guiObject, desktop, scale).applyOne(attrib, value)
def applyAllAttributes(guiObject, desktop, attributes, scale):
AttributeParser(guiObject, desktop, scale).applyAll(attributes)
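# Illustrative sketch (not from the original file; widget and desktop are
# hypothetical placeholders): a screen applies its collected attribute list
# to a freshly created GUI object like this. The scale tuple ((1,1),(1,1))
# means "no scaling", since each axis is scaled by a numerator/denominator pair.
#
#     attributes = [("position", "10,10"), ("size", "200,50"), ("zPosition", "1")]
#     applyAllAttributes(widget, desktop, attributes, ((1,1),(1,1)))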
def loadSingleSkinData(desktop, skin, path_prefix):
"""loads skin data like colors, windowstyle etc."""
assert skin.tag == "skin", "root element in skin must be 'skin'!"
for c in skin.findall("output"):
id = c.attrib.get('id')
if id:
id = int(id)
else:
id = 0
if id == 0: # framebuffer
for res in c.findall("resolution"):
get_attr = res.attrib.get
xres = get_attr("xres")
if xres:
xres = int(xres)
else:
xres = 720
yres = get_attr("yres")
if yres:
yres = int(yres)
else:
yres = 576
bpp = get_attr("bpp")
if bpp:
bpp = int(bpp)
else:
bpp = 32
#print "Resolution:", xres,yres,bpp
from enigma import gMainDC
gMainDC.getInstance().setResolution(xres, yres)
desktop.resize(eSize(xres, yres))
if bpp != 32:
# load palette (not yet implemented)
pass
if yres >= 1080:
parameters["FileListName"] = (68,4,1000,34)
parameters["FileListIcon"] = (7,4,52,37)
parameters["FileListMultiName"] = (90,3,1000,32)
parameters["FileListMultiIcon"] = (45, 4, 30, 30)
parameters["FileListMultiLock"] = (2,0,36,36)
parameters["ChoicelistDash"] = (0,3,1000,30)
parameters["ChoicelistName"] = (68,3,1000,30)
parameters["ChoicelistIcon"] = (7,0,52,38)
parameters["PluginBrowserName"] = (180,8,38)
parameters["PluginBrowserDescr"] = (180,42,25)
parameters["PluginBrowserIcon"] = (15,8,150,60)
parameters["PluginBrowserDownloadName"] = (120,8,38)
parameters["PluginBrowserDownloadDescr"] = (120,42,25)
parameters["PluginBrowserDownloadIcon"] = (15,0,90,76)
parameters["ServiceInfo"] = (0,0,450,50)
parameters["ServiceInfoLeft"] = (0,0,450,45)
parameters["ServiceInfoRight"] = (450,0,1000,45)
parameters["SelectionListDescr"] = (45,3,1000,32)
parameters["SelectionListLock"] = (0,2,36,36)
parameters["ConfigListSeperator"] = 300
parameters["VirtualKeyboard"] = (68,68)
parameters["PartnerBoxEntryListName"] = (8,2,225,38)
parameters["PartnerBoxEntryListIP"] = (180,2,225,38)
parameters["PartnerBoxEntryListPort"] = (405,2,150,38)
parameters["PartnerBoxEntryListType"] = (615,2,150,38)
parameters["PartnerBoxTimerServicename"] = (0,0,45)
parameters["PartnerBoxTimerName"] = (0,42,30)
parameters["PartnerBoxE1TimerTime"] = (0,78,255,30)
parameters["PartnerBoxE1TimerState"] = (255,78,255,30)
parameters["PartnerBoxE2TimerTime"] = (0,78,225,30)
parameters["PartnerBoxE2TimerState"] = (225,78,225,30)
parameters["PartnerBoxE2TimerIcon"] = (1050,8,20,20)
parameters["PartnerBoxE2TimerIconRepeat"] = (1050,38,20,20)
parameters["PartnerBoxBouquetListName"] = (0,0,45)
parameters["PartnerBoxChannelListName"] = (0,0,45)
parameters["PartnerBoxChannelListTitle"] = (0,42,30)
parameters["PartnerBoxChannelListTime"] = (0,78,225,30)
parameters["HelpMenuListHlp"] = (0,0,900,42)
parameters["HelpMenuListExtHlp0"] = (0,0,900,39)
parameters["HelpMenuListExtHlp1"] = (0,42,900,30)
parameters["AboutHddSplit"] = 1
parameters["DreamexplorerName"] = (62,0,1200,38)
parameters["DreamexplorerIcon"] = (15,4,30,30)
parameters["PicturePlayerThumb"] = (30,285,45,300,30,25)
parameters["PlayListName"] = (38,2,1000,34)
parameters["PlayListIcon"] = (7,7,24,24)
parameters["SHOUTcastListItem"] = (30,27,35,96,35,33,60,32)
for skininclude in skin.findall("include"):
filename = skininclude.attrib.get("filename")
if filename:
skinfile = resolveFilename(SCOPE_CURRENT_SKIN, filename, path_prefix=path_prefix)
if not fileExists(skinfile):
skinfile = resolveFilename(SCOPE_SKIN_IMAGE, filename, path_prefix=path_prefix)
if fileExists(skinfile):
print "[SKIN] loading include:", skinfile
loadSkin(skinfile)
for c in skin.findall("colors"):
for color in c.findall("color"):
get_attr = color.attrib.get
name = get_attr("name")
color = get_attr("value")
if name and color:
colorNames[name] = parseColor(color)
#print "Color:", name, color
else:
raise SkinError("need color and name, got %s %s" % (name, color))
for c in skin.findall("fonts"):
for font in c.findall("font"):
get_attr = font.attrib.get
filename = get_attr("filename", "<NONAME>")
name = get_attr("name", "Regular")
scale = get_attr("scale")
if scale:
scale = int(scale)
else:
scale = 100
is_replacement = get_attr("replacement") and True or False
render = get_attr("render")
if render:
render = int(render)
else:
render = 0
resolved_font = resolveFilename(SCOPE_FONTS, filename, path_prefix=path_prefix)
if not fileExists(resolved_font): # when the font is not available, fall back to the current skin path
skin_path = resolveFilename(SCOPE_CURRENT_SKIN, filename)
if fileExists(skin_path):
resolved_font = skin_path
addFont(resolved_font, name, scale, is_replacement, render)
#print "Font: ", resolved_font, name, scale, is_replacement
for alias in c.findall("alias"):
get = alias.attrib.get
try:
name = get("name")
font = get("font")
size = int(get("size"))
height = int(get("height", size)) # to be calculated some day
width = int(get("width", size))
global fonts
fonts[name] = (font, size, height, width)
except Exception, ex:
print "[SKIN] bad font alias", ex
for c in skin.findall("parameters"):
for parameter in c.findall("parameter"):
get = parameter.attrib.get
try:
name = get("name")
value = get("value")
parameters[name] = "," in value and map(int, value.split(",")) or int(value)
except Exception, ex:
print "[SKIN] bad parameter", ex
for c in skin.findall("subtitles"):
from enigma import eSubtitleWidget
scale = ((1,1),(1,1))
for substyle in c.findall("sub"):
get_attr = substyle.attrib.get
font = parseFont(get_attr("font"), scale)
col = get_attr("foregroundColor")
if col:
foregroundColor = parseColor(col)
haveColor = 1
else:
foregroundColor = gRGB(0xFFFFFF)
haveColor = 0
col = get_attr("borderColor")
if col:
borderColor = parseColor(col)
else:
borderColor = gRGB(0)
borderwidth = get_attr("borderWidth")
if borderwidth is None:
# default: use a subtitle border
borderWidth = 3
else:
borderWidth = int(borderwidth)
face = eSubtitleWidget.__dict__[get_attr("name")]
eSubtitleWidget.setFontStyle(face, font, haveColor, foregroundColor, borderColor, borderWidth)
for windowstyle in skin.findall("windowstyle"):
style = eWindowStyleSkinned()
style_id = windowstyle.attrib.get("id")
if style_id:
style_id = int(style_id)
else:
style_id = 0
# defaults
font = gFont("Regular", 20)
offset = eSize(20, 5)
for title in windowstyle.findall("title"):
get_attr = title.attrib.get
offset = parseSize(get_attr("offset"), ((1,1),(1,1)))
font = parseFont(get_attr("font"), ((1,1),(1,1)))
style.setTitleFont(font)
style.setTitleOffset(offset)
#print " ", font, offset
for borderset in windowstyle.findall("borderset"):
bsName = str(borderset.attrib.get("name"))
for pixmap in borderset.findall("pixmap"):
get_attr = pixmap.attrib.get
bpName = get_attr("pos")
filename = get_attr("filename")
if filename and bpName:
png = loadPixmap(resolveFilename(SCOPE_CURRENT_SKIN, filename, path_prefix=path_prefix), desktop)
style.setPixmap(eWindowStyleSkinned.__dict__[bsName], eWindowStyleSkinned.__dict__[bpName], png)
#print " borderset:", bpName, filename
for color in windowstyle.findall("color"):
get_attr = color.attrib.get
colorType = get_attr("name")
color = parseColor(get_attr("color"))
try:
style.setColor(eWindowStyleSkinned.__dict__["col" + colorType], color)
except:
raise SkinError("Unknown color %s" % colorType)
#pass
#print " color:", type, color
x = eWindowStyleManager.getInstance()
x.setStyle(style_id, style)
for margin in skin.findall("margin"):
style_id = margin.attrib.get("id")
if style_id:
style_id = int(style_id)
else:
style_id = 0
r = eRect(0,0,0,0)
v = margin.attrib.get("left")
if v:
r.setLeft(int(v))
v = margin.attrib.get("top")
if v:
r.setTop(int(v))
v = margin.attrib.get("right")
if v:
r.setRight(int(v))
v = margin.attrib.get("bottom")
if v:
r.setBottom(int(v))
# the "desktop" parameter is hardcoded to the UI screen, so we must ask
# for the one that this actually applies to.
getDesktop(style_id).setMargins(r)
dom_screens = {}
def loadSkin(name, scope = SCOPE_SKIN):
# Now a utility for plugins to add skin data to the screens
global dom_screens, display_skin_id
filename = resolveFilename(scope, name)
if fileExists(filename):
path = os.path.dirname(filename) + "/"
for elem in xml.etree.cElementTree.parse(filename).getroot():
if elem.tag == 'screen':
name = elem.attrib.get('name', None)
if name:
sid = elem.attrib.get('id', None)
if sid and (sid != display_skin_id):
# not for this display
elem.clear()
continue
if name in dom_screens:
print "loadSkin: Screen already defined elsewhere:", name
elem.clear()
else:
dom_screens[name] = (elem, path)
else:
elem.clear()
else:
elem.clear()
def loadSkinData(desktop):
# Kinda hackish, but this is called once by mytest.py
global dom_skins
skins = dom_skins[:]
skins.reverse()
for (path, dom_skin) in skins:
loadSingleSkinData(desktop, dom_skin, path)
for elem in dom_skin:
if elem.tag == 'screen':
name = elem.attrib.get('name', None)
if name:
sid = elem.attrib.get('id', None)
if sid and (sid != display_skin_id):
# not for this display
elem.clear()
continue
if name in dom_screens:
# Kill old versions, save memory
dom_screens[name][0].clear()
dom_screens[name] = (elem, path)
else:
# without name, it's useless!
elem.clear()
else:
# non-screen element, no need for it any longer
elem.clear()
# no longer needed, we know where the screens are now.
del dom_skins
class additionalWidget:
pass
# Class that makes a tuple look like something else. Some plugins just assume
# that size is a string and try to parse it. This class makes that work.
class SizeTuple(tuple):
def split(self, *args):
return (str(self[0]), str(self[1]))
def strip(self, *args):
return '%s,%s' % self
def __str__(self):
return '%s,%s' % self
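# Why SizeTuple exists, as a sketch: plugins that assume a size is a "w,h"
# string can keep calling split()/strip() on what is really a tuple.
#
#     s = SizeTuple((200, 100))
#     s.split()   # -> ('200', '100'), like "200,100".split(',')
#     str(s)      # -> '200,100'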
class SkinContext:
def __init__(self, parent=None, pos=None, size=None, font=None):
if parent is not None:
if pos is not None:
pos, size = parent.parse(pos, size, font)
self.x, self.y = pos
self.w, self.h = size
else:
self.x = None
self.y = None
self.w = None
self.h = None
def __str__(self):
return "Context (%s,%s)+(%s,%s) " % (self.x, self.y, self.w, self.h)
def parse(self, pos, size, font):
if pos == "fill":
pos = (self.x, self.y)
size = (self.w, self.h)
self.w = 0
self.h = 0
else:
w,h = size.split(',')
w = parseCoordinate(w, self.w, 0, font)
h = parseCoordinate(h, self.h, 0, font)
if pos == "bottom":
pos = (self.x, self.y + self.h - h)
size = (self.w, h)
self.h -= h
elif pos == "top":
pos = (self.x, self.y)
size = (self.w, h)
self.h -= h
self.y += h
elif pos == "left":
pos = (self.x, self.y)
size = (w, self.h)
self.x += w
self.w -= w
elif pos == "right":
pos = (self.x + self.w - w, self.y)
size = (w, self.h)
self.w -= w
else:
size = (w, h)
pos = pos.split(',')
pos = (self.x + parseCoordinate(pos[0], self.w, size[0], font), self.y + parseCoordinate(pos[1], self.h, size[1], font))
return (SizeTuple(pos), SizeTuple(size))
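# Layout sketch (illustrative values, assuming a 720x576 screen): carving a
# 64-pixel strip off the top consumes that space, so the next widget parsed
# in the same context starts below it.
#
#     ctx = SkinContext()
#     ctx.x, ctx.y, ctx.w, ctx.h = 0, 0, 720, 576
#     pos, size = ctx.parse("top", "720,64", None)
#     # pos -> (0, 0), size -> (720, 64); ctx now has y=64, h=512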
class SkinContextStack(SkinContext):
# A context that stacks things instead of aligning them
def parse(self, pos, size, font):
if pos == "fill":
pos = (self.x, self.y)
size = (self.w, self.h)
else:
w,h = size.split(',')
w = parseCoordinate(w, self.w, 0, font)
h = parseCoordinate(h, self.h, 0, font)
if pos == "bottom":
pos = (self.x, self.y + self.h - h)
size = (self.w, h)
elif pos == "top":
pos = (self.x, self.y)
size = (self.w, h)
elif pos == "left":
pos = (self.x, self.y)
size = (w, self.h)
elif pos == "right":
pos = (self.x + self.w - w, self.y)
size = (w, self.h)
else:
size = (w, h)
pos = pos.split(',')
pos = (self.x + parseCoordinate(pos[0], self.w, size[0], font), self.y + parseCoordinate(pos[1], self.h, size[1], font))
return (SizeTuple(pos), SizeTuple(size))
def readSkin(screen, skin, names, desktop):
if not isinstance(names, list):
names = [names]
# try all skins; the first existing one has priority
global dom_screens
for n in names:
myscreen, path = dom_screens.get(n, (None,None))
if myscreen is not None:
# use this name for debug output
name = n
break
else:
name = "<embedded-in-'%s'>" % screen.__class__.__name__
# otherwise try embedded skin
if myscreen is None:
myscreen = getattr(screen, "parsedSkin", None)
# try uncompiled embedded skin
if myscreen is None and getattr(screen, "skin", None):
skin = screen.skin
print "[SKIN] Parsing embedded skin", name
if isinstance(skin, tuple):
for s in skin:
candidate = xml.etree.cElementTree.fromstring(s)
if candidate.tag == 'screen':
sid = candidate.attrib.get('id', None)
if (not sid) or (int(sid) == display_skin_id):
myscreen = candidate
break
else:
print "[SKIN] Hey, no suitable screen!"
else:
myscreen = xml.etree.cElementTree.fromstring(skin)
if myscreen:
screen.parsedSkin = myscreen
if myscreen is None:
print "[SKIN] No skin to read..."
myscreen = screen.parsedSkin = xml.etree.cElementTree.fromstring("<screen></screen>")
screen.skinAttributes = [ ]
skin_path_prefix = getattr(screen, "skin_path", path)
context = SkinContextStack()
s = desktop.bounds()
context.x = s.left()
context.y = s.top()
context.w = s.width()
context.h = s.height()
del s
collectAttributes(screen.skinAttributes, myscreen, context, skin_path_prefix, ignore=("name",))
context = SkinContext(context, myscreen.attrib.get('position'), myscreen.attrib.get('size'))
screen.additionalWidgets = [ ]
screen.renderer = [ ]
visited_components = set()
# now walk all widgets and stuff
def process_none(widget, context):
pass
def process_widget(widget, context):
get_attr = widget.attrib.get
# ok, we either have 1:1-mapped widgets ('old style'), or 1:n-mapped
# widgets (source->renderer).
wname = get_attr('name')
wsource = get_attr('source')
if wname is None and wsource is None:
print "widget has no name and no source!"
return
if wname:
#print "Widget name=", wname
visited_components.add(wname)
# get corresponding 'gui' object
try:
attributes = screen[wname].skinAttributes = [ ]
except:
raise SkinError("component with name '" + wname + "' was not found in skin of screen '" + name + "'!")
# assert screen[wname] is not Source
collectAttributes(attributes, widget, context, skin_path_prefix, ignore=('name',))
elif wsource:
# get corresponding source
#print "Widget source=", wsource
while True: # until we found a non-obsolete source
# parse our current "wsource", which might specify a "related screen" before the dot,
# for example to reference a parent, global or session-global screen.
scr = screen
# resolve all path components
path = wsource.split('.')
while len(path) > 1:
scr = screen.getRelatedScreen(path[0])
if scr is None:
#print wsource
#print name
raise SkinError("specified related screen '" + wsource + "' was not found in screen '" + name + "'!")
path = path[1:]
# resolve the source.
source = scr.get(path[0])
if isinstance(source, ObsoleteSource):
# however, if we found an "obsolete source", issue warning, and resolve the real source.
print "WARNING: SKIN '%s' USES OBSOLETE SOURCE '%s', USE '%s' INSTEAD!" % (name, wsource, source.new_source)
print "OBSOLETE SOURCE WILL BE REMOVED %s, PLEASE UPDATE!" % (source.removal_date)
if source.description:
print source.description
wsource = source.new_source
else:
# otherwise, use that source.
break
if source is None:
raise SkinError("source '" + wsource + "' was not found in screen '" + name + "'!")
wrender = get_attr('render')
if not wrender:
raise SkinError("you must define a renderer with render= for source '%s'" % wsource)
for converter in widget.findall("convert"):
ctype = converter.get('type')
assert ctype, "'convert'-tag needs a 'type'-attribute"
#print "Converter:", ctype
try:
parms = converter.text.strip()
except:
parms = ""
#print "Params:", parms
converter_class = my_import('.'.join(("Components", "Converter", ctype))).__dict__.get(ctype)
c = None
for i in source.downstream_elements:
if isinstance(i, converter_class) and i.converter_arguments == parms:
c = i
if c is None:
c = converter_class(parms)
c.connect(source)
source = c
renderer_class = my_import('.'.join(("Components", "Renderer", wrender))).__dict__.get(wrender)
renderer = renderer_class() # instantiate renderer
renderer.connect(source) # connect to source
attributes = renderer.skinAttributes = [ ]
collectAttributes(attributes, widget, context, skin_path_prefix, ignore=('render', 'source'))
screen.renderer.append(renderer)
def process_applet(widget, context):
try:
codeText = widget.text.strip()
widgetType = widget.attrib.get('type')
code = compile(codeText, "skin applet", "exec")
except Exception, ex:
raise SkinError("applet failed to compile: " + str(ex))
if widgetType == "onLayoutFinish":
screen.onLayoutFinish.append(code)
else:
raise SkinError("applet type '%s' unknown!" % widgetType)
def process_elabel(widget, context):
w = additionalWidget()
w.widget = eLabel
w.skinAttributes = [ ]
collectAttributes(w.skinAttributes, widget, context, skin_path_prefix, ignore=('name',))
screen.additionalWidgets.append(w)
def process_epixmap(widget, context):
w = additionalWidget()
w.widget = ePixmap
w.skinAttributes = [ ]
collectAttributes(w.skinAttributes, widget, context, skin_path_prefix, ignore=('name',))
screen.additionalWidgets.append(w)
def process_screen(widget, context):
for w in widget.getchildren():
conditional = w.attrib.get('conditional')
if conditional and not [i for i in conditional.split(",") if i in screen.keys()]:
continue
p = processors.get(w.tag, process_none)
try:
p(w, context)
except SkinError, e:
print "[Skin] SKIN ERROR in screen '%s' widget '%s':" % (name, w.tag), e
def process_panel(widget, context):
n = widget.attrib.get('name')
if n:
try:
s = dom_screens[n]
except KeyError:
print "[SKIN] Unable to find screen '%s' referred in screen '%s'" % (n, name)
else:
process_screen(s[0], context)
layout = widget.attrib.get('layout')
if layout == 'stack':
cc = SkinContextStack
else:
cc = SkinContext
try:
c = cc(context, widget.attrib.get('position'), widget.attrib.get('size'), widget.attrib.get('font'))
except Exception, ex:
raise SkinError("Failed to create skincontext (%s,%s,%s) in %s: %s" % (widget.attrib.get('position'), widget.attrib.get('size'), widget.attrib.get('font'), context, ex) )
process_screen(widget, c)
processors = {
None: process_none,
"widget": process_widget,
"applet": process_applet,
"eLabel": process_elabel,
"ePixmap": process_epixmap,
"panel": process_panel
}
try:
context.x = 0 # reset offsets, all components are relative to screen
context.y = 0 # coordinates.
process_screen(myscreen, context)
except Exception, e:
print "[Skin] SKIN ERROR in %s:" % name, e
from Components.GUIComponent import GUIComponent
nonvisited_components = [x for x in set(screen.keys()) - visited_components if isinstance(x, GUIComponent)]
assert not nonvisited_components, "the following components in %s don't have a skin entry: %s" % (name, ', '.join(nonvisited_components))
# This may look pointless, but it unbinds 'screen' from the nested scope. A better
# solution is to avoid the nested scope above and use the context object to pass
# things around.
screen = None
visited_components = None
| isslayne/enigma2 | skin.py | Python | gpl-2.0 | 35,736 | 0.035678 |
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""Functions related to blackbody radiation."""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
# LOCAL
from ..modeling import blackbody as _bb
from ..utils.decorators import deprecated
__all__ = ['blackbody_nu', 'blackbody_lambda']
# Units
FNU = _bb.FNU
FLAM = _bb.FLAM
@deprecated('2.0', alternative='astropy.modeling.blackbody.blackbody_nu')
def blackbody_nu(in_x, temperature):
"""Calculate blackbody flux per steradian, :math:`B_{\\nu}(T)`.
.. note::
Use `numpy.errstate` to suppress Numpy warnings, if desired.
.. warning::
Output values might contain ``nan`` and ``inf``.
Parameters
----------
in_x : number, array-like, or `~astropy.units.Quantity`
Frequency, wavelength, or wave number.
If not a Quantity, it is assumed to be in Hz.
temperature : number, array-like, or `~astropy.units.Quantity`
Blackbody temperature.
If not a Quantity, it is assumed to be in Kelvin.
Returns
-------
flux : `~astropy.units.Quantity`
Blackbody monochromatic flux in
:math:`erg \\; cm^{-2} s^{-1} Hz^{-1} sr^{-1}`.
Raises
------
ValueError
Invalid temperature.
ZeroDivisionError
Wavelength is zero (when converting to frequency).
"""
return _bb.blackbody_nu(in_x, temperature)
@deprecated('2.0', alternative='astropy.modeling.blackbody.blackbody_lambda')
def blackbody_lambda(in_x, temperature):
"""Like :func:`blackbody_nu` but for :math:`B_{\\lambda}(T)`.
Parameters
----------
in_x : number, array-like, or `~astropy.units.Quantity`
Frequency, wavelength, or wave number.
If not a Quantity, it is assumed to be in Angstrom.
temperature : number, array-like, or `~astropy.units.Quantity`
Blackbody temperature.
If not a Quantity, it is assumed to be in Kelvin.
Returns
-------
flux : `~astropy.units.Quantity`
Blackbody monochromatic flux in
:math:`erg \\; cm^{-2} s^{-1} \\mathring{A}^{-1} sr^{-1}`.
"""
return _bb.blackbody_lambda(in_x, temperature)
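# Illustrative usage (doctest-style sketch; requires astropy.units):
#
#     >>> from astropy import units as u
#     >>> blackbody_nu(500 * u.GHz, 5000 * u.K)  # doctest: +SKIP
#     <Quantity ... erg / (cm2 Hz s sr)>
#     >>> blackbody_lambda(5000 * u.AA, 5000 * u.K)  # doctest: +SKIP
#     <Quantity ... erg / (Angstrom cm2 s sr)>
#
# Both functions are deprecated shims; as the decorators above advise, new
# code should call astropy.modeling.blackbody directly.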
| AustereCuriosity/astropy | astropy/analytic_functions/blackbody.py | Python | bsd-3-clause | 2,225 | 0 |
class ValueWrapper(object):
xmlEntry = None # Processdata sets this every time before extracting a new Entry.
idcounter = 1000 # class variable used to generate unique widget ids
@staticmethod
def reset_id_counter():
ValueWrapper.idcounter = 1000
def __init__(self, val):
self._value = val
self.id = "t" + str(ValueWrapper.idcounter)
self.manuallyEdited = False
self.error = False
if ValueWrapper.xmlEntry is not None and self.id in ValueWrapper.xmlEntry.attrib:
# there is manual entered value for this field in xml, use it instead
self._value = ValueWrapper.xmlEntry.attrib[self.id]
self.manuallyEdited = True
ValueWrapper.idcounter += 1
def manualEdit(self, val):
"""
:param val: Meant to manually edit the value from GUI.
:return:
"""
self._value = val
self.manuallyEdited = True
@property
def value(self):
return self._value
@value.setter
def value(self, value):
if not self.manuallyEdited:
self._value = value
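# Minimal usage sketch (assumed workflow, not part of the original file):
# Processdata points ValueWrapper.xmlEntry at the current XML element before
# building wrappers, so corrections saved in the XML override extracted data.
#
#     ValueWrapper.reset_id_counter()
#     ValueWrapper.xmlEntry = None     # no saved manual corrections
#     w = ValueWrapper("Helsinki")     # gets id "t1000"
#     w.manualEdit("Espoo")            # GUI correction wins from now on
#     w.value = "Helsinki"             # ignored: manuallyEdited is True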
| Learning-from-our-past/Kaira | interface/valuewrapper.py | Python | gpl-2.0 | 1,114 | 0.002693 |
#!/usr/bin/env python
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Merge the PGC files generated during the profiling step to the PGD database.
This is required to work around a flakiness in pgomgr.exe where it can run out
of address space while trying to merge all the PGC files at the same time.
"""
import glob
import json
import optparse
import os
import subprocess
import sys
script_dir = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(0, os.path.join(script_dir, os.pardir))
import vs_toolchain
# Number of PGC files that should be merged in each iteration, merging all
# the files one by one is really slow but merging more than 10 at a time doesn't
# really seem to impact the total time (when merging 180 files).
#
# Number of pgc merged per iteration | Time (in min)
# 1 | 27.2
# 10 | 12.8
# 20 | 12.0
# 30 | 11.5
# 40 | 11.4
# 50 | 11.5
# 60 | 11.6
# 70 | 11.6
# 80 | 11.7
#
# TODO(sebmarchand): Measure the memory usage of pgomgr.exe to see how it get
# affected by the number of pgc files.
_BATCH_SIZE_DEFAULT = 10
def find_pgomgr(chrome_checkout_dir):
"""Find pgomgr.exe."""
win_toolchain_json_file = os.path.join(chrome_checkout_dir, 'build',
'win_toolchain.json')
if not os.path.exists(win_toolchain_json_file):
raise Exception('The toolchain JSON file is missing.')
with open(win_toolchain_json_file) as temp_f:
toolchain_data = json.load(temp_f)
if not os.path.isdir(toolchain_data['path']):
raise Exception('The toolchain JSON file is invalid.')
# Always use the x64 version of pgomgr (the x86 one doesn't work on the bot's
# environment).
pgomgr_dir = None
if toolchain_data['version'] == '2017':
vc_tools_root = vs_toolchain.FindVCToolsRoot()
pgomgr_dir = os.path.join(vc_tools_root, 'HostX64', 'x64')
pgomgr_path = os.path.join(pgomgr_dir, 'pgomgr.exe')
if not os.path.exists(pgomgr_path):
raise Exception('pgomgr.exe is missing from %s.' % pgomgr_dir)
return pgomgr_path
def merge_pgc_files(pgomgr_path, files, pgd_path):
"""Merge all the pgc_files in |files| to |pgd_path|."""
merge_command = [
pgomgr_path,
'/merge'
]
merge_command.extend(files)
merge_command.append(pgd_path)
proc = subprocess.Popen(merge_command, stdout=subprocess.PIPE)
stdout, _ = proc.communicate()
print stdout
return proc.returncode
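# For reference, the command assembled above has this shape (file names are
# assumptions for illustration):
#
#     pgomgr.exe /merge chrome_child!1.pgc chrome_child!2.pgc chrome_child.pgd
#
# i.e. every PGC profile fragment in |files| is folded into the single PGD
# database at |pgd_path|.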
def main():
parser = optparse.OptionParser(usage='%prog [options]')
parser.add_option('--checkout-dir', help='The Chrome checkout directory.')
parser.add_option('--target-cpu', help='[DEPRECATED] The target\'s bitness.')
parser.add_option('--build-dir', help='Chrome build directory.')
parser.add_option('--binary-name', help='The binary for which the PGC files '
'should be merged, without extension.')
parser.add_option('--files-per-iter', help='The number of PGC files to merge '
'in each iteration, default to %d.' % _BATCH_SIZE_DEFAULT,
type='int', default=_BATCH_SIZE_DEFAULT)
options, _ = parser.parse_args()
if not options.checkout_dir:
parser.error('--checkout-dir is required')
if not options.build_dir:
parser.error('--build-dir is required')
if not options.binary_name:
parser.error('--binary-name is required')
# Starts by finding pgomgr.exe.
pgomgr_path = find_pgomgr(options.checkout_dir)
pgc_files = glob.glob(os.path.join(options.build_dir,
'%s*.pgc' % options.binary_name))
pgd_file = os.path.join(options.build_dir, '%s.pgd' % options.binary_name)
def _split_in_chunks(items, chunk_size):
"""Split |items| in chunks of size |chunk_size|.
Source: http://stackoverflow.com/a/312464
"""
for i in xrange(0, len(items), chunk_size):
yield items[i:i + chunk_size]
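  # Chunking sketch (illustrative): with chunk_size=2,
  #     list(_split_in_chunks([1, 2, 3, 4, 5], 2))  # -> [[1, 2], [3, 4], [5]]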
for chunk in _split_in_chunks(pgc_files, options.files_per_iter):
files_to_merge = []
for pgc_file in chunk:
files_to_merge.append(
os.path.join(options.build_dir, os.path.basename(pgc_file)))
ret = merge_pgc_files(pgomgr_path, files_to_merge, pgd_file)
# pgomgr.exe sometimes fails to merge too many files at the same time (it
# usually complains that a stream is missing, but if you try to merge this
# file individually it works), try to merge all the PGCs from this batch one
# at a time instead. Don't fail the build if we can't merge a file.
# TODO(sebmarchand): Report this to Microsoft, check if this is still
# happening with VS2017.
if ret != 0:
print ('Error while trying to merge several PGC files at the same time, '
'trying to merge them one by one.')
for pgc_file in chunk:
ret = merge_pgc_files(
pgomgr_path,
[os.path.join(options.build_dir, os.path.basename(pgc_file))],
pgd_file
)
if ret != 0:
print 'Error while trying to merge %s, continuing.' % pgc_file
if __name__ == '__main__':
sys.exit(main())
| Passw/gn_GFW | build/win/merge_pgc_files.py | Python | gpl-3.0 | 5,397 | 0.008894 |
# coding=utf-8
"""This module contains the abstract class of the MinimumNeeds. The storage
logic is omitted here."""
__author__ = 'Christian Christelis <christian@kartoza.com>'
__date__ = '05/10/2014'
__copyright__ = ('Copyright 2014, Australia Indonesia Facility for '
'Disaster Reduction')
from collections import OrderedDict
import json
from os.path import exists, dirname
from os import remove
from safe.utilities.i18n import tr
class MinimumNeeds(object):
"""A abstract class for handling the minimum needs.
The persistence logic is excluded from this class.
.. versionadded:: 2.2.
"""
def get_need(self, resource):
"""Get a resource from the minimum_needs.
:param resource: The resource name
:type resource: basestring
:returns: resource needed.
:rtype: dict, None
"""
for need in self.minimum_needs:
if need['name'] == resource:
return need
return None
def get_minimum_needs(self):
"""Get the minimum needed information about the minimum needs.
That is the resource and the amount.
:returns: minimum needs
:rtype: OrderedDict
"""
minimum_needs = OrderedDict()
for resource in self.minimum_needs['resources']:
if resource['Unit abbreviation']:
name = '%s [%s]' % (
tr(resource['Resource name']),
resource['Unit abbreviation']
)
else:
name = tr(resource['Resource name'])
amount = resource['Default']
minimum_needs[name] = amount
return OrderedDict(minimum_needs)
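# Shape sketch (amounts taken from _defaults below; "needs" is assumed to be
# an instance of a concrete MinimumNeeds subclass):
#
#     needs.get_minimum_needs()
#     # -> OrderedDict([('Rice [kg]', '2.8'),
#     #                 ('Drinking Water [l]', '17.5'), ...])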
def get_full_needs(self):
"""The full list of minimum needs with all fields.
:returns: minimum needs
:rtype: dict
"""
return self.minimum_needs
def set_need(self, resource, amount, units, frequency='weekly'):
"""Append a single new minimum need entry to the list.
:param resource: Minimum need resource name.
:type resource: basestring
:param amount: Amount per person per time interval
:type amount: int, float
:param units: The unit that the resource is measured in.
:type units: basestring
:param frequency: How regularly the unit needs to be dispatched
:type frequency: basestring # maybe at some point fix this to a selection.
"""
self.minimum_needs['resources'].append({
'Resource name': resource,
'Default': amount,
'Unit abbreviation': units,
'Frequency': frequency
})
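# Usage sketch (illustrative resource, not from the original file):
#
#     needs.set_need('Blankets', 1, '', frequency='single')
#     needs.get_need('Blankets')
#     # -> {'Resource name': 'Blankets', 'Default': 1,
#     #     'Unit abbreviation': '', 'Frequency': 'single'}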
def update_minimum_needs(self, minimum_needs):
"""Overwrite the internal minimum needs with new needs.
Validate the new minimum needs. If ok, set these as the internal
minimum needs.
:param minimum_needs: The new minimum
:type minimum_needs: dict
:returns: Returns success code, -1 for failure, 0 for success.
:rtype: int
"""
if not isinstance(minimum_needs, dict):
return -1
# noinspection PyAttributeOutsideInit
self.minimum_needs = minimum_needs
return 0
@staticmethod
def _defaults():
"""Helper to get the default minimum needs.
.. note:: Key names will be translated.
"""
rice = tr('Rice')
drinking_water = tr('Drinking Water')
water = tr('Clean Water')
family_kits = tr('Family Kits')
toilets = tr('Toilets')
minimum_needs = {
"resources": [
{
"Default": "2.8",
"Minimum allowed": "0",
"Maximum allowed": "100",
"Frequency": "weekly",
"Resource name": rice,
"Resource description": "Basic food",
"Unit": "kilogram",
"Units": "kilograms",
"Unit abbreviation": "kg",
"Readable sentence": (
"Each person should be provided with {{ Default }} "
"{{ Units }} of {{ Resource name }} {{ Frequency }}.")
},
{
"Default": "17.5",
"Minimum allowed": "0",
"Maximum allowed": "100",
"Frequency": "weekly",
"Resource name": drinking_water,
"Resource description": "For drinking",
"Unit": "litre",
"Units": "litres",
"Unit abbreviation": "l",
"Readable sentence": (
"Each person should be provided with {{ Default }} "
"{{ Units }} of {{ Resource name }} {{ Frequency }} "
"for drinking.")
},
{
"Default": "67",
"Minimum allowed": "10",
"Maximum allowed": "100",
"Frequency": "weekly",
"Resource name": water,
"Resource description": "For washing",
"Unit": "litre",
"Units": "litres",
"Unit abbreviation": "l",
"Readable sentence": (
"Each person should be provided with {{ Default }} "
"{{ Units }} of {{ Resource name }} {{ Frequency }} "
"for washing.")
},
{
"Default": "0.2",
"Minimum allowed": "0.1",
"Maximum allowed": "1",
"Frequency": "weekly",
"Resource name": family_kits,
"Resource description": "Hygiene kits",
"Unit": "",
"Units": "",
"Unit abbreviation": "",
"Readable sentence": (
"Each family of 5 persons should be provided with 1 "
"Family Kit per week.")
},
{
"Default": "0.05",
"Minimum allowed": "0.02",
"Maximum allowed": "1",
"Frequency": "single",
"Resource name": toilets,
"Resource description": "",
"Unit": "",
"Units": "",
"Unit abbreviation": "",
"Readable sentence": (
"A Toilet should be provided for every 20 persons.")
}
],
"provenance": "The minimum needs are based on Perka 7/2008.",
"profile": "BNPB_en"
}
return minimum_needs
def read_from_file(self, filename):
"""Read from an existing json file.
:param filename: The file to be written to.
:type filename: basestring, str
:returns: Success status. -1 for unsuccessful 0 for success
:rtype: int
"""
if not exists(filename):
return -1
with open(filename) as fd:
needs_json = fd.read()
try:
minimum_needs = json.loads(needs_json)
except (TypeError, ValueError):
minimum_needs = None
if not minimum_needs:
return -1
return self.update_minimum_needs(minimum_needs)
def write_to_file(self, filename):
"""Write minimum needs as json to a file.
:param filename: The file to be written to.
:type filename: basestring, str
"""
if not exists(dirname(filename)):
return -1
with open(filename, 'w') as fd:
needs_json = json.dumps(self.minimum_needs)
fd.write(needs_json)
return 0
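# Round-trip sketch (the path is an assumption): both methods return 0 on
# success and -1 on failure, so callers can chain the checks.
#
#     if needs.write_to_file('/tmp/minimum_needs.json') == 0:
#         needs.read_from_file('/tmp/minimum_needs.json')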
@staticmethod
def remove_file(filename):
"""Remove a minimum needs file.
:param filename: The file to be removed.
:type filename: basestring, str
"""
if not exists(dirname(filename)):
return -1
try:
remove(filename)
except OSError:
return -1
return 0
| Jannes123/inasafe | safe/common/minimum_needs.py | Python | gpl-3.0 | 8,353 | 0 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
##############################################
# The MIT License (MIT)
# Copyright (c) 2018 Kevin Walchko
# see LICENSE for full details
##############################################
from pygecko.multiprocessing import geckopy
from pygecko.multiprocessing import GeckoSimpleProcess
from pygecko.transport.protocols import MsgPack, MsgPackCustom
import time
from math import cos, pi
def pub(**kwargs):
geckopy.init_node(**kwargs)
rate = geckopy.Rate(2)
p = geckopy.pubBinderTCP("local", "bob")
if p is None:
print("ERROR setting up publisher")
return
cnt = 0
while not geckopy.is_shutdown():
# msg = "hi" + str(cnt)
msg = [pi, cos(pi), cos(pi / 2)]
p.publish(msg)
print("sent")
rate.sleep()
cnt += 1
def sub(**kwargs):
geckopy.init_node(**kwargs)
rate = geckopy.Rate(2)
s = geckopy.subConnectTCP("local", "bob")
if s is None:
print("ERROR setting up subscriber")
return
cnt = 0
while not geckopy.is_shutdown():
data = s.recv_nb()
print("sub:", data)
rate.sleep()
if __name__ == '__main__':
args = {}
p = GeckoSimpleProcess()
p.start(func=pub, name='pub', kwargs=args)
s = GeckoSimpleProcess()
s.start(func=sub, name='sub', kwargs=args)
| walchko/pygecko | dev/cpp-simple/subpub.py | Python | mit | 1,370 | 0.00219 |