text
stringlengths 6
947k
| repo_name
stringlengths 5
100
| path
stringlengths 4
231
| language
stringclasses 1
value | license
stringclasses 15
values | size
int64 6
947k
| score
float64 0
0.34
|
---|---|---|---|---|---|---|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2016, Brian Coca <bcoca@ansible.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
# ansible-doc metadata: this module exposes a stable interface and is
# maintained by the Ansible core team.
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['stableinterface'],
                    'supported_by': 'core'}
DOCUMENTATION = '''
module: systemd
author:
- Ansible Core Team
version_added: "2.2"
short_description: Manage services
description:
- Controls systemd services on remote hosts.
options:
name:
description:
- Name of the service. When using in a chroot environment you always need to specify the full name i.e. (crond.service).
aliases: [ service, unit ]
state:
description:
- C(started)/C(stopped) are idempotent actions that will not run commands unless necessary.
C(restarted) will always bounce the service. C(reloaded) will always reload.
choices: [ reloaded, restarted, started, stopped ]
enabled:
description:
- Whether the service should start on boot. B(At least one of state and enabled are required.)
type: bool
force:
description:
- Whether to override existing symlinks.
type: bool
version_added: 2.6
masked:
description:
- Whether the unit should be masked or not, a masked unit is impossible to start.
type: bool
daemon_reload:
description:
- run daemon-reload before doing any other operations, to make sure systemd has read any changes.
type: bool
default: 'no'
aliases: [ daemon-reload ]
user:
description:
- run systemctl talking to the service manager of the calling user, rather than the service manager
of the system.
type: bool
default: 'no'
no_block:
description:
- Do not synchronously wait for the requested operation to finish.
Enqueued job will continue without Ansible blocking on its completion.
type: bool
default: 'no'
version_added: "2.3"
notes:
- Since 2.4, one of the following options is required 'state', 'enabled', 'masked', 'daemon_reload', and all except 'daemon_reload' also require 'name'.
- Before 2.4 you always required 'name'.
requirements:
- A system managed by systemd.
'''
EXAMPLES = '''
- name: Make sure a service is running
systemd:
state: started
name: httpd
- name: stop service cron on debian, if running
systemd:
name: cron
state: stopped
- name: restart service cron on centos, in all cases, also issue daemon-reload to pick up config changes
systemd:
state: restarted
daemon_reload: yes
name: crond
- name: reload service httpd, in all cases
systemd:
name: httpd
state: reloaded
- name: enable service httpd and ensure it is not masked
systemd:
name: httpd
enabled: yes
masked: no
- name: enable a timer for dnf-automatic
systemd:
name: dnf-automatic.timer
state: started
enabled: True
- name: just force systemd to reread configs (2.4 and above)
systemd:
daemon_reload: yes
'''
RETURN = '''
status:
description: A dictionary with the key=value pairs returned from `systemctl show`
returned: success
type: complex
contains: {
"ActiveEnterTimestamp": "Sun 2016-05-15 18:28:49 EDT",
"ActiveEnterTimestampMonotonic": "8135942",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "active",
"After": "auditd.service systemd-user-sessions.service time-sync.target systemd-journald.socket basic.target system.slice",
"AllowIsolate": "no",
"Before": "shutdown.target multi-user.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "1000",
"CPUAccounting": "no",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "1024",
"CanIsolate": "no",
"CanReload": "yes",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "18446744073709551615",
"ConditionResult": "yes",
"ConditionTimestamp": "Sun 2016-05-15 18:28:49 EDT",
"ConditionTimestampMonotonic": "7902742",
"Conflicts": "shutdown.target",
"ControlGroup": "/system.slice/crond.service",
"ControlPID": "0",
"DefaultDependencies": "yes",
"Delegate": "no",
"Description": "Command Scheduler",
"DevicePolicy": "auto",
"EnvironmentFile": "/etc/sysconfig/crond (ignore_errors=no)",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainPID": "595",
"ExecMainStartTimestamp": "Sun 2016-05-15 18:28:49 EDT",
"ExecMainStartTimestampMonotonic": "8134990",
"ExecMainStatus": "0",
"ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStart": "{ path=/usr/sbin/crond ; argv[]=/usr/sbin/crond -n $CRONDARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"FragmentPath": "/usr/lib/systemd/system/crond.service",
"GuessMainPID": "yes",
"IOScheduling": "0",
"Id": "crond.service",
"IgnoreOnIsolate": "no",
"IgnoreOnSnapshot": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestamp": "Sun 2016-05-15 18:28:49 EDT",
"InactiveExitTimestampMonotonic": "8135942",
"JobTimeoutUSec": "0",
"KillMode": "process",
"KillSignal": "15",
"LimitAS": "18446744073709551615",
"LimitCORE": "18446744073709551615",
"LimitCPU": "18446744073709551615",
"LimitDATA": "18446744073709551615",
"LimitFSIZE": "18446744073709551615",
"LimitLOCKS": "18446744073709551615",
"LimitMEMLOCK": "65536",
"LimitMSGQUEUE": "819200",
"LimitNICE": "0",
"LimitNOFILE": "4096",
"LimitNPROC": "3902",
"LimitRSS": "18446744073709551615",
"LimitRTPRIO": "0",
"LimitRTTIME": "18446744073709551615",
"LimitSIGPENDING": "3902",
"LimitSTACK": "18446744073709551615",
"LoadState": "loaded",
"MainPID": "595",
"MemoryAccounting": "no",
"MemoryLimit": "18446744073709551615",
"MountFlags": "0",
"Names": "crond.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "none",
"OOMScoreAdjust": "0",
"OnFailureIsolate": "no",
"PermissionsStartOnly": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"RemainAfterExit": "no",
"Requires": "basic.target",
"Restart": "no",
"RestartUSec": "100ms",
"Result": "success",
"RootDirectoryStartOnly": "no",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"Slice": "system.slice",
"StandardError": "inherit",
"StandardInput": "null",
"StandardOutput": "journal",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitInterval": "10000000",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "running",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TimeoutStartUSec": "1min 30s",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "simple",
"UMask": "0022",
"UnitFileState": "enabled",
"WantedBy": "multi-user.target",
"Wants": "system.slice",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "0",
}
''' # NOQA
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.service import sysv_exists, sysv_is_enabled, fail_if_missing
from ansible.module_utils._text import to_native
def is_running_service(service_status):
    """Return True if the unit described by *service_status* is running.

    :param service_status: dict parsed from ``systemctl show`` output
        (see ``parse_systemctl_show``); must contain 'ActiveState'.
    :raises KeyError: if 'ActiveState' is absent.

    Both 'active' and 'activating' count as running, which is how the
    rest of this module decides start/stop idempotency.
    """
    # Tuple-literal membership: the original allocated a new set() on
    # every call just to test two constants.
    return service_status['ActiveState'] in ('active', 'activating')
def request_was_ignored(out):
    """Return True when systemctl replied with an 'ignoring request'
    notice (e.g. inside a chroot) instead of its usual key=value dump.
    """
    looks_like_show_output = '=' in out
    return (not looks_like_show_output) and 'ignoring request' in out
def parse_systemctl_show(lines):
    """Parse ``systemctl show`` output lines into a dict.

    Values may span several lines.  Such values appear wrapped in {},
    but a single-line value can also *start* with '{' without ending in
    '}' (e.g. in Description=), so multi-line accumulation is attempted
    only for keys beginning with 'Exec' (ExecStart=, ExecReload=, ...),
    the only keys known to produce multi-line values.  Anything else is
    treated as a plain single-line ``key=value`` pair.
    """
    result = {}
    pending_key = None
    pending_lines = []
    for raw in lines:
        if pending_key is not None:
            # Continuation of a multi-line Exec* value; '}' at end closes it.
            pending_lines.append(raw)
            if raw.rstrip().endswith('}'):
                result[pending_key] = '\n'.join(pending_lines).strip()
                pending_lines = []
                pending_key = None
            continue
        if '=' not in raw:
            continue
        key, value = raw.split('=', 1)
        opens_multiline = (key.startswith('Exec')
                           and value.lstrip().startswith('{')
                           and not value.rstrip().endswith('}'))
        if opens_multiline:
            pending_key = key
            pending_lines = [value]
        else:
            result[key] = value.strip()
    return result
# ===========================================
# Main control flow
# ===========================================
# Main control flow


def main():
    """Module entry point.

    Validates parameters, optionally runs daemon-reload, then applies
    the requested mask/enable/state operations to the named unit and
    exits via module.exit_json()/fail_json().
    """
    # initialize
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(type='str', aliases=['service', 'unit']),
            state=dict(type='str', choices=['reloaded', 'restarted', 'started', 'stopped']),
            enabled=dict(type='bool'),
            force=dict(type='bool'),
            masked=dict(type='bool'),
            daemon_reload=dict(type='bool', default=False, aliases=['daemon-reload']),
            user=dict(type='bool', default=False),
            no_block=dict(type='bool', default=False),
        ),
        supports_check_mode=True,
        required_one_of=[['state', 'enabled', 'masked', 'daemon_reload']],
    )

    systemctl = module.get_bin_path('systemctl', True)

    # Global flags are folded into the command string so every
    # subsequent run_command() invocation inherits them.
    if module.params['user']:
        systemctl = systemctl + " --user"
    if module.params['no_block']:
        systemctl = systemctl + " --no-block"
    if module.params['force']:
        systemctl = systemctl + " --force"

    unit = module.params['name']
    rc = 0
    out = err = ''
    result = dict(
        name=unit,
        changed=False,
        status=dict(),
    )

    # 'name' is mandatory for everything except a bare daemon-reload.
    for requires in ('state', 'enabled', 'masked'):
        if module.params[requires] is not None and unit is None:
            module.fail_json(msg="name is also required when specifying %s" % requires)

    # Run daemon-reload first, if requested
    if module.params['daemon_reload'] and not module.check_mode:
        (rc, out, err) = module.run_command("%s daemon-reload" % (systemctl))
        if rc != 0:
            module.fail_json(msg='failure %d during daemon-reload: %s' % (rc, err))

    if unit:
        found = False
        is_initd = sysv_exists(unit)
        is_systemd = False

        # check service data, cannot error out on rc as it changes across versions, assume not found
        (rc, out, err) = module.run_command("%s show '%s'" % (systemctl, unit))

        if request_was_ignored(out) or request_was_ignored(err):
            # fallback list-unit-files as show does not work on some systems (chroot)
            # not used as primary as it skips some services (like those using init.d) and requires .service/etc notation
            (rc, out, err) = module.run_command("%s list-unit-files '%s'" % (systemctl, unit))
            if rc == 0:
                is_systemd = True

        elif rc == 0:
            # load return of systemctl show into dictionary for easy access and return
            if out:
                result['status'] = parse_systemctl_show(to_native(out).split('\n'))

                is_systemd = 'LoadState' in result['status'] and result['status']['LoadState'] != 'not-found'

                # Check for loading error
                if is_systemd and 'LoadError' in result['status']:
                    module.fail_json(msg="Error loading unit file '%s': %s" % (unit, result['status']['LoadError']))
        else:
            # Check for systemctl command
            module.run_command(systemctl, check_rc=True)

        # Does service exist?
        found = is_systemd or is_initd
        if is_initd and not is_systemd:
            module.warn('The service (%s) is actually an init script but the system is managed by systemd' % unit)

        # mask/unmask the service, if requested, can operate on services before they are installed
        if module.params['masked'] is not None:
            # state is not masked unless systemd affirms otherwise
            masked = ('LoadState' in result['status'] and result['status']['LoadState'] == 'masked')

            if masked != module.params['masked']:
                result['changed'] = True
                if module.params['masked']:
                    action = 'mask'
                else:
                    action = 'unmask'

                if not module.check_mode:
                    (rc, out, err) = module.run_command("%s %s '%s'" % (systemctl, action, unit))
                    if rc != 0:
                        # some versions of system CAN mask/unmask non existing services, we only fail on missing if they don't
                        fail_if_missing(module, found, unit, msg='host')

        # Enable/disable service startup at boot if requested
        if module.params['enabled'] is not None:

            if module.params['enabled']:
                action = 'enable'
            else:
                action = 'disable'

            fail_if_missing(module, found, unit, msg='host')

            # do we need to enable the service?
            enabled = False
            (rc, out, err) = module.run_command("%s is-enabled '%s'" % (systemctl, unit))

            # check systemctl result or if it is a init script
            if rc == 0:
                enabled = True
            elif rc == 1:
                # if not a user service and both init script and unit file exist stdout should have enabled/disabled, otherwise use rc entries
                if not module.params['user'] and \
                        is_initd and \
                        (not out.strip().endswith('disabled') or sysv_is_enabled(unit)):
                    enabled = True

            # default to current state
            result['enabled'] = enabled

            # Change enable/disable if needed
            if enabled != module.params['enabled']:
                result['changed'] = True
                if not module.check_mode:
                    (rc, out, err) = module.run_command("%s %s '%s'" % (systemctl, action, unit))
                    if rc != 0:
                        module.fail_json(msg="Unable to %s service %s: %s" % (action, unit, out + err))

                result['enabled'] = not enabled

        # set service state if requested
        if module.params['state'] is not None:
            fail_if_missing(module, found, unit, msg="host")

            # default to desired state
            result['state'] = module.params['state']

            # What is current service state?
            if 'ActiveState' in result['status']:
                action = None
                if module.params['state'] == 'started':
                    if not is_running_service(result['status']):
                        action = 'start'
                elif module.params['state'] == 'stopped':
                    if is_running_service(result['status']):
                        action = 'stop'
                else:
                    # restarted/reloaded: a stopped service is simply started instead
                    if not is_running_service(result['status']):
                        action = 'start'
                    else:
                        action = module.params['state'][:-2]  # remove 'ed' from restarted/reloaded
                    result['state'] = 'started'

                if action:
                    result['changed'] = True
                    if not module.check_mode:
                        (rc, out, err) = module.run_command("%s %s '%s'" % (systemctl, action, unit))
                        if rc != 0:
                            module.fail_json(msg="Unable to %s service %s: %s" % (action, unit, err))
            else:
                # this should not happen?
                module.fail_json(msg="Service is in unknown state", status=result['status'])

    module.exit_json(**result)


if __name__ == '__main__':
    main()
|
hkariti/ansible
|
lib/ansible/modules/system/systemd.py
|
Python
|
gpl-3.0
| 18,087 | 0.002764 |
from bedrock.redirects.util import redirect
# Legacy newsletter URL redirects.  Each pattern tolerates an optional
# trailing '/' or '/index.html' suffix and maps to a named view.
redirectpatterns = (
    # bug 926629
    redirect(r'^newsletter/about_mobile(?:/(?:index\.html)?)?$', 'newsletter.subscribe'),
    redirect(r'^newsletter/about_mozilla(?:/(?:index\.html)?)?$', 'mozorg.contribute.index'),
    redirect(r'^newsletter/new(?:/(?:index\.html)?)?$', 'newsletter.subscribe'),
    redirect(r'^newsletter/ios(?:/(?:index\.html)?)?$', 'firefox.mobile.index'),
)
|
sgarrity/bedrock
|
bedrock/newsletter/redirects.py
|
Python
|
mpl-2.0
| 432 | 0.009259 |
#!/usr/bin/python
import sys, argparse, StringIO, re, gzip
from multiprocessing import Pool, cpu_count, Queue
from Bio.Format.BGZF import is_bgzf, reader as BGZF_reader, get_block_bounds
from Bio.Format.Fastq import FastqEntry
# Create an index for bgzf zipped fastq files.
# Pre: A fastq file that has been compressed by bgzf
# Post: the Pre file, with the exension .bgi added.
# the index is gzipped
# <name> <blockStart> <innerStart> <dataSize> <read length>
# Be cautious that name could contain spaces and tabs
# Module-level shared state used by main()/get_nls()/do_nls_output():
# blocks: BGZF block start offset -> list of [block_end, newline_offset]
#         entries marking fastq record boundaries within that block.
blocks = {}
# Running newline counter; every 4th newline ends a fastq record, so the
# byte after it is a record start (see do_nls_output).
ncount = 1
def main():
    """Build a .bgi index for a BGZF-compressed fastq file.

    Writes <input_file>.bgi (gzipped, tab separated):
        <name> <blockStart> <innerStart> <dataSize> <read length>
    Record names may contain spaces/tabs, so this format is positional.
    """
    global blocks
    parser = argparse.ArgumentParser(description="Take a bgzf compressed fastq file and make an index",formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('input_file',help="BGZF compressed fastq file")
    parser.add_argument('--threads',type=int,default=1,help="number of threads")
    args = parser.parse_args()
    # BGZF framing is required for random access; refuse anything else.
    if not is_bgzf(args.input_file):
        sys.stderr.write("ERROR: not a proper BGZF compressed file\n")
        sys.exit()
    z = 0
    sys.stderr.write("scanning block starts\n")
    bs = get_block_bounds(args.input_file)
    # Seed the first block with inner offset -1 so the +1 below yields 0,
    # i.e. the very start of the file counts as a record start.
    blocks[bs[0][0]] = [[bs[0][1],-1]]
    sys.stderr.write("scanning for new lines\n")
    z = 0
    # NOTE(review): --threads is accepted but this scan is serial; the
    # multiprocessing variant was commented out in the original source.
    for bounds in bs:
        z += 1
        v = get_nls(bounds,args.input_file,z)
        do_nls_output(v)
        sys.stderr.write(str(z)+'/'+str(len(bs))+"\r")
    sys.stderr.write("\n")
    sys.stderr.write("Traverse blocks and writing index\n")
    of = gzip.open(args.input_file+'.bgi','w')
    z = 0
    for block in sorted(blocks):
        z+=1
        sys.stderr.write(str(z)+'/'+str(len(blocks))+"\r")
        if len(blocks[block]) == 0: continue
        bend = blocks[block][0][0]
        # Record starts are the byte following each stored newline offset.
        starts = [x[1]+1 for x in blocks[block]]
        with open(args.input_file,'rb') as inf:
            inf.seek(block)
            # one compressed BGZF block (note: 'bytes' shadows the builtin)
            bytes = inf.read(bend-block)
        s = StringIO.StringIO(bytes)
        v = BGZF_reader(s)
        # A BGZF block decompresses to <64KiB, so 70000 reads all of it.
        ubytes = v.read(70000)
        # All record starts except the last lie wholly inside this block.
        for i in range(len(starts)-1):
            if starts[i] >= len(ubytes):  # start beyond decompressed data: corrupt index
                sys.stderr.write("Problem start\n")
                sys.exit()
            # name line, sequence line, then '+' line and quality line
            m = re.match('([^\n]+)\n([^\n]+)(\n[^\n]+\n[^\n]+)',ubytes[starts[i]:])
            if not m:
                sys.stderr.write("Problem overlap\n")
                sys.exit()
            else:
                if m.group(1)[0] != '@':
                    sys.stderr.write("failed to parse last\n")
                    sys.exit()
                # name (sans '@'), block offset, inner offset, record size (+2 newlines), read length
                of.write(m.group(1)[1:]+"\t"+str(block)+"\t"+str(starts[i])+"\t"+str(len(m.group(1))+len(m.group(2))+len(m.group(3))+2)+"\t"+str(len(m.group(2)))+"\n")
        # The final record of a block may spill into the next block, so
        # re-read it through a seekable BGZF reader positioned just before it.
        with open(args.input_file,'rb') as inf:
            v2 = BGZF_reader(inf,blockStart=block,innerStart=starts[-1]-1)
            spc = v2.read(1)
            if spc != "\n":
                sys.stderr.write("expected newline\n")
                sys.exit()
            cur = v2.get_block_start()
            inn = v2.get_inner_start()
            buffer = ''
            # read the 4 lines of one fastq record character by character
            for i in range(0,4):
                while True:
                    c = v2.read(1)
                    if len(c) == 0: break
                    buffer += c
                    if c == "\n": break
            if buffer == "":
                break
            m = re.match('([^\n]+)\n([^\n]+)',buffer)
            if not m:
                sys.stderr.write("failed to parse last\n"+buffer+"\n")
                sys.exit()
            if m.group(1)[0] != '@':
                sys.stderr.write("failed to parse last\n"+buffer+"\n")
                sys.exit()
            of.write(m.group(1)[1:]+"\t"+str(cur)+"\t"+str(inn)+"\t"+str(len(buffer))+"\t"+str(len(m.group(2)))+"\n")
    sys.stderr.write("\n")
    # NOTE(review): exits without of.close(); gzip output may rely on
    # interpreter shutdown to flush — verify the index is complete.
    sys.exit()
    # --- unreachable legacy code below (kept verbatim): an older
    # sequential indexing pass superseded by the block-based logic above.
    buffer = ''
    with open(args.input_file) as inf:
        reader = BGZF_reader(inf)
        while True:
            cur = reader.get_block_start()
            inn = reader.get_inner_start()
            fq = readfastq(reader)
            z += 1
            if not fq: break
            if z%1000 == 0: sys.stderr.write("Indexed "+str(z)+" reads\r")
            of.write(fq['name']+"\t"+str(cur)+"\t"+str(inn)+"\n")
        inf.close()
    sys.stderr.write("\n")
    of.close()
def get_nls(bounds,fname,i):
    """Find newline positions within one decompressed BGZF block.

    bounds -- [block_start, block_end] compressed byte offsets
    fname  -- path to the BGZF file
    i      -- block ordinal (unused here; presumably kept for the
              abandoned multiprocessing path — see main())
    Returns a list of [block_start, block_end, newline_offset] triples,
    offsets being positions in the *decompressed* block data.
    """
    with open(fname,'rb') as inf:
        inf.seek(bounds[0])
        # one compressed block (note: 'bytes' shadows the builtin)
        bytes = inf.read(bounds[1]-bounds[0])
        s = StringIO.StringIO(bytes)
        v = BGZF_reader(s)
        ubytes = v.read(70000) # always less than 65K by definition
        p = re.compile('\n')
        nls = [m.start() for m in p.finditer(ubytes)]
        breaks = []
        for j in range(len(nls)):
            breaks.append([bounds[0],bounds[1],nls[j]])
        return breaks
def do_nls_output(results):
    """Fold newline positions from get_nls() into the global block map.

    Only every 4th newline (global ncount) is kept: a fastq record is 4
    lines, so the byte after every 4th newline is a record start.  Must
    be called on blocks in file order for the counter to be meaningful.
    """
    global blocks
    global ncount
    for e in results:  # e = [block_start, block_end, newline_offset]
        useval = False
        # ncount is tested before incrementing: newlines 4, 8, 12, ... are kept
        if ncount%4 == 0: useval = True
        ncount += 1
        if not useval: continue
        if e[0] not in blocks: blocks[e[0]] = []
        blocks[e[0]].append([e[1],e[2]])
def readfastq(reader):
    """Read one 4-line fastq record from *reader* one char at a time.

    Returns a FastqEntry, or False at end of input.  Only used by the
    unreachable legacy path at the end of main().
    """
    buffer = ''
    for i in range(0,4):
        v = ''
        while v!="\n":
            v = reader.read(1)
            # EOF mid-record: give up on the whole record
            if len(v) == 0: return False
            buffer += v
    if len(buffer) == 0: return False
    return FastqEntry(buffer.rstrip().split("\n"))
# script entry point
if __name__=="__main__":
    main()
|
jason-weirather/py-seq-tools
|
seqtools/cli/legacy/fastq_bgzf_index.py
|
Python
|
apache-2.0
| 6,010 | 0.030283 |
#Copyright ReportLab Europe Ltd. 2000-2004
#see license.txt for license details
#history http://www.reportlab.co.uk/cgi-bin/viewcvs.cgi/public/reportlab/trunk/reportlab/pdfbase/_fontdata.py
#$Header $
__version__=''' $Id: _fontdata.py 3052 2007-03-07 14:04:49Z rgbecker $ '''
__doc__="""
database of font related things
standardFonts tuple of the 14 standard string font names
standardEncodings tuple of the known standard font names
encodings a mapping object from standard encoding names (and minor variants)
to the encoding vectors ie the tuple of string glyph names
widthsByFontGlyph fontname x glyphname --> width of glyph
widthVectorsByFont fontName -> vector of widths
"""
import UserDict, os, sys
# mapping of name to width vector, starts empty until fonts are added
# e.g. widths['Courier'] = [...600,600,600,...]
widthVectorsByFont = {}
# registries populated elsewhere as fonts are added; presumably keyed by
# font name and by base encoding respectively — confirm against callers
fontsByName = {}
fontsByBaseEnc = {}
# this is a list of the standard 14 font names in Acrobat Reader
standardFonts = (
    'Courier', 'Courier-Bold', 'Courier-Oblique', 'Courier-BoldOblique',
    'Helvetica', 'Helvetica-Bold', 'Helvetica-Oblique', 'Helvetica-BoldOblique',
    'Times-Roman', 'Times-Bold', 'Times-Italic', 'Times-BoldItalic',
    'Symbol', 'ZapfDingbats')
# font name -> (family, bold, italic) flags for the standard 14
standardFontAttributes = {
    # family, bold, italic defined for basic ones
    'Courier':('Courier',0,0),
    'Courier-Bold':('Courier',1,0),
    'Courier-Oblique':('Courier',0,1),
    'Courier-BoldOblique':('Courier',1,1),
    'Helvetica':('Helvetica',0,0),
    'Helvetica-Bold':('Helvetica',1,0),
    'Helvetica-Oblique':('Helvetica',0,1),
    'Helvetica-BoldOblique':('Helvetica',1,1),
    'Times-Roman':('Times-Roman',0,0),
    'Times-Bold':('Times-Roman',1,0),
    'Times-Italic':('Times-Roman',0,1),
    'Times-BoldItalic':('Times-Roman',1,1),
    'Symbol':('Symbol',0,0),
    'ZapfDingbats':('ZapfDingbats',0,0)
    }
# this maps fontnames to the equivalent filename root.
# Win32 distributions use 8-character mangled filename roots.
_font2fnrMapWin32 = {
    'symbol':                   'Sy______',
    'zapfdingbats':             'Zd______',
    'helvetica':                '_a______',
    'helvetica-bold':           '_ab_____',
    'helvetica-boldoblique':    '_abi____',
    'helvetica-oblique':        '_ai_____',
    'times-bold':               '_eb_____',
    'times-bolditalic':         '_ebi____',
    'times-italic':             '_ei_____',
    'times-roman':              '_er_____',
    'courier-bold':             'cob_____',
    'courier-boldoblique':      'cobo____',
    'courier':                  'com_____',
    'courier-oblique':          'coo_____',
    }
if sys.platform in ('linux2',):
    # On Linux the font files carry full descriptive names instead.
    _font2fnrMapLinux2 = {
        'symbol':                   'Symbol',
        'zapfdingbats':             'ZapfDingbats',
        'helvetica':                'Arial',
        'helvetica-bold':           'Arial-Bold',
        'helvetica-boldoblique':    'Arial-BoldItalic',
        'helvetica-oblique':        'Arial-Italic',
        'times-bold':               'TimesNewRoman-Bold',
        'times-bolditalic':         'TimesNewRoman-BoldItalic',
        'times-italic':             'TimesNewRoman-Italic',
        'times-roman':              'TimesNewRoman',
        'courier-bold':             'Courier-Bold',
        'courier-boldoblique':      'Courier-BoldOblique',
        'courier':                  'Courier',
        'courier-oblique':          'Courier-Oblique',
        }
    _font2fnrMap = _font2fnrMapLinux2
    # also register the lowercased Linux names as aliases in the Win32
    # map, so findT1File's Win32 fallback can resolve them
    for k, v in _font2fnrMap.items():
        if k in _font2fnrMapWin32.keys():
            _font2fnrMapWin32[v.lower()] = _font2fnrMapWin32[k]
    del k, v
else:
    _font2fnrMap = _font2fnrMapWin32
def _findFNR(fontName):
    '''Return the platform filename root for *fontName* (case-insensitive);
    raises KeyError for unknown names.'''
    return _font2fnrMap[fontName.lower()]
from reportlab.rl_config import T1SearchPath
from reportlab.lib.utils import rl_isfile
def _searchT1Dirs(n,rl_isfile=rl_isfile,T1SearchPath=T1SearchPath):
    '''Search the configured Type-1 font directories for filename *n*;
    return the first existing full path, or None.  rl_isfile and
    T1SearchPath are captured as defaults so the module-level names can
    be deleted below.'''
    assert T1SearchPath!=[], "No Type-1 font search path"
    for d in T1SearchPath:
        f = os.path.join(d,n)
        if rl_isfile(f): return f
    return None
# the function holds these via its defaults; drop the module globals
del T1SearchPath, rl_isfile
def findT1File(fontName,ext='.pfb'):
    '''Locate the Type-1 font file for *fontName* with extension *ext*;
    return its full path or None.

    On Linux, first try the bare filename root (Linux font files ship
    without extension) and then the Win32-style root plus extension,
    before falling back to the generic root+ext lookup.'''
    if sys.platform in ('linux2',) and ext=='.pfb':
        try:
            f = _searchT1Dirs(_findFNR(fontName))
            if f: return f
        except:
            pass
        try:
            f = _searchT1Dirs(_font2fnrMapWin32[fontName.lower()]+ext)
            if f: return f
        except:
            pass
    return _searchT1Dirs(_findFNR(fontName)+ext)
# this lists the predefined font encodings - WinAnsi and MacRoman. We have
# not added MacExpert - it's possible, but would complicate life and nobody
# is asking. StandardEncoding means something special.
standardEncodings = ('WinAnsiEncoding','MacRomanEncoding','StandardEncoding','SymbolEncoding','ZapfDingbatsEncoding','PDFDocEncoding', 'MacExpertEncoding')

# this is the global mapping of standard encodings to name vectors
class _Name2StandardEncodingMap(UserDict.UserDict):
    '''Trivial fake dictionary with some [] magic.

    Keys are normalized on both get and set: lookup is case-insensitive,
    an optional 'encoding' suffix is stripped, and the short form is then
    mapped to the canonical long name via _XMap.  Setting an encoding
    that is already present raises IndexError (write-once per encoding).
    '''
    # short lowercase name -> canonical encoding name
    _XMap = {'winansi':'WinAnsiEncoding','macroman': 'MacRomanEncoding','standard':'StandardEncoding','symbol':'SymbolEncoding', 'zapfdingbats':'ZapfDingbatsEncoding','pdfdoc':'PDFDocEncoding', 'macexpert':'MacExpertEncoding'}

    def __setitem__(self,x,v):
        # normalize: lowercase, strip trailing 'encoding', canonicalize
        y = x.lower()
        if y[-8:]=='encoding': y = y[:-8]
        y = self._XMap[y]
        if y in self.keys(): raise IndexError, 'Encoding %s is already set' % y
        self.data[y] = v

    def __getitem__(self,x):
        # same normalization as __setitem__
        y = x.lower()
        if y[-8:]=='encoding': y = y[:-8]
        y = self._XMap[y]
        return self.data[y]

encodings = _Name2StandardEncodingMap()
# WinAnsiEncoding: character code -> PostScript glyph name; None marks
# codes with no assigned glyph.
encodings['WinAnsiEncoding'] = (
None, None, None, None, None, None, None, None, None, None, None, None,
None, None, None, None, None, None, None, None, None, None, None, None,
None, None, None, None, None, None, None, None, 'space', 'exclam',
'quotedbl', 'numbersign', 'dollar', 'percent', 'ampersand',
'quotesingle', 'parenleft', 'parenright', 'asterisk', 'plus', 'comma',
'hyphen', 'period', 'slash', 'zero', 'one', 'two', 'three', 'four',
'five', 'six', 'seven', 'eight', 'nine', 'colon', 'semicolon', 'less',
'equal', 'greater', 'question', 'at', 'A', 'B', 'C', 'D', 'E', 'F',
'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T',
'U', 'V', 'W', 'X', 'Y', 'Z', 'bracketleft', 'backslash', 'bracketright',
'asciicircum', 'underscore', 'grave', 'a', 'b', 'c', 'd', 'e', 'f',
'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't',
'u', 'v', 'w', 'x', 'y', 'z', 'braceleft', 'bar', 'braceright',
'asciitilde', 'bullet', 'Euro', 'bullet', 'quotesinglbase', 'florin',
'quotedblbase', 'ellipsis', 'dagger', 'daggerdbl', 'circumflex',
'perthousand', 'Scaron', 'guilsinglleft', 'OE', 'bullet', 'Zcaron',
'bullet', 'bullet', 'quoteleft', 'quoteright', 'quotedblleft',
'quotedblright', 'bullet', 'endash', 'emdash', 'tilde', 'trademark',
'scaron', 'guilsinglright', 'oe', 'bullet', 'zcaron', 'Ydieresis',
'space', 'exclamdown', 'cent', 'sterling', 'currency', 'yen', 'brokenbar',
'section', 'dieresis', 'copyright', 'ordfeminine', 'guillemotleft',
'logicalnot', 'hyphen', 'registered', 'macron', 'degree', 'plusminus',
'twosuperior', 'threesuperior', 'acute', 'mu', 'paragraph', 'periodcentered',
'cedilla', 'onesuperior', 'ordmasculine', 'guillemotright', 'onequarter',
'onehalf', 'threequarters', 'questiondown', 'Agrave', 'Aacute',
'Acircumflex', 'Atilde', 'Adieresis', 'Aring', 'AE', 'Ccedilla',
'Egrave', 'Eacute', 'Ecircumflex', 'Edieresis', 'Igrave', 'Iacute',
'Icircumflex', 'Idieresis', 'Eth', 'Ntilde', 'Ograve', 'Oacute',
'Ocircumflex', 'Otilde', 'Odieresis', 'multiply', 'Oslash', 'Ugrave',
'Uacute', 'Ucircumflex', 'Udieresis', 'Yacute', 'Thorn', 'germandbls',
'agrave', 'aacute', 'acircumflex', 'atilde', 'adieresis', 'aring', 'ae',
'ccedilla', 'egrave', 'eacute', 'ecircumflex', 'edieresis', 'igrave',
'iacute', 'icircumflex', 'idieresis', 'eth', 'ntilde', 'ograve', 'oacute',
'ocircumflex', 'otilde', 'odieresis', 'divide', 'oslash', 'ugrave', 'uacute',
'ucircumflex', 'udieresis', 'yacute', 'thorn', 'ydieresis')
# MacRomanEncoding: character code -> PostScript glyph name; None marks
# codes with no assigned glyph.
encodings['MacRomanEncoding'] = (
None, None, None, None, None, None, None, None, None, None, None, None,
None, None, None, None, None, None, None, None, None, None, None, None,
None, None, None, None, None, None, None, None, 'space', 'exclam',
'quotedbl', 'numbersign', 'dollar', 'percent', 'ampersand',
'quotesingle', 'parenleft', 'parenright', 'asterisk', 'plus', 'comma',
'hyphen', 'period', 'slash', 'zero', 'one', 'two', 'three', 'four',
'five', 'six', 'seven', 'eight', 'nine', 'colon', 'semicolon', 'less',
'equal', 'greater', 'question', 'at', 'A', 'B', 'C', 'D', 'E', 'F',
'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T',
'U', 'V', 'W', 'X', 'Y', 'Z', 'bracketleft', 'backslash', 'bracketright',
'asciicircum', 'underscore', 'grave', 'a', 'b', 'c', 'd', 'e', 'f',
'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't',
'u', 'v', 'w', 'x', 'y', 'z', 'braceleft', 'bar', 'braceright',
'asciitilde', None, 'Adieresis', 'Aring', 'Ccedilla', 'Eacute',
'Ntilde', 'Odieresis', 'Udieresis', 'aacute', 'agrave', 'acircumflex',
'adieresis', 'atilde', 'aring', 'ccedilla', 'eacute', 'egrave',
'ecircumflex', 'edieresis', 'iacute', 'igrave', 'icircumflex',
'idieresis', 'ntilde', 'oacute', 'ograve', 'ocircumflex', 'odieresis',
'otilde', 'uacute', 'ugrave', 'ucircumflex', 'udieresis', 'dagger',
'degree', 'cent', 'sterling', 'section', 'bullet', 'paragraph',
'germandbls', 'registered', 'copyright', 'trademark', 'acute',
'dieresis', None, 'AE', 'Oslash', None, 'plusminus', None, None, 'yen',
'mu', None, None, None, None, None, 'ordfeminine', 'ordmasculine', None,
'ae', 'oslash', 'questiondown', 'exclamdown', 'logicalnot', None, 'florin',
None, None, 'guillemotleft', 'guillemotright', 'ellipsis', 'space', 'Agrave',
'Atilde', 'Otilde', 'OE', 'oe', 'endash', 'emdash', 'quotedblleft',
'quotedblright', 'quoteleft', 'quoteright', 'divide', None, 'ydieresis',
'Ydieresis', 'fraction', 'currency', 'guilsinglleft', 'guilsinglright',
'fi', 'fl', 'daggerdbl', 'periodcentered', 'quotesinglbase',
'quotedblbase', 'perthousand', 'Acircumflex', 'Ecircumflex', 'Aacute',
'Edieresis', 'Egrave', 'Iacute', 'Icircumflex', 'Idieresis', 'Igrave',
'Oacute', 'Ocircumflex', None, 'Ograve', 'Uacute', 'Ucircumflex',
'Ugrave', 'dotlessi', 'circumflex', 'tilde', 'macron', 'breve',
'dotaccent', 'ring', 'cedilla', 'hungarumlaut', 'ogonek', 'caron')
# SymbolEncoding: glyph-name vector for the Symbol font's built-in
# encoding; None marks codes with no assigned glyph.
encodings['SymbolEncoding']=(None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None,
None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, 'space',
'exclam', 'universal', 'numbersign', 'existential', 'percent', 'ampersand', 'suchthat',
'parenleft', 'parenright', 'asteriskmath', 'plus', 'comma', 'minus', 'period', 'slash', 'zero',
'one', 'two', 'three', 'four', 'five', 'six', 'seven', 'eight', 'nine', 'colon', 'semicolon',
'less', 'equal', 'greater', 'question', 'congruent', 'Alpha', 'Beta', 'Chi', 'Delta', 'Epsilon',
'Phi', 'Gamma', 'Eta', 'Iota', 'theta1', 'Kappa', 'Lambda', 'Mu', 'Nu', 'Omicron', 'Pi', 'Theta',
'Rho', 'Sigma', 'Tau', 'Upsilon', 'sigma1', 'Omega', 'Xi', 'Psi', 'Zeta', 'bracketleft',
'therefore', 'bracketright', 'perpendicular', 'underscore', 'radicalex', 'alpha', 'beta', 'chi',
'delta', 'epsilon', 'phi', 'gamma', 'eta', 'iota', 'phi1', 'kappa', 'lambda', 'mu', 'nu',
'omicron', 'pi', 'theta', 'rho', 'sigma', 'tau', 'upsilon', 'omega1', 'omega', 'xi', 'psi', 'zeta',
'braceleft', 'bar', 'braceright', 'similar', None, None, None, None, None, None, None, None, None,
None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None,
None, None, None, None, None, None, None, None, 'Euro', 'Upsilon1', 'minute', 'lessequal',
'fraction', 'infinity', 'florin', 'club', 'diamond', 'heart', 'spade', 'arrowboth', 'arrowleft',
'arrowup', 'arrowright', 'arrowdown', 'degree', 'plusminus', 'second', 'greaterequal', 'multiply',
'proportional', 'partialdiff', 'bullet', 'divide', 'notequal', 'equivalence', 'approxequal',
'ellipsis', 'arrowvertex', 'arrowhorizex', 'carriagereturn', 'aleph', 'Ifraktur', 'Rfraktur',
'weierstrass', 'circlemultiply', 'circleplus', 'emptyset', 'intersection', 'union',
'propersuperset', 'reflexsuperset', 'notsubset', 'propersubset', 'reflexsubset', 'element',
'notelement', 'angle', 'gradient', 'registerserif', 'copyrightserif', 'trademarkserif', 'product',
'radical', 'dotmath', 'logicalnot', 'logicaland', 'logicalor', 'arrowdblboth', 'arrowdblleft',
'arrowdblup', 'arrowdblright', 'arrowdbldown', 'lozenge', 'angleleft', 'registersans',
'copyrightsans', 'trademarksans', 'summation', 'parenlefttp', 'parenleftex', 'parenleftbt',
'bracketlefttp', 'bracketleftex', 'bracketleftbt', 'bracelefttp', 'braceleftmid', 'braceleftbt',
'braceex', None, 'angleright', 'integral', 'integraltp', 'integralex', 'integralbt',
'parenrighttp', 'parenrightex', 'parenrightbt', 'bracketrighttp', 'bracketrightex',
'bracketrightbt', 'bracerighttp', 'bracerightmid', 'bracerightbt', None)
# ZapfDingbatsEncoding: glyph-name vector for the ZapfDingbats font's
# built-in encoding ('a1'..'a206' glyphs); None marks unassigned codes.
encodings['ZapfDingbatsEncoding'] = ( None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None,
None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None,
'space', 'a1', 'a2', 'a202', 'a3', 'a4', 'a5', 'a119', 'a118', 'a117', 'a11', 'a12', 'a13', 'a14',
'a15', 'a16', 'a105', 'a17', 'a18', 'a19', 'a20', 'a21', 'a22', 'a23', 'a24', 'a25', 'a26', 'a27',
'a28', 'a6', 'a7', 'a8', 'a9', 'a10', 'a29', 'a30', 'a31', 'a32', 'a33', 'a34', 'a35', 'a36',
'a37', 'a38', 'a39', 'a40', 'a41', 'a42', 'a43', 'a44', 'a45', 'a46', 'a47', 'a48', 'a49', 'a50',
'a51', 'a52', 'a53', 'a54', 'a55', 'a56', 'a57', 'a58', 'a59', 'a60', 'a61', 'a62', 'a63', 'a64',
'a65', 'a66', 'a67', 'a68', 'a69', 'a70', 'a71', 'a72', 'a73', 'a74', 'a203', 'a75', 'a204', 'a76',
'a77', 'a78', 'a79', 'a81', 'a82', 'a83', 'a84', 'a97', 'a98', 'a99', 'a100', None, 'a89', 'a90',
'a93', 'a94', 'a91', 'a92', 'a205', 'a85', 'a206', 'a86', 'a87', 'a88', 'a95', 'a96', None, None,
None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None,
None, 'a101', 'a102', 'a103', 'a104', 'a106', 'a107', 'a108', 'a112', 'a111', 'a110', 'a109',
'a120', 'a121', 'a122', 'a123', 'a124', 'a125', 'a126', 'a127', 'a128', 'a129', 'a130', 'a131',
'a132', 'a133', 'a134', 'a135', 'a136', 'a137', 'a138', 'a139', 'a140', 'a141', 'a142', 'a143',
'a144', 'a145', 'a146', 'a147', 'a148', 'a149', 'a150', 'a151', 'a152', 'a153', 'a154', 'a155',
'a156', 'a157', 'a158', 'a159', 'a160', 'a161', 'a163', 'a164', 'a196', 'a165', 'a192', 'a166',
'a167', 'a168', 'a169', 'a170', 'a171', 'a172', 'a173', 'a162', 'a174', 'a175', 'a176', 'a177',
'a178', 'a179', 'a193', 'a180', 'a199', 'a181', 'a200', 'a182', None, 'a201', 'a183', 'a184',
'a197', 'a185', 'a194', 'a198', 'a186', 'a195', 'a187', 'a188', 'a189', 'a190', 'a191', None)
# Adobe StandardEncoding: character code -> glyph name; None marks
# unassigned codes.
encodings['StandardEncoding']=(None,None,None,None,None,None,None,None,None,None,None,None,None,None,None,None,None,None,None,None,None,None,None,None,None,None,None,None,None,None,None,None,"space","exclam",
"quotedbl","numbersign","dollar","percent","ampersand","quoteright","parenleft","parenright","asterisk","plus",
"comma","hyphen","period","slash","zero","one","two","three","four","five","six","seven","eight","nine","colon",
"semicolon","less","equal","greater","question","at","A","B","C","D","E","F","G","H","I","J","K","L","M","N","O",
"P","Q","R","S","T","U","V","W","X","Y","Z","bracketleft","backslash","bracketright","asciicircum","underscore",
"quoteleft","a","b","c","d","e","f","g","h","i","j","k","l","m","n","o","p","q","r","s","t","u","v","w","x","y",
"z","braceleft","bar","braceright","asciitilde",None,None,None,None,None,None,None,None,None,None,None,None,None,None,None,None,None,None,None,None,None,None,None,None,None,None,None,None,None,None,None,
None,None,None,"exclamdown","cent","sterling","fraction","yen","florin","section","currency","quotesingle","quotedblleft",
"guillemotleft","guilsinglleft","guilsinglright","fi","fl",None,"endash","dagger","daggerdbl","periodcentered",None,
"paragraph","bullet","quotesinglbase","quotedblbase","quotedblright","guillemotright","ellipsis","perthousand",
None,"questiondown",None,"grave","acute","circumflex","tilde","macron","breve","dotaccent","dieresis",None,"ring",
"cedilla",None,"hungarumlaut","ogonek","caron","emdash",None,None,None,None,None,None,None,None,None,None,None,None,None,None,None,None,"AE",None,"ordfeminine",
None,None,None,None,"Lslash","Oslash","OE","ordmasculine",None,None,None,None,None,"ae",None,None,None,"dotlessi",None,None,"lslash","oslash",
"oe","germandbls",None,None,None,None)
# PDFDocEncoding: character code -> glyph name; None marks unassigned
# codes.
encodings['PDFDocEncoding']=(None,None,None,None,None,None,None,None,None,None,None,None,None,None,None,None,None,None,None,
None,None,None,None,None,"breve","caron","circumflex",
"dotaccent","hungarumlaut","ogonek","ring","tilde","space","exclam","quotedbl","numbersign","dollar","percent",
"ampersand","quotesingle","parenleft","parenright","asterisk","plus","comma","hyphen","period","slash","zero",
"one","two","three","four","five","six","seven","eight","nine","colon","semicolon","less","equal","greater",
"question","at","A","B","C","D","E","F","G","H","I","J","K","L","M","N","O","P","Q","R","S","T","U","V","W","X",
"Y","Z","bracketleft","backslash","bracketright","asciicircum","underscore","grave","a","b","c","d","e","f","g",
"h","i","j","k","l","m","n","o","p","q","r","s","t","u","v","w","x","y","z","braceleft","bar","braceright",
"asciitilde",None,"bullet","dagger","daggerdbl","ellipsis","emdash","endash","florin","fraction","guilsinglleft",
"guilsinglright","minus","perthousand","quotedblbase","quotedblleft","quotedblright","quoteleft","quoteright",
"quotesinglbase","trademark","fi","fl","Lslash","OE","Scaron","Ydieresis","Zcaron","dotlessi","lslash","oe",
"scaron","zcaron",None,"Euro","exclamdown","cent","sterling","currency","yen","brokenbar","section","dieresis",
"copyright","ordfeminine","guillemotleft","logicalnot",None,"registered","macron","degree","plusminus","twosuperior",
"threesuperior","acute","mu","paragraph","periodcentered","cedilla","onesuperior","ordmasculine","guillemotright",
"onequarter","onehalf","threequarters","questiondown","Agrave","Aacute","Acircumflex","Atilde","Adieresis","Aring",
"AE","Ccedilla","Egrave","Eacute","Ecircumflex","Edieresis","Igrave","Iacute","Icircumflex","Idieresis","Eth",
"Ntilde","Ograve","Oacute","Ocircumflex","Otilde","Odieresis","multiply","Oslash","Ugrave","Uacute","Ucircumflex",
"Udieresis","Yacute","Thorn","germandbls","agrave","aacute","acircumflex","atilde","adieresis","aring","ae",
"ccedilla","egrave","eacute","ecircumflex","edieresis","igrave","iacute","icircumflex","idieresis","eth","ntilde",
"ograve","oacute","ocircumflex","otilde","odieresis","divide","oslash","ugrave","uacute","ucircumflex","udieresis",
"yacute","thorn","ydieresis")
# MacExpertEncoding: glyph-name vector for the Mac "expert" character set
# (small caps, old-style figures, superiors/inferiors); None marks
# unassigned codes.
encodings['MacExpertEncoding'] = (None, None, None, None, None, None, None, None, None, None, None, None, None, None,
None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None,
'space', 'exclamsmall', 'Hungarumlautsmall', 'centoldstyle', 'dollaroldstyle', 'dollarsuperior', 'ampersandsmall',
'Acutesmall', 'parenleftsuperior', 'parenrightsuperior', 'twodotenleader', 'onedotenleader', 'comma', 'hyphen',
'period', 'fraction', 'zerooldstyle', 'oneoldstyle', 'twooldstyle', 'threeoldstyle', 'fouroldstyle',
'fiveoldstyle', 'sixoldstyle', 'sevenoldstyle', 'eightoldstyle', 'nineoldstyle', 'colon', 'semicolon', None,
'threequartersemdash', None, 'questionsmall', None, None, None, None, 'Ethsmall', None, None, 'onequarter',
'onehalf', 'threequarters', 'oneeighth', 'threeeighths', 'fiveeighths', 'seveneighths', 'onethird', 'twothirds',
None, None, None, None, None, None, 'ff', 'fi', 'fl', 'ffi', 'ffl', 'parenleftinferior', None,
'parenrightinferior', 'Circumflexsmall', 'hypheninferior', 'Gravesmall', 'Asmall', 'Bsmall', 'Csmall', 'Dsmall',
'Esmall', 'Fsmall', 'Gsmall', 'Hsmall', 'Ismall', 'Jsmall', 'Ksmall', 'Lsmall', 'Msmall', 'Nsmall', 'Osmall',
'Psmall', 'Qsmall', 'Rsmall', 'Ssmall', 'Tsmall', 'Usmall', 'Vsmall', 'Wsmall', 'Xsmall', 'Ysmall', 'Zsmall',
'colonmonetary', 'onefitted', 'rupiah', 'Tildesmall', None, None, 'asuperior', 'centsuperior', None, None, None,
None, 'Aacutesmall', 'Agravesmall', 'Acircumflexsmall', 'Adieresissmall', 'Atildesmall', 'Aringsmall',
'Ccedillasmall', 'Eacutesmall', 'Egravesmall', 'Ecircumflexsmall', 'Edieresissmall', 'Iacutesmall', 'Igravesmall',
'Icircumflexsmall', 'Idieresissmall', 'Ntildesmall', 'Oacutesmall', 'Ogravesmall', 'Ocircumflexsmall',
'Odieresissmall', 'Otildesmall', 'Uacutesmall', 'Ugravesmall', 'Ucircumflexsmall', 'Udieresissmall', None,
'eightsuperior', 'fourinferior', 'threeinferior', 'sixinferior', 'eightinferior', 'seveninferior', 'Scaronsmall',
None, 'centinferior', 'twoinferior', None, 'Dieresissmall', None, 'Caronsmall', 'osuperior', 'fiveinferior', None,
'commainferior', 'periodinferior', 'Yacutesmall', None, 'dollarinferior', None, None, 'Thornsmall', None,
'nineinferior', 'zeroinferior', 'Zcaronsmall', 'AEsmall', 'Oslashsmall', 'questiondownsmall', 'oneinferior',
'Lslashsmall', None, None, None, None, None, None, 'Cedillasmall', None, None, None, None, None, 'OEsmall',
'figuredash', 'hyphensuperior', None, None, None, None, 'exclamdownsmall', None, 'Ydieresissmall', None,
'onesuperior', 'twosuperior', 'threesuperior', 'foursuperior', 'fivesuperior', 'sixsuperior', 'sevensuperior',
'ninesuperior', 'zerosuperior', None, 'esuperior', 'rsuperior', 'tsuperior', None, None, 'isuperior', 'ssuperior',
'dsuperior', None, None, None, None, None, 'lsuperior', 'Ogoneksmall', 'Brevesmall', 'Macronsmall', 'bsuperior',
'nsuperior', 'msuperior', 'commasuperior', 'periodsuperior', 'Dotaccentsmall', 'Ringsmall', None, None, None, None)
# (ascent, descent) for each of the 14 standard fonts, in 1/1000ths of
# the point size.  Symbol and ZapfDingbats carry (0, 0) here.
ascent_descent = {
    'Courier':               (629, -157),
    'Courier-Bold':          (626, -142),
    'Courier-BoldOblique':   (626, -142),
    'Courier-Oblique':       (629, -157),
    'Helvetica':             (718, -207),
    'Helvetica-Bold':        (718, -207),
    'Helvetica-BoldOblique': (718, -207),
    'Helvetica-Oblique':     (718, -207),
    'Times-Roman':           (683, -217),
    'Times-Bold':            (676, -205),
    'Times-BoldItalic':      (699, -205),
    'Times-Italic':          (683, -205),
    'Symbol':                (0, 0),
    'ZapfDingbats':          (0, 0),
    }
# Build this up one entry at a time to stay under JPython's 64k limit.
# Per-font glyph-name -> width maps (1/1000ths of the point size),
# filled in one font at a time below.
widthsByFontGlyph = {}
widthsByFontGlyph['Helvetica'] = {'A': 667,
'AE': 1000,
'Aacute': 667,
'Acircumflex': 667,
'Adieresis': 667,
'Agrave': 667,
'Aring': 667,
'Atilde': 667,
'B': 667,
'C': 722,
'Ccedilla': 722,
'D': 722,
'E': 667,
'Eacute': 667,
'Ecircumflex': 667,
'Edieresis': 667,
'Egrave': 667,
'Eth': 722,
'Euro': 556,
'F': 611,
'G': 778,
'H': 722,
'I': 278,
'Iacute': 278,
'Icircumflex': 278,
'Idieresis': 278,
'Igrave': 278,
'J': 500,
'K': 667,
'L': 556,
'Lslash': 556,
'M': 833,
'N': 722,
'Ntilde': 722,
'O': 778,
'OE': 1000,
'Oacute': 778,
'Ocircumflex': 778,
'Odieresis': 778,
'Ograve': 778,
'Oslash': 778,
'Otilde': 778,
'P': 667,
'Q': 778,
'R': 722,
'S': 667,
'Scaron': 667,
'T': 611,
'Thorn': 667,
'U': 722,
'Uacute': 722,
'Ucircumflex': 722,
'Udieresis': 722,
'Ugrave': 722,
'V': 667,
'W': 944,
'X': 667,
'Y': 667,
'Yacute': 667,
'Ydieresis': 667,
'Z': 611,
'Zcaron': 611,
'a': 556,
'aacute': 556,
'acircumflex': 556,
'acute': 333,
'adieresis': 556,
'ae': 889,
'agrave': 556,
'ampersand': 667,
'aring': 556,
'asciicircum': 469,
'asciitilde': 584,
'asterisk': 389,
'at': 1015,
'atilde': 556,
'b': 556,
'backslash': 278,
'bar': 260,
'braceleft': 334,
'braceright': 334,
'bracketleft': 278,
'bracketright': 278,
'breve': 333,
'brokenbar': 260,
'bullet': 350,
'c': 500,
'caron': 333,
'ccedilla': 500,
'cedilla': 333,
'cent': 556,
'circumflex': 333,
'colon': 278,
'comma': 278,
'copyright': 737,
'currency': 556,
'd': 556,
'dagger': 556,
'daggerdbl': 556,
'degree': 400,
'dieresis': 333,
'divide': 584,
'dollar': 556,
'dotaccent': 333,
'dotlessi': 278,
'e': 556,
'eacute': 556,
'ecircumflex': 556,
'edieresis': 556,
'egrave': 556,
'eight': 556,
'ellipsis': 1000,
'emdash': 1000,
'endash': 556,
'equal': 584,
'eth': 556,
'exclam': 278,
'exclamdown': 333,
'f': 278,
'fi': 500,
'five': 556,
'fl': 500,
'florin': 556,
'four': 556,
'fraction': 167,
'g': 556,
'germandbls': 611,
'grave': 333,
'greater': 584,
'guillemotleft': 556,
'guillemotright': 556,
'guilsinglleft': 333,
'guilsinglright': 333,
'h': 556,
'hungarumlaut': 333,
'hyphen': 333,
'i': 222,
'iacute': 278,
'icircumflex': 278,
'idieresis': 278,
'igrave': 278,
'j': 222,
'k': 500,
'l': 222,
'less': 584,
'logicalnot': 584,
'lslash': 222,
'm': 833,
'macron': 333,
'minus': 584,
'mu': 556,
'multiply': 584,
'n': 556,
'nine': 556,
'ntilde': 556,
'numbersign': 556,
'o': 556,
'oacute': 556,
'ocircumflex': 556,
'odieresis': 556,
'oe': 944,
'ogonek': 333,
'ograve': 556,
'one': 556,
'onehalf': 834,
'onequarter': 834,
'onesuperior': 333,
'ordfeminine': 370,
'ordmasculine': 365,
'oslash': 611,
'otilde': 556,
'p': 556,
'paragraph': 537,
'parenleft': 333,
'parenright': 333,
'percent': 889,
'period': 278,
'periodcentered': 278,
'perthousand': 1000,
'plus': 584,
'plusminus': 584,
'q': 556,
'question': 556,
'questiondown': 611,
'quotedbl': 355,
'quotedblbase': 333,
'quotedblleft': 333,
'quotedblright': 333,
'quoteleft': 222,
'quoteright': 222,
'quotesinglbase': 222,
'quotesingle': 191,
'r': 333,
'registered': 737,
'ring': 333,
's': 500,
'scaron': 500,
'section': 556,
'semicolon': 278,
'seven': 556,
'six': 556,
'slash': 278,
'space': 278,
'sterling': 556,
't': 278,
'thorn': 556,
'three': 556,
'threequarters': 834,
'threesuperior': 333,
'tilde': 333,
'trademark': 1000,
'two': 556,
'twosuperior': 333,
'u': 556,
'uacute': 556,
'ucircumflex': 556,
'udieresis': 556,
'ugrave': 556,
'underscore': 556,
'v': 500,
'w': 722,
'x': 500,
'y': 500,
'yacute': 500,
'ydieresis': 500,
'yen': 556,
'z': 500,
'zcaron': 500,
'zero': 556}
# Glyph widths for Helvetica-Bold, in 1/1000ths of the point size.
widthsByFontGlyph['Helvetica-Bold'] = {'A': 722,
'AE': 1000,
'Aacute': 722,
'Acircumflex': 722,
'Adieresis': 722,
'Agrave': 722,
'Aring': 722,
'Atilde': 722,
'B': 722,
'C': 722,
'Ccedilla': 722,
'D': 722,
'E': 667,
'Eacute': 667,
'Ecircumflex': 667,
'Edieresis': 667,
'Egrave': 667,
'Eth': 722,
'Euro': 556,
'F': 611,
'G': 778,
'H': 722,
'I': 278,
'Iacute': 278,
'Icircumflex': 278,
'Idieresis': 278,
'Igrave': 278,
'J': 556,
'K': 722,
'L': 611,
'Lslash': 611,
'M': 833,
'N': 722,
'Ntilde': 722,
'O': 778,
'OE': 1000,
'Oacute': 778,
'Ocircumflex': 778,
'Odieresis': 778,
'Ograve': 778,
'Oslash': 778,
'Otilde': 778,
'P': 667,
'Q': 778,
'R': 722,
'S': 667,
'Scaron': 667,
'T': 611,
'Thorn': 667,
'U': 722,
'Uacute': 722,
'Ucircumflex': 722,
'Udieresis': 722,
'Ugrave': 722,
'V': 667,
'W': 944,
'X': 667,
'Y': 667,
'Yacute': 667,
'Ydieresis': 667,
'Z': 611,
'Zcaron': 611,
'a': 556,
'aacute': 556,
'acircumflex': 556,
'acute': 333,
'adieresis': 556,
'ae': 889,
'agrave': 556,
'ampersand': 722,
'aring': 556,
'asciicircum': 584,
'asciitilde': 584,
'asterisk': 389,
'at': 975,
'atilde': 556,
'b': 611,
'backslash': 278,
'bar': 280,
'braceleft': 389,
'braceright': 389,
'bracketleft': 333,
'bracketright': 333,
'breve': 333,
'brokenbar': 280,
'bullet': 350,
'c': 556,
'caron': 333,
'ccedilla': 556,
'cedilla': 333,
'cent': 556,
'circumflex': 333,
'colon': 333,
'comma': 278,
'copyright': 737,
'currency': 556,
'd': 611,
'dagger': 556,
'daggerdbl': 556,
'degree': 400,
'dieresis': 333,
'divide': 584,
'dollar': 556,
'dotaccent': 333,
'dotlessi': 278,
'e': 556,
'eacute': 556,
'ecircumflex': 556,
'edieresis': 556,
'egrave': 556,
'eight': 556,
'ellipsis': 1000,
'emdash': 1000,
'endash': 556,
'equal': 584,
'eth': 611,
'exclam': 333,
'exclamdown': 333,
'f': 333,
'fi': 611,
'five': 556,
'fl': 611,
'florin': 556,
'four': 556,
'fraction': 167,
'g': 611,
'germandbls': 611,
'grave': 333,
'greater': 584,
'guillemotleft': 556,
'guillemotright': 556,
'guilsinglleft': 333,
'guilsinglright': 333,
'h': 611,
'hungarumlaut': 333,
'hyphen': 333,
'i': 278,
'iacute': 278,
'icircumflex': 278,
'idieresis': 278,
'igrave': 278,
'j': 278,
'k': 556,
'l': 278,
'less': 584,
'logicalnot': 584,
'lslash': 278,
'm': 889,
'macron': 333,
'minus': 584,
'mu': 611,
'multiply': 584,
'n': 611,
'nine': 556,
'ntilde': 611,
'numbersign': 556,
'o': 611,
'oacute': 611,
'ocircumflex': 611,
'odieresis': 611,
'oe': 944,
'ogonek': 333,
'ograve': 611,
'one': 556,
'onehalf': 834,
'onequarter': 834,
'onesuperior': 333,
'ordfeminine': 370,
'ordmasculine': 365,
'oslash': 611,
'otilde': 611,
'p': 611,
'paragraph': 556,
'parenleft': 333,
'parenright': 333,
'percent': 889,
'period': 278,
'periodcentered': 278,
'perthousand': 1000,
'plus': 584,
'plusminus': 584,
'q': 611,
'question': 611,
'questiondown': 611,
'quotedbl': 474,
'quotedblbase': 500,
'quotedblleft': 500,
'quotedblright': 500,
'quoteleft': 278,
'quoteright': 278,
'quotesinglbase': 278,
'quotesingle': 238,
'r': 389,
'registered': 737,
'ring': 333,
's': 556,
'scaron': 556,
'section': 556,
'semicolon': 333,
'seven': 556,
'six': 556,
'slash': 278,
'space': 278,
'sterling': 556,
't': 333,
'thorn': 611,
'three': 556,
'threequarters': 834,
'threesuperior': 333,
'tilde': 333,
'trademark': 1000,
'two': 556,
'twosuperior': 333,
'u': 611,
'uacute': 611,
'ucircumflex': 611,
'udieresis': 611,
'ugrave': 611,
'underscore': 556,
'v': 556,
'w': 778,
'x': 556,
'y': 556,
'yacute': 556,
'ydieresis': 556,
'yen': 556,
'z': 500,
'zcaron': 500,
'zero': 556}
# Glyph widths for Helvetica-Oblique, in 1/1000ths of the point size
# (same advance widths as upright Helvetica).
widthsByFontGlyph['Helvetica-Oblique'] = {'A': 667,
'AE': 1000,
'Aacute': 667,
'Acircumflex': 667,
'Adieresis': 667,
'Agrave': 667,
'Aring': 667,
'Atilde': 667,
'B': 667,
'C': 722,
'Ccedilla': 722,
'D': 722,
'E': 667,
'Eacute': 667,
'Ecircumflex': 667,
'Edieresis': 667,
'Egrave': 667,
'Eth': 722,
'Euro': 556,
'F': 611,
'G': 778,
'H': 722,
'I': 278,
'Iacute': 278,
'Icircumflex': 278,
'Idieresis': 278,
'Igrave': 278,
'J': 500,
'K': 667,
'L': 556,
'Lslash': 556,
'M': 833,
'N': 722,
'Ntilde': 722,
'O': 778,
'OE': 1000,
'Oacute': 778,
'Ocircumflex': 778,
'Odieresis': 778,
'Ograve': 778,
'Oslash': 778,
'Otilde': 778,
'P': 667,
'Q': 778,
'R': 722,
'S': 667,
'Scaron': 667,
'T': 611,
'Thorn': 667,
'U': 722,
'Uacute': 722,
'Ucircumflex': 722,
'Udieresis': 722,
'Ugrave': 722,
'V': 667,
'W': 944,
'X': 667,
'Y': 667,
'Yacute': 667,
'Ydieresis': 667,
'Z': 611,
'Zcaron': 611,
'a': 556,
'aacute': 556,
'acircumflex': 556,
'acute': 333,
'adieresis': 556,
'ae': 889,
'agrave': 556,
'ampersand': 667,
'aring': 556,
'asciicircum': 469,
'asciitilde': 584,
'asterisk': 389,
'at': 1015,
'atilde': 556,
'b': 556,
'backslash': 278,
'bar': 260,
'braceleft': 334,
'braceright': 334,
'bracketleft': 278,
'bracketright': 278,
'breve': 333,
'brokenbar': 260,
'bullet': 350,
'c': 500,
'caron': 333,
'ccedilla': 500,
'cedilla': 333,
'cent': 556,
'circumflex': 333,
'colon': 278,
'comma': 278,
'copyright': 737,
'currency': 556,
'd': 556,
'dagger': 556,
'daggerdbl': 556,
'degree': 400,
'dieresis': 333,
'divide': 584,
'dollar': 556,
'dotaccent': 333,
'dotlessi': 278,
'e': 556,
'eacute': 556,
'ecircumflex': 556,
'edieresis': 556,
'egrave': 556,
'eight': 556,
'ellipsis': 1000,
'emdash': 1000,
'endash': 556,
'equal': 584,
'eth': 556,
'exclam': 278,
'exclamdown': 333,
'f': 278,
'fi': 500,
'five': 556,
'fl': 500,
'florin': 556,
'four': 556,
'fraction': 167,
'g': 556,
'germandbls': 611,
'grave': 333,
'greater': 584,
'guillemotleft': 556,
'guillemotright': 556,
'guilsinglleft': 333,
'guilsinglright': 333,
'h': 556,
'hungarumlaut': 333,
'hyphen': 333,
'i': 222,
'iacute': 278,
'icircumflex': 278,
'idieresis': 278,
'igrave': 278,
'j': 222,
'k': 500,
'l': 222,
'less': 584,
'logicalnot': 584,
'lslash': 222,
'm': 833,
'macron': 333,
'minus': 584,
'mu': 556,
'multiply': 584,
'n': 556,
'nine': 556,
'ntilde': 556,
'numbersign': 556,
'o': 556,
'oacute': 556,
'ocircumflex': 556,
'odieresis': 556,
'oe': 944,
'ogonek': 333,
'ograve': 556,
'one': 556,
'onehalf': 834,
'onequarter': 834,
'onesuperior': 333,
'ordfeminine': 370,
'ordmasculine': 365,
'oslash': 611,
'otilde': 556,
'p': 556,
'paragraph': 537,
'parenleft': 333,
'parenright': 333,
'percent': 889,
'period': 278,
'periodcentered': 278,
'perthousand': 1000,
'plus': 584,
'plusminus': 584,
'q': 556,
'question': 556,
'questiondown': 611,
'quotedbl': 355,
'quotedblbase': 333,
'quotedblleft': 333,
'quotedblright': 333,
'quoteleft': 222,
'quoteright': 222,
'quotesinglbase': 222,
'quotesingle': 191,
'r': 333,
'registered': 737,
'ring': 333,
's': 500,
'scaron': 500,
'section': 556,
'semicolon': 278,
'seven': 556,
'six': 556,
'slash': 278,
'space': 278,
'sterling': 556,
't': 278,
'thorn': 556,
'three': 556,
'threequarters': 834,
'threesuperior': 333,
'tilde': 333,
'trademark': 1000,
'two': 556,
'twosuperior': 333,
'u': 556,
'uacute': 556,
'ucircumflex': 556,
'udieresis': 556,
'ugrave': 556,
'underscore': 556,
'v': 500,
'w': 722,
'x': 500,
'y': 500,
'yacute': 500,
'ydieresis': 500,
'yen': 556,
'z': 500,
'zcaron': 500,
'zero': 556}
# Glyph widths for Helvetica-BoldOblique, in 1/1000ths of the point size
# (same advance widths as upright Helvetica-Bold).
widthsByFontGlyph['Helvetica-BoldOblique'] = {'A': 722,
'AE': 1000,
'Aacute': 722,
'Acircumflex': 722,
'Adieresis': 722,
'Agrave': 722,
'Aring': 722,
'Atilde': 722,
'B': 722,
'C': 722,
'Ccedilla': 722,
'D': 722,
'E': 667,
'Eacute': 667,
'Ecircumflex': 667,
'Edieresis': 667,
'Egrave': 667,
'Eth': 722,
'Euro': 556,
'F': 611,
'G': 778,
'H': 722,
'I': 278,
'Iacute': 278,
'Icircumflex': 278,
'Idieresis': 278,
'Igrave': 278,
'J': 556,
'K': 722,
'L': 611,
'Lslash': 611,
'M': 833,
'N': 722,
'Ntilde': 722,
'O': 778,
'OE': 1000,
'Oacute': 778,
'Ocircumflex': 778,
'Odieresis': 778,
'Ograve': 778,
'Oslash': 778,
'Otilde': 778,
'P': 667,
'Q': 778,
'R': 722,
'S': 667,
'Scaron': 667,
'T': 611,
'Thorn': 667,
'U': 722,
'Uacute': 722,
'Ucircumflex': 722,
'Udieresis': 722,
'Ugrave': 722,
'V': 667,
'W': 944,
'X': 667,
'Y': 667,
'Yacute': 667,
'Ydieresis': 667,
'Z': 611,
'Zcaron': 611,
'a': 556,
'aacute': 556,
'acircumflex': 556,
'acute': 333,
'adieresis': 556,
'ae': 889,
'agrave': 556,
'ampersand': 722,
'aring': 556,
'asciicircum': 584,
'asciitilde': 584,
'asterisk': 389,
'at': 975,
'atilde': 556,
'b': 611,
'backslash': 278,
'bar': 280,
'braceleft': 389,
'braceright': 389,
'bracketleft': 333,
'bracketright': 333,
'breve': 333,
'brokenbar': 280,
'bullet': 350,
'c': 556,
'caron': 333,
'ccedilla': 556,
'cedilla': 333,
'cent': 556,
'circumflex': 333,
'colon': 333,
'comma': 278,
'copyright': 737,
'currency': 556,
'd': 611,
'dagger': 556,
'daggerdbl': 556,
'degree': 400,
'dieresis': 333,
'divide': 584,
'dollar': 556,
'dotaccent': 333,
'dotlessi': 278,
'e': 556,
'eacute': 556,
'ecircumflex': 556,
'edieresis': 556,
'egrave': 556,
'eight': 556,
'ellipsis': 1000,
'emdash': 1000,
'endash': 556,
'equal': 584,
'eth': 611,
'exclam': 333,
'exclamdown': 333,
'f': 333,
'fi': 611,
'five': 556,
'fl': 611,
'florin': 556,
'four': 556,
'fraction': 167,
'g': 611,
'germandbls': 611,
'grave': 333,
'greater': 584,
'guillemotleft': 556,
'guillemotright': 556,
'guilsinglleft': 333,
'guilsinglright': 333,
'h': 611,
'hungarumlaut': 333,
'hyphen': 333,
'i': 278,
'iacute': 278,
'icircumflex': 278,
'idieresis': 278,
'igrave': 278,
'j': 278,
'k': 556,
'l': 278,
'less': 584,
'logicalnot': 584,
'lslash': 278,
'm': 889,
'macron': 333,
'minus': 584,
'mu': 611,
'multiply': 584,
'n': 611,
'nine': 556,
'ntilde': 611,
'numbersign': 556,
'o': 611,
'oacute': 611,
'ocircumflex': 611,
'odieresis': 611,
'oe': 944,
'ogonek': 333,
'ograve': 611,
'one': 556,
'onehalf': 834,
'onequarter': 834,
'onesuperior': 333,
'ordfeminine': 370,
'ordmasculine': 365,
'oslash': 611,
'otilde': 611,
'p': 611,
'paragraph': 556,
'parenleft': 333,
'parenright': 333,
'percent': 889,
'period': 278,
'periodcentered': 278,
'perthousand': 1000,
'plus': 584,
'plusminus': 584,
'q': 611,
'question': 611,
'questiondown': 611,
'quotedbl': 474,
'quotedblbase': 500,
'quotedblleft': 500,
'quotedblright': 500,
'quoteleft': 278,
'quoteright': 278,
'quotesinglbase': 278,
'quotesingle': 238,
'r': 389,
'registered': 737,
'ring': 333,
's': 556,
'scaron': 556,
'section': 556,
'semicolon': 333,
'seven': 556,
'six': 556,
'slash': 278,
'space': 278,
'sterling': 556,
't': 333,
'thorn': 611,
'three': 556,
'threequarters': 834,
'threesuperior': 333,
'tilde': 333,
'trademark': 1000,
'two': 556,
'twosuperior': 333,
'u': 611,
'uacute': 611,
'ucircumflex': 611,
'udieresis': 611,
'ugrave': 611,
'underscore': 556,
'v': 556,
'w': 778,
'x': 556,
'y': 556,
'yacute': 556,
'ydieresis': 556,
'yen': 556,
'z': 500,
'zcaron': 500,
'zero': 556}
# Courier can be expressed more compactly: every glyph is 600 units wide,
# and the four Courier faces share the same (single) width table.
_w = dict.fromkeys(widthsByFontGlyph['Helvetica'].keys(), 600)
widthsByFontGlyph['Courier'] = _w
widthsByFontGlyph['Courier-Bold'] = _w
widthsByFontGlyph['Courier-Oblique'] = _w
widthsByFontGlyph['Courier-BoldOblique'] = _w
widthsByFontGlyph['Times-Roman'] = {'A': 722,
'AE': 889,
'Aacute': 722,
'Acircumflex': 722,
'Adieresis': 722,
'Agrave': 722,
'Aring': 722,
'Atilde': 722,
'B': 667,
'C': 667,
'Ccedilla': 667,
'D': 722,
'E': 611,
'Eacute': 611,
'Ecircumflex': 611,
'Edieresis': 611,
'Egrave': 611,
'Eth': 722,
'Euro': 500,
'F': 556,
'G': 722,
'H': 722,
'I': 333,
'Iacute': 333,
'Icircumflex': 333,
'Idieresis': 333,
'Igrave': 333,
'J': 389,
'K': 722,
'L': 611,
'Lslash': 611,
'M': 889,
'N': 722,
'Ntilde': 722,
'O': 722,
'OE': 889,
'Oacute': 722,
'Ocircumflex': 722,
'Odieresis': 722,
'Ograve': 722,
'Oslash': 722,
'Otilde': 722,
'P': 556,
'Q': 722,
'R': 667,
'S': 556,
'Scaron': 556,
'T': 611,
'Thorn': 556,
'U': 722,
'Uacute': 722,
'Ucircumflex': 722,
'Udieresis': 722,
'Ugrave': 722,
'V': 722,
'W': 944,
'X': 722,
'Y': 722,
'Yacute': 722,
'Ydieresis': 722,
'Z': 611,
'Zcaron': 611,
'a': 444,
'aacute': 444,
'acircumflex': 444,
'acute': 333,
'adieresis': 444,
'ae': 667,
'agrave': 444,
'ampersand': 778,
'aring': 444,
'asciicircum': 469,
'asciitilde': 541,
'asterisk': 500,
'at': 921,
'atilde': 444,
'b': 500,
'backslash': 278,
'bar': 200,
'braceleft': 480,
'braceright': 480,
'bracketleft': 333,
'bracketright': 333,
'breve': 333,
'brokenbar': 200,
'bullet': 350,
'c': 444,
'caron': 333,
'ccedilla': 444,
'cedilla': 333,
'cent': 500,
'circumflex': 333,
'colon': 278,
'comma': 250,
'copyright': 760,
'currency': 500,
'd': 500,
'dagger': 500,
'daggerdbl': 500,
'degree': 400,
'dieresis': 333,
'divide': 564,
'dollar': 500,
'dotaccent': 333,
'dotlessi': 278,
'e': 444,
'eacute': 444,
'ecircumflex': 444,
'edieresis': 444,
'egrave': 444,
'eight': 500,
'ellipsis': 1000,
'emdash': 1000,
'endash': 500,
'equal': 564,
'eth': 500,
'exclam': 333,
'exclamdown': 333,
'f': 333,
'fi': 556,
'five': 500,
'fl': 556,
'florin': 500,
'four': 500,
'fraction': 167,
'g': 500,
'germandbls': 500,
'grave': 333,
'greater': 564,
'guillemotleft': 500,
'guillemotright': 500,
'guilsinglleft': 333,
'guilsinglright': 333,
'h': 500,
'hungarumlaut': 333,
'hyphen': 333,
'i': 278,
'iacute': 278,
'icircumflex': 278,
'idieresis': 278,
'igrave': 278,
'j': 278,
'k': 500,
'l': 278,
'less': 564,
'logicalnot': 564,
'lslash': 278,
'm': 778,
'macron': 333,
'minus': 564,
'mu': 500,
'multiply': 564,
'n': 500,
'nine': 500,
'ntilde': 500,
'numbersign': 500,
'o': 500,
'oacute': 500,
'ocircumflex': 500,
'odieresis': 500,
'oe': 722,
'ogonek': 333,
'ograve': 500,
'one': 500,
'onehalf': 750,
'onequarter': 750,
'onesuperior': 300,
'ordfeminine': 276,
'ordmasculine': 310,
'oslash': 500,
'otilde': 500,
'p': 500,
'paragraph': 453,
'parenleft': 333,
'parenright': 333,
'percent': 833,
'period': 250,
'periodcentered': 250,
'perthousand': 1000,
'plus': 564,
'plusminus': 564,
'q': 500,
'question': 444,
'questiondown': 444,
'quotedbl': 408,
'quotedblbase': 444,
'quotedblleft': 444,
'quotedblright': 444,
'quoteleft': 333,
'quoteright': 333,
'quotesinglbase': 333,
'quotesingle': 180,
'r': 333,
'registered': 760,
'ring': 333,
's': 389,
'scaron': 389,
'section': 500,
'semicolon': 278,
'seven': 500,
'six': 500,
'slash': 278,
'space': 250,
'sterling': 500,
't': 278,
'thorn': 500,
'three': 500,
'threequarters': 750,
'threesuperior': 300,
'tilde': 333,
'trademark': 980,
'two': 500,
'twosuperior': 300,
'u': 500,
'uacute': 500,
'ucircumflex': 500,
'udieresis': 500,
'ugrave': 500,
'underscore': 500,
'v': 500,
'w': 722,
'x': 500,
'y': 500,
'yacute': 500,
'ydieresis': 500,
'yen': 500,
'z': 444,
'zcaron': 444,
'zero': 500}
widthsByFontGlyph['Times-Bold'] = {'A': 722,
'AE': 1000,
'Aacute': 722,
'Acircumflex': 722,
'Adieresis': 722,
'Agrave': 722,
'Aring': 722,
'Atilde': 722,
'B': 667,
'C': 722,
'Ccedilla': 722,
'D': 722,
'E': 667,
'Eacute': 667,
'Ecircumflex': 667,
'Edieresis': 667,
'Egrave': 667,
'Eth': 722,
'Euro': 500,
'F': 611,
'G': 778,
'H': 778,
'I': 389,
'Iacute': 389,
'Icircumflex': 389,
'Idieresis': 389,
'Igrave': 389,
'J': 500,
'K': 778,
'L': 667,
'Lslash': 667,
'M': 944,
'N': 722,
'Ntilde': 722,
'O': 778,
'OE': 1000,
'Oacute': 778,
'Ocircumflex': 778,
'Odieresis': 778,
'Ograve': 778,
'Oslash': 778,
'Otilde': 778,
'P': 611,
'Q': 778,
'R': 722,
'S': 556,
'Scaron': 556,
'T': 667,
'Thorn': 611,
'U': 722,
'Uacute': 722,
'Ucircumflex': 722,
'Udieresis': 722,
'Ugrave': 722,
'V': 722,
'W': 1000,
'X': 722,
'Y': 722,
'Yacute': 722,
'Ydieresis': 722,
'Z': 667,
'Zcaron': 667,
'a': 500,
'aacute': 500,
'acircumflex': 500,
'acute': 333,
'adieresis': 500,
'ae': 722,
'agrave': 500,
'ampersand': 833,
'aring': 500,
'asciicircum': 581,
'asciitilde': 520,
'asterisk': 500,
'at': 930,
'atilde': 500,
'b': 556,
'backslash': 278,
'bar': 220,
'braceleft': 394,
'braceright': 394,
'bracketleft': 333,
'bracketright': 333,
'breve': 333,
'brokenbar': 220,
'bullet': 350,
'c': 444,
'caron': 333,
'ccedilla': 444,
'cedilla': 333,
'cent': 500,
'circumflex': 333,
'colon': 333,
'comma': 250,
'copyright': 747,
'currency': 500,
'd': 556,
'dagger': 500,
'daggerdbl': 500,
'degree': 400,
'dieresis': 333,
'divide': 570,
'dollar': 500,
'dotaccent': 333,
'dotlessi': 278,
'e': 444,
'eacute': 444,
'ecircumflex': 444,
'edieresis': 444,
'egrave': 444,
'eight': 500,
'ellipsis': 1000,
'emdash': 1000,
'endash': 500,
'equal': 570,
'eth': 500,
'exclam': 333,
'exclamdown': 333,
'f': 333,
'fi': 556,
'five': 500,
'fl': 556,
'florin': 500,
'four': 500,
'fraction': 167,
'g': 500,
'germandbls': 556,
'grave': 333,
'greater': 570,
'guillemotleft': 500,
'guillemotright': 500,
'guilsinglleft': 333,
'guilsinglright': 333,
'h': 556,
'hungarumlaut': 333,
'hyphen': 333,
'i': 278,
'iacute': 278,
'icircumflex': 278,
'idieresis': 278,
'igrave': 278,
'j': 333,
'k': 556,
'l': 278,
'less': 570,
'logicalnot': 570,
'lslash': 278,
'm': 833,
'macron': 333,
'minus': 570,
'mu': 556,
'multiply': 570,
'n': 556,
'nine': 500,
'ntilde': 556,
'numbersign': 500,
'o': 500,
'oacute': 500,
'ocircumflex': 500,
'odieresis': 500,
'oe': 722,
'ogonek': 333,
'ograve': 500,
'one': 500,
'onehalf': 750,
'onequarter': 750,
'onesuperior': 300,
'ordfeminine': 300,
'ordmasculine': 330,
'oslash': 500,
'otilde': 500,
'p': 556,
'paragraph': 540,
'parenleft': 333,
'parenright': 333,
'percent': 1000,
'period': 250,
'periodcentered': 250,
'perthousand': 1000,
'plus': 570,
'plusminus': 570,
'q': 556,
'question': 500,
'questiondown': 500,
'quotedbl': 555,
'quotedblbase': 500,
'quotedblleft': 500,
'quotedblright': 500,
'quoteleft': 333,
'quoteright': 333,
'quotesinglbase': 333,
'quotesingle': 278,
'r': 444,
'registered': 747,
'ring': 333,
's': 389,
'scaron': 389,
'section': 500,
'semicolon': 333,
'seven': 500,
'six': 500,
'slash': 278,
'space': 250,
'sterling': 500,
't': 333,
'thorn': 556,
'three': 500,
'threequarters': 750,
'threesuperior': 300,
'tilde': 333,
'trademark': 1000,
'two': 500,
'twosuperior': 300,
'u': 556,
'uacute': 556,
'ucircumflex': 556,
'udieresis': 556,
'ugrave': 556,
'underscore': 500,
'v': 500,
'w': 722,
'x': 500,
'y': 500,
'yacute': 500,
'ydieresis': 500,
'yen': 500,
'z': 444,
'zcaron': 444,
'zero': 500}
widthsByFontGlyph['Times-Italic'] = {'A': 611,
'AE': 889,
'Aacute': 611,
'Acircumflex': 611,
'Adieresis': 611,
'Agrave': 611,
'Aring': 611,
'Atilde': 611,
'B': 611,
'C': 667,
'Ccedilla': 667,
'D': 722,
'E': 611,
'Eacute': 611,
'Ecircumflex': 611,
'Edieresis': 611,
'Egrave': 611,
'Eth': 722,
'Euro': 500,
'F': 611,
'G': 722,
'H': 722,
'I': 333,
'Iacute': 333,
'Icircumflex': 333,
'Idieresis': 333,
'Igrave': 333,
'J': 444,
'K': 667,
'L': 556,
'Lslash': 556,
'M': 833,
'N': 667,
'Ntilde': 667,
'O': 722,
'OE': 944,
'Oacute': 722,
'Ocircumflex': 722,
'Odieresis': 722,
'Ograve': 722,
'Oslash': 722,
'Otilde': 722,
'P': 611,
'Q': 722,
'R': 611,
'S': 500,
'Scaron': 500,
'T': 556,
'Thorn': 611,
'U': 722,
'Uacute': 722,
'Ucircumflex': 722,
'Udieresis': 722,
'Ugrave': 722,
'V': 611,
'W': 833,
'X': 611,
'Y': 556,
'Yacute': 556,
'Ydieresis': 556,
'Z': 556,
'Zcaron': 556,
'a': 500,
'aacute': 500,
'acircumflex': 500,
'acute': 333,
'adieresis': 500,
'ae': 667,
'agrave': 500,
'ampersand': 778,
'aring': 500,
'asciicircum': 422,
'asciitilde': 541,
'asterisk': 500,
'at': 920,
'atilde': 500,
'b': 500,
'backslash': 278,
'bar': 275,
'braceleft': 400,
'braceright': 400,
'bracketleft': 389,
'bracketright': 389,
'breve': 333,
'brokenbar': 275,
'bullet': 350,
'c': 444,
'caron': 333,
'ccedilla': 444,
'cedilla': 333,
'cent': 500,
'circumflex': 333,
'colon': 333,
'comma': 250,
'copyright': 760,
'currency': 500,
'd': 500,
'dagger': 500,
'daggerdbl': 500,
'degree': 400,
'dieresis': 333,
'divide': 675,
'dollar': 500,
'dotaccent': 333,
'dotlessi': 278,
'e': 444,
'eacute': 444,
'ecircumflex': 444,
'edieresis': 444,
'egrave': 444,
'eight': 500,
'ellipsis': 889,
'emdash': 889,
'endash': 500,
'equal': 675,
'eth': 500,
'exclam': 333,
'exclamdown': 389,
'f': 278,
'fi': 500,
'five': 500,
'fl': 500,
'florin': 500,
'four': 500,
'fraction': 167,
'g': 500,
'germandbls': 500,
'grave': 333,
'greater': 675,
'guillemotleft': 500,
'guillemotright': 500,
'guilsinglleft': 333,
'guilsinglright': 333,
'h': 500,
'hungarumlaut': 333,
'hyphen': 333,
'i': 278,
'iacute': 278,
'icircumflex': 278,
'idieresis': 278,
'igrave': 278,
'j': 278,
'k': 444,
'l': 278,
'less': 675,
'logicalnot': 675,
'lslash': 278,
'm': 722,
'macron': 333,
'minus': 675,
'mu': 500,
'multiply': 675,
'n': 500,
'nine': 500,
'ntilde': 500,
'numbersign': 500,
'o': 500,
'oacute': 500,
'ocircumflex': 500,
'odieresis': 500,
'oe': 667,
'ogonek': 333,
'ograve': 500,
'one': 500,
'onehalf': 750,
'onequarter': 750,
'onesuperior': 300,
'ordfeminine': 276,
'ordmasculine': 310,
'oslash': 500,
'otilde': 500,
'p': 500,
'paragraph': 523,
'parenleft': 333,
'parenright': 333,
'percent': 833,
'period': 250,
'periodcentered': 250,
'perthousand': 1000,
'plus': 675,
'plusminus': 675,
'q': 500,
'question': 500,
'questiondown': 500,
'quotedbl': 420,
'quotedblbase': 556,
'quotedblleft': 556,
'quotedblright': 556,
'quoteleft': 333,
'quoteright': 333,
'quotesinglbase': 333,
'quotesingle': 214,
'r': 389,
'registered': 760,
'ring': 333,
's': 389,
'scaron': 389,
'section': 500,
'semicolon': 333,
'seven': 500,
'six': 500,
'slash': 278,
'space': 250,
'sterling': 500,
't': 278,
'thorn': 500,
'three': 500,
'threequarters': 750,
'threesuperior': 300,
'tilde': 333,
'trademark': 980,
'two': 500,
'twosuperior': 300,
'u': 500,
'uacute': 500,
'ucircumflex': 500,
'udieresis': 500,
'ugrave': 500,
'underscore': 500,
'v': 444,
'w': 667,
'x': 444,
'y': 444,
'yacute': 444,
'ydieresis': 444,
'yen': 500,
'z': 389,
'zcaron': 389,
'zero': 500}
widthsByFontGlyph['Times-BoldItalic'] = {'A': 667,
'AE': 944,
'Aacute': 667,
'Acircumflex': 667,
'Adieresis': 667,
'Agrave': 667,
'Aring': 667,
'Atilde': 667,
'B': 667,
'C': 667,
'Ccedilla': 667,
'D': 722,
'E': 667,
'Eacute': 667,
'Ecircumflex': 667,
'Edieresis': 667,
'Egrave': 667,
'Eth': 722,
'Euro': 500,
'F': 667,
'G': 722,
'H': 778,
'I': 389,
'Iacute': 389,
'Icircumflex': 389,
'Idieresis': 389,
'Igrave': 389,
'J': 500,
'K': 667,
'L': 611,
'Lslash': 611,
'M': 889,
'N': 722,
'Ntilde': 722,
'O': 722,
'OE': 944,
'Oacute': 722,
'Ocircumflex': 722,
'Odieresis': 722,
'Ograve': 722,
'Oslash': 722,
'Otilde': 722,
'P': 611,
'Q': 722,
'R': 667,
'S': 556,
'Scaron': 556,
'T': 611,
'Thorn': 611,
'U': 722,
'Uacute': 722,
'Ucircumflex': 722,
'Udieresis': 722,
'Ugrave': 722,
'V': 667,
'W': 889,
'X': 667,
'Y': 611,
'Yacute': 611,
'Ydieresis': 611,
'Z': 611,
'Zcaron': 611,
'a': 500,
'aacute': 500,
'acircumflex': 500,
'acute': 333,
'adieresis': 500,
'ae': 722,
'agrave': 500,
'ampersand': 778,
'aring': 500,
'asciicircum': 570,
'asciitilde': 570,
'asterisk': 500,
'at': 832,
'atilde': 500,
'b': 500,
'backslash': 278,
'bar': 220,
'braceleft': 348,
'braceright': 348,
'bracketleft': 333,
'bracketright': 333,
'breve': 333,
'brokenbar': 220,
'bullet': 350,
'c': 444,
'caron': 333,
'ccedilla': 444,
'cedilla': 333,
'cent': 500,
'circumflex': 333,
'colon': 333,
'comma': 250,
'copyright': 747,
'currency': 500,
'd': 500,
'dagger': 500,
'daggerdbl': 500,
'degree': 400,
'dieresis': 333,
'divide': 570,
'dollar': 500,
'dotaccent': 333,
'dotlessi': 278,
'e': 444,
'eacute': 444,
'ecircumflex': 444,
'edieresis': 444,
'egrave': 444,
'eight': 500,
'ellipsis': 1000,
'emdash': 1000,
'endash': 500,
'equal': 570,
'eth': 500,
'exclam': 389,
'exclamdown': 389,
'f': 333,
'fi': 556,
'five': 500,
'fl': 556,
'florin': 500,
'four': 500,
'fraction': 167,
'g': 500,
'germandbls': 500,
'grave': 333,
'greater': 570,
'guillemotleft': 500,
'guillemotright': 500,
'guilsinglleft': 333,
'guilsinglright': 333,
'h': 556,
'hungarumlaut': 333,
'hyphen': 333,
'i': 278,
'iacute': 278,
'icircumflex': 278,
'idieresis': 278,
'igrave': 278,
'j': 278,
'k': 500,
'l': 278,
'less': 570,
'logicalnot': 606,
'lslash': 278,
'm': 778,
'macron': 333,
'minus': 606,
'mu': 576,
'multiply': 570,
'n': 556,
'nine': 500,
'ntilde': 556,
'numbersign': 500,
'o': 500,
'oacute': 500,
'ocircumflex': 500,
'odieresis': 500,
'oe': 722,
'ogonek': 333,
'ograve': 500,
'one': 500,
'onehalf': 750,
'onequarter': 750,
'onesuperior': 300,
'ordfeminine': 266,
'ordmasculine': 300,
'oslash': 500,
'otilde': 500,
'p': 500,
'paragraph': 500,
'parenleft': 333,
'parenright': 333,
'percent': 833,
'period': 250,
'periodcentered': 250,
'perthousand': 1000,
'plus': 570,
'plusminus': 570,
'q': 500,
'question': 500,
'questiondown': 500,
'quotedbl': 555,
'quotedblbase': 500,
'quotedblleft': 500,
'quotedblright': 500,
'quoteleft': 333,
'quoteright': 333,
'quotesinglbase': 333,
'quotesingle': 278,
'r': 389,
'registered': 747,
'ring': 333,
's': 389,
'scaron': 389,
'section': 500,
'semicolon': 333,
'seven': 500,
'six': 500,
'slash': 278,
'space': 250,
'sterling': 500,
't': 278,
'thorn': 500,
'three': 500,
'threequarters': 750,
'threesuperior': 300,
'tilde': 333,
'trademark': 1000,
'two': 500,
'twosuperior': 300,
'u': 556,
'uacute': 556,
'ucircumflex': 556,
'udieresis': 556,
'ugrave': 556,
'underscore': 500,
'v': 444,
'w': 667,
'x': 500,
'y': 444,
'yacute': 444,
'ydieresis': 444,
'yen': 500,
'z': 389,
'zcaron': 389,
'zero': 500}
widthsByFontGlyph['Symbol'] = {'Alpha': 722,
'Beta': 667,
'Chi': 722,
'Delta': 612,
'Epsilon': 611,
'Eta': 722,
'Euro': 750,
'Gamma': 603,
'Ifraktur': 686,
'Iota': 333,
'Kappa': 722,
'Lambda': 686,
'Mu': 889,
'Nu': 722,
'Omega': 768,
'Omicron': 722,
'Phi': 763,
'Pi': 768,
'Psi': 795,
'Rfraktur': 795,
'Rho': 556,
'Sigma': 592,
'Tau': 611,
'Theta': 741,
'Upsilon': 690,
'Upsilon1': 620,
'Xi': 645,
'Zeta': 611,
'aleph': 823,
'alpha': 631,
'ampersand': 778,
'angle': 768,
'angleleft': 329,
'angleright': 329,
'apple': 790,
'approxequal': 549,
'arrowboth': 1042,
'arrowdblboth': 1042,
'arrowdbldown': 603,
'arrowdblleft': 987,
'arrowdblright': 987,
'arrowdblup': 603,
'arrowdown': 603,
'arrowhorizex': 1000,
'arrowleft': 987,
'arrowright': 987,
'arrowup': 603,
'arrowvertex': 603,
'asteriskmath': 500,
'bar': 200,
'beta': 549,
'braceex': 494,
'braceleft': 480,
'braceleftbt': 494,
'braceleftmid': 494,
'bracelefttp': 494,
'braceright': 480,
'bracerightbt': 494,
'bracerightmid': 494,
'bracerighttp': 494,
'bracketleft': 333,
'bracketleftbt': 384,
'bracketleftex': 384,
'bracketlefttp': 384,
'bracketright': 333,
'bracketrightbt': 384,
'bracketrightex': 384,
'bracketrighttp': 384,
'bullet': 460,
'carriagereturn': 658,
'chi': 549,
'circlemultiply': 768,
'circleplus': 768,
'club': 753,
'colon': 278,
'comma': 250,
'congruent': 549,
'copyrightsans': 790,
'copyrightserif': 790,
'degree': 400,
'delta': 494,
'diamond': 753,
'divide': 549,
'dotmath': 250,
'eight': 500,
'element': 713,
'ellipsis': 1000,
'emptyset': 823,
'epsilon': 439,
'equal': 549,
'equivalence': 549,
'eta': 603,
'exclam': 333,
'existential': 549,
'five': 500,
'florin': 500,
'four': 500,
'fraction': 167,
'gamma': 411,
'gradient': 713,
'greater': 549,
'greaterequal': 549,
'heart': 753,
'infinity': 713,
'integral': 274,
'integralbt': 686,
'integralex': 686,
'integraltp': 686,
'intersection': 768,
'iota': 329,
'kappa': 549,
'lambda': 549,
'less': 549,
'lessequal': 549,
'logicaland': 603,
'logicalnot': 713,
'logicalor': 603,
'lozenge': 494,
'minus': 549,
'minute': 247,
'mu': 576,
'multiply': 549,
'nine': 500,
'notelement': 713,
'notequal': 549,
'notsubset': 713,
'nu': 521,
'numbersign': 500,
'omega': 686,
'omega1': 713,
'omicron': 549,
'one': 500,
'parenleft': 333,
'parenleftbt': 384,
'parenleftex': 384,
'parenlefttp': 384,
'parenright': 333,
'parenrightbt': 384,
'parenrightex': 384,
'parenrighttp': 384,
'partialdiff': 494,
'percent': 833,
'period': 250,
'perpendicular': 658,
'phi': 521,
'phi1': 603,
'pi': 549,
'plus': 549,
'plusminus': 549,
'product': 823,
'propersubset': 713,
'propersuperset': 713,
'proportional': 713,
'psi': 686,
'question': 444,
'radical': 549,
'radicalex': 500,
'reflexsubset': 713,
'reflexsuperset': 713,
'registersans': 790,
'registerserif': 790,
'rho': 549,
'second': 411,
'semicolon': 278,
'seven': 500,
'sigma': 603,
'sigma1': 439,
'similar': 549,
'six': 500,
'slash': 278,
'space': 250,
'spade': 753,
'suchthat': 439,
'summation': 713,
'tau': 439,
'therefore': 863,
'theta': 521,
'theta1': 631,
'three': 500,
'trademarksans': 786,
'trademarkserif': 890,
'two': 500,
'underscore': 500,
'union': 768,
'universal': 713,
'upsilon': 576,
'weierstrass': 987,
'xi': 493,
'zero': 500,
'zeta': 494}
widthsByFontGlyph['ZapfDingbats'] = {'a1': 974,
'a10': 692,
'a100': 668,
'a101': 732,
'a102': 544,
'a103': 544,
'a104': 910,
'a105': 911,
'a106': 667,
'a107': 760,
'a108': 760,
'a109': 626,
'a11': 960,
'a110': 694,
'a111': 595,
'a112': 776,
'a117': 690,
'a118': 791,
'a119': 790,
'a12': 939,
'a120': 788,
'a121': 788,
'a122': 788,
'a123': 788,
'a124': 788,
'a125': 788,
'a126': 788,
'a127': 788,
'a128': 788,
'a129': 788,
'a13': 549,
'a130': 788,
'a131': 788,
'a132': 788,
'a133': 788,
'a134': 788,
'a135': 788,
'a136': 788,
'a137': 788,
'a138': 788,
'a139': 788,
'a14': 855,
'a140': 788,
'a141': 788,
'a142': 788,
'a143': 788,
'a144': 788,
'a145': 788,
'a146': 788,
'a147': 788,
'a148': 788,
'a149': 788,
'a15': 911,
'a150': 788,
'a151': 788,
'a152': 788,
'a153': 788,
'a154': 788,
'a155': 788,
'a156': 788,
'a157': 788,
'a158': 788,
'a159': 788,
'a16': 933,
'a160': 894,
'a161': 838,
'a162': 924,
'a163': 1016,
'a164': 458,
'a165': 924,
'a166': 918,
'a167': 927,
'a168': 928,
'a169': 928,
'a17': 945,
'a170': 834,
'a171': 873,
'a172': 828,
'a173': 924,
'a174': 917,
'a175': 930,
'a176': 931,
'a177': 463,
'a178': 883,
'a179': 836,
'a18': 974,
'a180': 867,
'a181': 696,
'a182': 874,
'a183': 760,
'a184': 946,
'a185': 865,
'a186': 967,
'a187': 831,
'a188': 873,
'a189': 927,
'a19': 755,
'a190': 970,
'a191': 918,
'a192': 748,
'a193': 836,
'a194': 771,
'a195': 888,
'a196': 748,
'a197': 771,
'a198': 888,
'a199': 867,
'a2': 961,
'a20': 846,
'a200': 696,
'a201': 874,
'a202': 974,
'a203': 762,
'a204': 759,
'a205': 509,
'a206': 410,
'a21': 762,
'a22': 761,
'a23': 571,
'a24': 677,
'a25': 763,
'a26': 760,
'a27': 759,
'a28': 754,
'a29': 786,
'a3': 980,
'a30': 788,
'a31': 788,
'a32': 790,
'a33': 793,
'a34': 794,
'a35': 816,
'a36': 823,
'a37': 789,
'a38': 841,
'a39': 823,
'a4': 719,
'a40': 833,
'a41': 816,
'a42': 831,
'a43': 923,
'a44': 744,
'a45': 723,
'a46': 749,
'a47': 790,
'a48': 792,
'a49': 695,
'a5': 789,
'a50': 776,
'a51': 768,
'a52': 792,
'a53': 759,
'a54': 707,
'a55': 708,
'a56': 682,
'a57': 701,
'a58': 826,
'a59': 815,
'a6': 494,
'a60': 789,
'a61': 789,
'a62': 707,
'a63': 687,
'a64': 696,
'a65': 689,
'a66': 786,
'a67': 787,
'a68': 713,
'a69': 791,
'a7': 552,
'a70': 785,
'a71': 791,
'a72': 873,
'a73': 761,
'a74': 762,
'a75': 759,
'a76': 892,
'a77': 892,
'a78': 788,
'a79': 784,
'a8': 537,
'a81': 438,
'a82': 138,
'a83': 277,
'a84': 415,
'a85': 509,
'a86': 410,
'a87': 234,
'a88': 234,
'a89': 390,
'a9': 577,
'a90': 390,
'a91': 276,
'a92': 276,
'a93': 317,
'a94': 317,
'a95': 334,
'a96': 334,
'a97': 392,
'a98': 392,
'a99': 668,
'space': 278}
# Preserve the import-time contents of the module-level font tables so the
# framework can restore them on demand (see register_reset below).
def _reset(
        initial_dicts=dict(
            ascent_descent=ascent_descent.copy(),
            fontsByBaseEnc=fontsByBaseEnc.copy(),
            fontsByName=fontsByName.copy(),
            standardFontAttributes=standardFontAttributes.copy(),
            widthVectorsByFont=widthVectorsByFont.copy(),
            widthsByFontGlyph=widthsByFontGlyph.copy(),
            )
        ):
    """Restore each font table to the snapshot taken at import time.

    The mutable default argument is intentional: it captures a copy of each
    table exactly once, when this function is defined.
    """
    # .items() (not the Python-2-only .iteritems()) so the helper keeps
    # working when the file is run under Python 3.
    for name, initial in initial_dicts.items():
        table = globals()[name]
        # Mutate in place so references held elsewhere stay valid.
        table.clear()
        table.update(initial)
from reportlab.rl_config import register_reset
register_reset(_reset)
del register_reset
|
alexissmirnov/donomo
|
donomo_archive/lib/reportlab/pdfbase/_fontdata.py
|
Python
|
bsd-3-clause
| 61,719 | 0.04992 |
"""Th tests for the Rfxtrx component."""
# pylint: disable=too-many-public-methods,protected-access
import unittest
import time
from homeassistant.bootstrap import _setup_component
from homeassistant.components import rfxtrx as rfxtrx
from tests.common import get_test_home_assistant
class TestRFXTRX(unittest.TestCase):
    """Test the Rfxtrx component."""
    def setUp(self):
        """Create a fresh test Home Assistant instance for each test."""
        self.hass = get_test_home_assistant(0)
    def tearDown(self):
        """Stop everything that was started."""
        # The rfxtrx module keeps mutable module-level state; clear it so
        # one test cannot leak devices or subscribers into the next.
        rfxtrx.RECEIVED_EVT_SUBSCRIBERS = []
        rfxtrx.RFX_DEVICES = {}
        if rfxtrx.RFXOBJECT:
            rfxtrx.RFXOBJECT.close_connection()
        self.hass.stop()
    def test_default_config(self):
        """Test a minimal config with automatic device discovery."""
        # 'dummy': True makes the component simulate the transceiver instead
        # of opening the serial device.
        self.assertTrue(_setup_component(self.hass, 'rfxtrx', {
            'rfxtrx': {
                'device': '/dev/serial/by-id/usb' +
                          '-RFXCOM_RFXtrx433_A1Y0NJGR-if00-port0',
                'dummy': True}
        }))
        self.assertTrue(_setup_component(self.hass, 'sensor', {
            'sensor': {'platform': 'rfxtrx',
                       'automatic_add': True,
                       'devices': {}}}))
        # Busy-wait until the dummy transceiver has auto-added a device.
        while len(rfxtrx.RFX_DEVICES) < 1:
            time.sleep(0.1)
        self.assertEqual(len(rfxtrx.RFXOBJECT.sensors()), 1)
    def test_valid_config(self):
        """Test that valid configurations are accepted."""
        self.assertTrue(_setup_component(self.hass, 'rfxtrx', {
            'rfxtrx': {
                'device': '/dev/serial/by-id/usb' +
                          '-RFXCOM_RFXtrx433_A1Y0NJGR-if00-port0',
                'dummy': True}}))
        # The optional 'debug' flag must also be accepted.
        self.assertTrue(_setup_component(self.hass, 'rfxtrx', {
            'rfxtrx': {
                'device': '/dev/serial/by-id/usb' +
                          '-RFXCOM_RFXtrx433_A1Y0NJGR-if00-port0',
                'dummy': True,
                'debug': True}}))
    def test_invalid_config(self):
        """Test that invalid configurations are rejected."""
        # Missing the required 'device' key.
        self.assertFalse(_setup_component(self.hass, 'rfxtrx', {
            'rfxtrx': {}
        }))
        # Unknown keys must fail schema validation.
        self.assertFalse(_setup_component(self.hass, 'rfxtrx', {
            'rfxtrx': {
                'device': '/dev/serial/by-id/usb' +
                          '-RFXCOM_RFXtrx433_A1Y0NJGR-if00-port0',
                'invalid_key': True}}))
    def test_fire_event(self):
        """Test that a received packet fires EVENT_BUTTON_PRESSED."""
        self.assertTrue(_setup_component(self.hass, 'rfxtrx', {
            'rfxtrx': {
                'device': '/dev/serial/by-id/usb' +
                          '-RFXCOM_RFXtrx433_A1Y0NJGR-if00-port0',
                'dummy': True}
        }))
        # Configure one switch (by raw packet id) with event firing enabled.
        self.assertTrue(_setup_component(self.hass, 'switch', {
            'switch': {'platform': 'rfxtrx',
                       'automatic_add': True,
                       'devices':
                           {'0b1100cd0213c7f210010f51': {
                               'name': 'Test',
                               rfxtrx.ATTR_FIREEVENT: True}
                            }}}))
        calls = []
        def record_event(event):
            """Add recorded event to set."""
            calls.append(event)
        self.hass.bus.listen(rfxtrx.EVENT_BUTTON_PRESSED, record_event)
        # The device id is derived from the configured packet above.
        entity = rfxtrx.RFX_DEVICES['213c7f216']
        self.assertEqual('Test', entity.name)
        self.assertEqual('off', entity.state)
        self.assertTrue(entity.should_fire_event)
        # Simulate an incoming "On" packet for the configured device.
        event = rfxtrx.get_rfx_object('0b1100cd0213c7f210010f51')
        event.data = bytearray([0x0b, 0x11, 0x00, 0x10, 0x01, 0x18,
                                0xcd, 0xea, 0x01, 0x01, 0x0f, 0x70])
        rfxtrx.RECEIVED_EVT_SUBSCRIBERS[0](event)
        self.hass.pool.block_till_done()
        self.assertEqual(event.values['Command'], "On")
        self.assertEqual('on', entity.state)
        self.assertEqual(1, len(rfxtrx.RFX_DEVICES))
        self.assertEqual(1, len(calls))
        self.assertEqual(calls[0].data,
                         {'entity_id': 'switch.test', 'state': 'on'})
|
Zyell/home-assistant
|
tests/components/test_rfxtrx.py
|
Python
|
mit
| 4,095 | 0 |
# -*- coding: utf-8 -*-
import click
from ..models import Post
@click.command()
@click.option('--title', default=None, help='Title of the Post')
def cli(title):
    "Prints a list of posts"
    # Start from the full queryset; narrow it only when a title was given.
    queryset = Post.objects(title=title) if title else Post.objects
    for entry in queryset:
        click.echo(entry)
|
seraphln/wheel
|
wheel/modules/posts/commands/listposts.py
|
Python
|
gpl-3.0
| 316 | 0 |
# -*- coding: utf-8 -*-
'''Custom exceptions for the framework.'''
import copy
import httplib as http
from flask import request
class FrameworkError(Exception):
    """Root of the framework exception hierarchy; all framework-related
    errors derive from this class."""
class HTTPError(FrameworkError):
    """An error carrying an HTTP status code, an optional redirect and an
    optional payload dict, rendered for the client via :meth:`to_data`."""

    # Default short/long messages for the status codes we present nicely.
    error_msgs = {
        http.BAD_REQUEST: {
            'message_short': 'Bad request',
            'message_long': ('If this should not have occurred and the issue persists, '
                             'please report it to <a href="mailto:support@osf.io">support@osf.io</a>.'),
        },
        http.UNAUTHORIZED: {
            'message_short': 'Unauthorized',
            'message_long': 'You must <a href="/login/">log in</a> to access this resource.',
        },
        http.FORBIDDEN: {
            'message_short': 'Forbidden',
            'message_long': ('You do not have permission to perform this action. '
                             'If this should not have occurred and the issue persists, '
                             'please report it to <a href="mailto:support@osf.io">support@osf.io</a>.'),
        },
        http.NOT_FOUND: {
            'message_short': 'Page not found',
            'message_long': ('The requested resource could not be found. If this '
                             'should not have occurred and the issue persists, please report it '
                             'to <a href="mailto:support@osf.io">support@osf.io</a>.'),
        },
        http.GONE: {
            'message_short': 'Resource deleted',
            'message_long': ('The requested resource has been deleted. If this should '
                             'not have occurred and the issue persists, please report it to '
                             '<a href="mailto:support@osf.io">support@osf.io</a>.'),
        },
    }

    def __init__(self, code, message=None, redirect_url=None, data=None):
        """Record the HTTP status ``code``, optional ``message``,
        ``redirect_url`` and extra ``data`` for the response payload."""
        super(HTTPError, self).__init__(message)
        self.code = code
        self.redirect_url = redirect_url
        self.data = data or {}
        try:
            # request.referrer only exists inside a Flask request context;
            # outside one, accessing it raises RuntimeError.
            self.referrer = request.referrer
        except RuntimeError:
            self.referrer = None

    def to_data(self):
        """Return a serializable dict describing this error.

        Keys from ``self.data`` override the default messages; ``code`` and
        ``referrer`` are always set.
        """
        if self.code in self.error_msgs:
            data = {
                'message_short': self.error_msgs[self.code]['message_short'],
                'message_long': self.error_msgs[self.code]['message_long']
            }
        else:
            data = {
                'message_short': 'Unable to resolve',
                'message_long': ('OSF was unable to resolve your request. If this '
                                 'issue persists, please report it to '
                                 '<a href="mailto:support@osf.io">support@osf.io</a>.')
            }
        # Merge a deep copy so that callers mutating the returned payload
        # cannot corrupt self.data.  (The original code deep-copied
        # self.data but then discarded the copy and merged the live dict.)
        data.update(copy.deepcopy(self.data))
        data['code'] = self.code
        data['referrer'] = self.referrer
        return data
class PermissionsError(FrameworkError):
    """Raised when an action cannot be performed because the caller lacks
    sufficient permissions."""
|
AndrewSallans/osf.io
|
framework/exceptions/__init__.py
|
Python
|
apache-2.0
| 2,900 | 0.007241 |
from __future__ import print_function, absolute_import
from pymatgen.io.adf import AdfKey, AdfTask, AdfOutput, AdfInput
from pymatgen.core.structure import Molecule
import unittest
import os
from os.path import join
__author__ = 'Xin Chen, chenxin13@mails.tsinghua.edu.cn'
test_dir = os.path.join(os.path.dirname(__file__), "..", "..", "..",
'test_files', 'molecules')
geometry_string = """GEOMETRY
smooth conservepoints
optim all cartesian
iterations 250
step rad=0.15 angle=10.0
hessupd BFGS
converge e=0.001 grad=0.0003 rad=0.01 angle=0.5
END
"""
zlmfit_string = """ZLMFIT
AtomDepQuality
10 good
12 normal
subend
END
"""
atoms_string = """ATOMS
O -0.90293455 0.66591421 0.00000000
H 0.05706545 0.66591421 0.00000000
H -1.22338913 1.57085004 0.00000000
END
"""
h2oxyz = """3
0.0
O -0.90293455 0.66591421 0.0
H 0.05706545 0.66591421 0.0
H -1.22338913 1.57085004 0.0
"""
rhb18xyz = """19
0.0
Rh -0.453396 -0.375115 0.000000
B 0.168139 3.232791 0.000000
B -0.270938 1.639058 0.000000
B 0.206283 2.604044 1.459430
B 0.404410 1.880136 2.866764
B -0.103309 0.887485 1.655272
B 0.436856 0.371367 3.299887
B 0.016593 -0.854959 1.930982
B 0.563233 -1.229713 3.453066
B 0.445855 -2.382027 2.415013
B 0.206283 2.604044 -1.459430
B 0.404410 1.880136 -2.866764
B -0.103309 0.887485 -1.655272
B 0.436856 0.371367 -3.299887
B 0.563233 -1.229713 -3.453066
B 0.016593 -0.854959 -1.930982
B 0.200456 -2.309538 -0.836316
B 0.200456 -2.309538 0.836316
B 0.445855 -2.382027 -2.415013
"""
def readfile(file_object):
    """
    Return the content of the file as a string.

    Parameters
    ----------
    file_object : file or str
        The file to read. This can be either a File object or a file path.

    Returns
    -------
    content : str
        The content of the file.

    Raises
    ------
    ValueError
        If ``file_object`` is neither a string nor a file-like object.
    """
    if hasattr(file_object, "read"):
        return file_object.read()
    elif isinstance(file_object, str):
        # Context manager guarantees the handle is closed even if read()
        # raises; the original open/read/close leaked it on error.
        with open(file_object, "r") as f:
            return f.read()
    else:
        raise ValueError("``file_object`` must be a string or a file object!")
class AdfKeyTest(unittest.TestCase):
    """Unit tests for AdfKey construction, serialization and parsing."""
    def test_simple(self):
        """A bare key renders as its upper-cased name."""
        unrestricted = AdfKey("unrestricted")
        self.assertEqual(str(unrestricted).strip(), 'UNRESTRICTED')
    def test_options(self):
        """A key with options renders them and round-trips through as_dict."""
        charge = AdfKey("charge", [-1, 0])
        charge_string = "CHARGE -1 0\n"
        self.assertEqual(str(charge), "CHARGE -1 0\n")
        self.assertEqual(str(AdfKey.from_dict(charge.as_dict())), charge_string)
    def test_subkeys(self):
        """A key with subkeys renders a full GEOMETRY block and round-trips."""
        smooth = AdfKey("smooth", ["conservepoints"])
        optim = AdfKey("optim", ["all", "cartesian"])
        iterations = AdfKey("iterations", [250])
        # (name, value) tuples become "name=value" options.
        step = AdfKey("step", [("rad", 0.15), ("angle", 10.0)])
        hessupd = AdfKey("hessupd", ["BFGS"])
        converge = AdfKey("converge", [("e", 1.0e-3), ("grad", 3.0e-4),
                                       ("rad", 1.0e-2), ("angle", 0.5)])
        geo = AdfKey("geometry", subkeys=[smooth, optim, iterations, step,
                                          hessupd, converge])
        # geometry_string is the expected rendering defined at module level.
        self.assertEqual(str(geo), geometry_string)
        self.assertEqual(str(AdfKey.from_dict(geo.as_dict())), geometry_string)
        self.assertTrue(geo.has_subkey("optim"))
    def test_end(self):
        """A block key without options still gets an END terminator."""
        geo = AdfKey("Geometry")
        self.assertEqual(str(geo), "GEOMETRY\nEND\n")
    def test_subkeys_subkeys(self):
        """Nested subkeys render with 'subend' separators (see zlmfit_string)."""
        atom_dep_quality = AdfKey("AtomDepQuality",
                                  subkeys=[AdfKey("10", ["good"]),
                                           AdfKey("12", ["normal"])])
        zlmfit = AdfKey("zlmfit", subkeys=[atom_dep_quality])
        self.assertEqual(str(zlmfit), zlmfit_string)
        self.assertEqual(str(AdfKey.from_dict(zlmfit.as_dict())), zlmfit_string)
    def test_from_string(self):
        """Parse keys from strings: options, name=value pairs, blocks."""
        k1 = AdfKey.from_string("CHARGE -1 0")
        self.assertEqual(k1.key, "CHARGE")
        self.assertListEqual(k1.options, [-1, 0])
        # "name=value" options parse into [name, value] pairs.
        k2 = AdfKey.from_string("step rad=0.15 angle=10.0")
        self.assertEqual(k2.key, "step")
        self.assertListEqual(k2.options[0], ['rad', 0.15])
        self.assertListEqual(k2.options[1], ['angle', 10.0])
        # A multi-line block string parses into a key with subkeys.
        k3 = AdfKey.from_string("GEOMETRY\noptim all\niterations 100\nEND\n")
        self.assertEqual(k3.key, "GEOMETRY")
        self.assertEqual(k3.subkeys[0].options[0], "all")
        self.assertEqual(k3.subkeys[1].options[0], 100)
        k4 = AdfKey.from_string(
            """SCF
            iterations 300
            converge 1.0e-7 1.0e-7
            mixing 0.2
            diis n=100 ok=0.0001 cyc=100 cx=5.0 cxx=10.0
            END"""
        )
        self.assertEqual(k4.key, "SCF")
        self.assertEqual(k4.subkeys[0].key, "iterations")
        self.assertEqual(k4.subkeys[1].key, "converge")
        self.assertEqual(k4.subkeys[1].options[0], 1E-7)
        self.assertEqual(k4.subkeys[2].options[0], 0.2)
    def test_option_operations(self):
        """Add and remove options by value/index and by name."""
        k1 = AdfKey("Charge", [-1, 0])
        k1.add_option(2)
        self.assertListEqual(k1.options, [-1, 0, 2])
        k1.remove_option(0)
        self.assertListEqual(k1.options, [0, 2])
        k2 = AdfKey.from_string("step rad=0.15 angle=10.0")
        k2.add_option(["length", 0.1])
        self.assertListEqual(k2.options[2], ["length", 0.1])
        # Named options are removed by name rather than index.
        k2.remove_option("rad")
        self.assertListEqual(k2.options[0], ["angle", 10.0])
    def test_atom_block_key(self):
        """An ATOMS block built from a Molecule matches atoms_string."""
        block = AdfKey("atoms")
        o = Molecule.from_str(h2oxyz, "xyz")
        for site in o:
            block.add_subkey(AdfKey(str(site.specie), list(site.coords)))
        self.assertEqual(str(block), atoms_string)
energy_task = """TITLE ADF_RUN
UNITS
length angstrom
angle degree
END
XC
GGA PBE
END
BASIS
type DZ
core small
END
SCF
iterations 300
END
GEOMETRY SinglePoint
END
"""
class AdfTaskTest(unittest.TestCase):
    """Tests for AdfTask rendering and dict round-tripping."""
    def test_energy(self):
        """A default task renders as the single-point energy input."""
        task = AdfTask()
        self.assertEqual(str(task), energy_task)
    def test_serialization(self):
        """as_dict/from_dict must preserve every task attribute."""
        task = AdfTask()
        o = AdfTask.from_dict(task.as_dict())
        self.assertEqual(task.title, o.title)
        self.assertEqual(task.basis_set, o.basis_set)
        self.assertEqual(task.scf, o.scf)
        self.assertEqual(task.geo, o.geo)
        self.assertEqual(task.operation, o.operation)
        self.assertEqual(task.units, o.units)
        self.assertEqual(str(task), str(o))
rhb18 = {"title": "RhB18",
"basis_set": AdfKey.from_string("BASIS\ntype TZP\ncore small\nEND"),
"xc": AdfKey.from_string("XC\nHybrid PBE0\nEND"),
"units": AdfKey.from_string("UNITS\nlength angstrom\nEND"),
"other_directives": [AdfKey.from_string("SYMMETRY"),
AdfKey.from_string("RELATIVISTIC scalar zora"),
AdfKey.from_string("INTEGRATION 6.0 6.0 6.0"),
AdfKey.from_string("SAVE TAPE21"),
AdfKey.from_string("A1FIT 10.0")],
"geo_subkeys": [AdfKey.from_string("optim all"),
AdfKey.from_string("iterations 300"),
AdfKey.from_string("step rad=0.15 angle=10.0"),
AdfKey.from_string("hessupd BFGS")],
"scf": AdfKey.from_string(
"""SCF
iterations 300
converge 1.0e-7 1.0e-7
mixing 0.2
lshift 0.0
diis n=100 ok=0.0001 cyc=100 cx=5.0 cxx=10.0
END"""
)}
class AdfInputTest(unittest.TestCase):
    """Test writing a full ADF input file against a reference fixture."""
    def setUp(self):
        # Scratch file for the generated input; removed in tearDown.
        self.tempfile = "./adf.temp"
    def test_main(self):
        """Written RhB18 optimization input must match the stored fixture."""
        o = Molecule.from_str(rhb18xyz, "xyz")
        o.set_charge_and_spin(-1, 3)
        # rhb18 (module level) supplies basis set, XC, SCF etc. settings.
        task = AdfTask("optimize", **rhb18)
        inp = AdfInput(task)
        inp.write_file(o, self.tempfile)
        s = readfile(join(test_dir, "adf", "RhB18_adf.inp"))
        self.assertEqual(readfile(self.tempfile), s)
    def tearDown(self):
        if os.path.isfile(self.tempfile):
            os.remove(self.tempfile)
class AdfOutputTest(unittest.TestCase):
    """Parse reference ADF output files and check the extracted results."""
    def test_analytical_freq(self):
        """Energies, frequencies and normal modes from an analytical run."""
        filename = join(test_dir, "adf", "analytical_freq", "adf.out")
        o = AdfOutput(filename)
        self.assertAlmostEqual(o.final_energy, -0.54340325)
        self.assertEqual(len(o.energies), 4)
        self.assertEqual(len(o.structures), 4)
        self.assertAlmostEqual(o.frequencies[0], 1553.931)
        self.assertAlmostEqual(o.frequencies[2], 3793.086)
        self.assertAlmostEqual(o.normal_modes[0][2], 0.071)
        self.assertAlmostEqual(o.normal_modes[0][6], 0.000)
        self.assertAlmostEqual(o.normal_modes[0][7], -0.426)
        self.assertAlmostEqual(o.normal_modes[0][8], -0.562)
    def test_numerical_freq(self):
        """Frequencies and normal modes from a numerical-frequency run."""
        filename = join(test_dir, "adf", "numerical_freq", "adf.out")
        o = AdfOutput(filename)
        self.assertEqual(o.freq_type, 'Numerical')
        self.assertEqual(o.final_structure.num_sites, 4)
        self.assertEqual(len(o.frequencies), 6)
        self.assertEqual(len(o.normal_modes), 6)
        self.assertAlmostEqual(o.frequencies[0], 938.21)
        self.assertAlmostEqual(o.frequencies[3], 3426.64)
        self.assertAlmostEqual(o.frequencies[4], 3559.35)
        self.assertAlmostEqual(o.frequencies[5], 3559.35)
        self.assertAlmostEqual(o.normal_modes[1][0], 0.067)
        self.assertAlmostEqual(o.normal_modes[1][3], -0.536)
        self.assertAlmostEqual(o.normal_modes[1][7], 0.000)
        self.assertAlmostEqual(o.normal_modes[1][9], -0.536)
    def test_single_point(self):
        """Final energy and structure from a single-point run."""
        filename = join(test_dir, "adf", "sp", "adf.out")
        o = AdfOutput(filename)
        self.assertAlmostEqual(o.final_energy, -0.74399276)
        self.assertEqual(len(o.final_structure), 4)
if __name__ == "__main__":
    # Allow this test module to be run directly.
    unittest.main()
|
matk86/pymatgen
|
pymatgen/io/tests/test_adf.py
|
Python
|
mit
| 10,190 | 0.000196 |
# -*- coding: utf-8 -*-
# Copyright 2007-2021 The HyperSpy developers
#
# This file is part of HyperSpy.
#
# HyperSpy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# HyperSpy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with HyperSpy. If not, see <http://www.gnu.org/licenses/>.
import itertools
import numpy as np
import pytest
from hyperspy.components1d import Exponential
from hyperspy.signals import Signal1D
from hyperspy.utils import stack
# All four (bool, bool) combinations, used to parametrize the tests below.
# list(...) is clearer than wrapping the product in a comprehension (C416).
TRUE_FALSE_2_TUPLE = list(itertools.product((True, False), repeat=2))
def test_function():
    """Exponential.function must evaluate A * exp(-x / tau)."""
    comp = Exponential()
    comp.A.value = 10000.
    comp.tau.value = 200.
    x = 200.
    expected = comp.A.value * np.exp(-x / comp.tau.value)
    # At x = 0 the exponential equals the amplitude A.
    np.testing.assert_allclose(comp.function(0.), comp.A.value)
    np.testing.assert_allclose(comp.function(x), expected)
@pytest.mark.parametrize(("lazy"), (True, False))
@pytest.mark.parametrize(("uniform"), (True, False))
@pytest.mark.parametrize(("only_current", "binned"), TRUE_FALSE_2_TUPLE)
def test_estimate_parameters_binned(only_current, binned, lazy, uniform):
    """estimate_parameters must recover A and tau from a noise-free signal
    for every combination of binned/uniform/lazy/only_current."""
    s = Signal1D(np.empty((100,)))
    s.axes_manager.signal_axes[0].is_binned = binned
    axis = s.axes_manager.signal_axes[0]
    axis.scale = 0.2
    axis.offset = 15.
    # Synthesize data from a component with known parameters.
    g1 = Exponential(A=10005.7, tau=214.3)
    s.data = g1.function(axis.axis)
    if not uniform:
        # NOTE(review): conversion happens after the data is generated on
        # the uniform axis — order appears intentional; confirm.
        axis.convert_to_non_uniform_axis()
    if lazy:
        s = s.as_lazy()
    g2 = Exponential()
    # For binned signals the estimated amplitude absorbs the bin width
    # (a scalar for uniform axes, per-bin widths otherwise).
    if binned and uniform:
        factor = axis.scale
    elif binned:
        factor = np.gradient(axis.axis)
    else:
        factor = 1
    assert g2.estimate_parameters(s, axis.low_value, axis.high_value,
                                  only_current=only_current)
    assert g2._axes_manager[-1].is_binned == binned
    np.testing.assert_allclose(g1.A.value, g2.A.value * factor, rtol=0.05)
    np.testing.assert_allclose(g1.tau.value, g2.tau.value)
@pytest.mark.parametrize(("lazy"), (True, False))
@pytest.mark.parametrize(("binned"), (True, False))
def test_function_nd(binned, lazy):
    """function_nd must reproduce the source signal across a stacked axis."""
    signal = Signal1D(np.empty((100,)))
    axis = signal.axes_manager.signal_axes[0]
    axis.scale = 0.2
    axis.offset = 15
    source = Exponential(A=10005.7, tau=214.3)
    signal.data = source.function(axis.axis)
    signal.axes_manager.signal_axes[0].is_binned = binned
    # Duplicate the signal along a new navigation axis.
    stacked = stack([signal] * 2)
    if lazy:
        stacked = stacked.as_lazy()
    fitted = Exponential()
    scale_factor = axis.scale if binned else 1.
    fitted.estimate_parameters(stacked, axis.low_value, axis.high_value, False)
    assert fitted._axes_manager[-1].is_binned == binned
    np.testing.assert_allclose(fitted.function_nd(axis.axis) * scale_factor,
                               stacked.data, rtol=0.05)
|
erh3cq/hyperspy
|
hyperspy/tests/component/test_exponential.py
|
Python
|
gpl-3.0
| 3,100 | 0.000323 |
import pytest
from webdriver.error import NoSuchElementException, StaleElementReferenceException
from tests.support.asserts import assert_error, assert_success
def refresh(session):
    """Issue the WebDriver Refresh command for the given session."""
    # vars(session) exposes session_id for the URL template; extra
    # attributes in the mapping are simply ignored by format().
    endpoint = "session/{session_id}/refresh".format(**vars(session))
    return session.transport.send("POST", endpoint)
def test_null_response_value(session, inline):
    """A successful refresh returns a null (None) response value."""
    session.url = inline("<div>")
    response = refresh(session)
    value = assert_success(response)
    assert value is None
def test_no_top_browsing_context(session, closed_window):
    """Refresh with the top-level browsing context closed is a
    'no such window' error."""
    response = refresh(session)
    assert_error(response, "no such window")
def test_no_browsing_context(session, closed_frame, inline):
    """Refresh succeeds even when the current child frame is closed,
    and re-targets the top-level browsing context."""
    url = inline("<div id=foo>")
    session.url = url
    element = session.find.css("#foo", all=False)
    response = refresh(session)
    assert_success(response)
    # The pre-refresh element reference must now be stale.
    with pytest.raises(StaleElementReferenceException):
        element.property("id")
    assert session.url == url
    assert session.find.css("#foo", all=False)
def test_basic(session, inline):
    """Basic refresh: URL is preserved, old element references go stale,
    and the element can be found again in the reloaded document."""
    url = inline("<div id=foo>")
    session.url = url
    element = session.find.css("#foo", all=False)
    response = refresh(session)
    assert_success(response)
    with pytest.raises(StaleElementReferenceException):
        element.property("id")
    assert session.url == url
    assert session.find.css("#foo", all=False)
def test_dismissed_beforeunload(session, inline):
    """A beforeunload prompt must not block refresh: the command succeeds
    and the page is reloaded (old element reference goes stale)."""
    url_beforeunload = inline("""
<input type="text">
<script>
window.addEventListener("beforeunload", function (event) {
event.preventDefault();
});
</script>
""")
    session.url = url_beforeunload
    element = session.find.css("input", all=False)
    # Typing into the input arms the beforeunload prompt in most browsers.
    element.send_keys("bar")
    response = refresh(session)
    assert_success(response)
    with pytest.raises(StaleElementReferenceException):
        element.property("id")
    session.find.css("input", all=False)
def test_history_pushstate(session, inline):
    """Refresh preserves a history.pushState entry: URL fragment and
    history.state survive the reload."""
    pushstate_page = inline("""
<script>
function pushState() {
history.pushState({foo: "bar"}, "", "#pushstate");
}
</script>
<a onclick="javascript:pushState();">click</a>
""")
    session.url = pushstate_page
    session.find.css("a", all=False).click()
    assert session.url == "{}#pushstate".format(pushstate_page)
    assert session.execute_script("return history.state;") == {"foo": "bar"}
    # Inject an element so its staleness proves a real reload happened.
    session.execute_script("""
let elem = window.document.createElement('div');
window.document.body.appendChild(elem);
""")
    element = session.find.css("div", all=False)
    response = refresh(session)
    assert_success(response)
    assert session.url == "{}#pushstate".format(pushstate_page)
    assert session.execute_script("return history.state;") == {"foo": "bar"}
    with pytest.raises(StaleElementReferenceException):
        element.property("id")
def test_refresh_switches_to_parent_browsing_context(session, create_frame, inline):
    """After refreshing from within a frame, the session targets the
    top-level browsing context again."""
    session.url = inline("<div id=foo>")
    session.switch_frame(create_frame())
    # Inside the frame the top-level element is not reachable.
    with pytest.raises(NoSuchElementException):
        session.find.css("#foo", all=False)
    response = refresh(session)
    assert_success(response)
    # Back at top level, the element is found again.
    session.find.css("#foo", all=False)
|
scheib/chromium
|
third_party/blink/web_tests/external/wpt/webdriver/tests/refresh/refresh.py
|
Python
|
bsd-3-clause
| 3,290 | 0.000608 |
#!/usr/bin/python
import os
import re
from lxml import etree as et
import pcbmode.config as config
from . import messages as msg
# pcbmode modules
from . import utils
from .point import Point
def makeExcellon(manufacturer='default'):
    """Generate an Excellon (NC drill) file from the board's drills layer.

    Reads the drills layer from the board SVG, converts it into an Excellon
    program and writes it to the production build directory.

    @keyword manufacturer: key into config.cfg['manufacturers'] used to pick
                           the output filename extension
    """
    ns = {'pcbmode': config.cfg['ns']['pcbmode'],
          'svg': config.cfg['ns']['svg']}

    # Open the board's SVG and locate the drills sheet
    svg_in = utils.openBoardSVG()
    drills_layer = svg_in.find("//svg:g[@pcbmode:sheet='drills']",
                               namespaces=ns)

    excellon = Excellon(drills_layer)

    # Build the output path: <build>/production/<name>_rev_<rev>_drills.<ext>
    base_dir = os.path.join(config.cfg['base-dir'],
                            config.cfg['locations']['build'],
                            'production')
    base_name = "%s_rev_%s" % (config.brd['config']['name'],
                               config.brd['config']['rev'])
    filename_info = config.cfg['manufacturers'][manufacturer]['filenames']['drills']
    add = '_%s.%s' % ('drills',
                      filename_info['plated'].get('ext') or 'txt')
    filename = os.path.join(base_dir, base_name + add)

    with open(filename, "wb") as f:
        for line in excellon.getExcellon():
            # BUG FIX: getExcellon() yields text (str) lines, but the file is
            # opened in binary mode; writing str raised TypeError on Python 3.
            # Excellon content is plain ASCII, so encode explicitly (a no-op
            # on Python 2, where str.encode('ascii') returns str).
            f.write(line.encode('ascii'))
class Excellon():
    """Builds an Excellon (NC drill) program from the drill paths found in
    the 'drills' layer of a board SVG.
    """

    def __init__(self, svg):
        """Collect drill locations grouped by diameter and pre-build the
        program sections.

        @param svg: lxml element of the board's drills layer
        """
        self._svg = svg
        self._ns = {'pcbmode': config.cfg['ns']['pcbmode'],
                    'svg': config.cfg['ns']['svg']}

        # Get all drill paths except for the ones used in the drill-index
        drill_paths = self._svg.findall(".//svg:g[@pcbmode:type='component-shapes']//svg:path",
                                        namespaces=self._ns)

        drills_dict = {}
        for drill_path in drill_paths:
            diameter = drill_path.get('{'+config.cfg['ns']['pcbmode']+'}diameter')
            location = self._getLocation(drill_path)
            if diameter not in drills_dict:
                drills_dict[diameter] = {'locations': []}
            drills_dict[diameter]['locations'].append(location)

        self._preamble = self._createPreamble()
        self._content = self._createContent(drills_dict)
        self._postamble = self._createPostamble()

    def getExcellon(self):
        """Return the complete Excellon program as a list of text lines."""
        return (self._preamble +
                self._content +
                self._postamble)

    def _createContent(self, drills):
        """Emit the tool table followed by the drill hits for each tool.

        The dict is iterated twice; it is not mutated between the loops
        (only values are annotated), so tool numbers and drill blocks stay
        consistent. Insertion order is guaranteed on Python 3.7+.
        """
        ex = []
        for i, diameter in enumerate(drills):
            # Drill (tool) index must be greater than 0
            drills[diameter]['index'] = i + 1
            ex.append("T%dC%s\n" % (i + 1, diameter))

        ex.append('M95\n')  # End of a part program header

        for diameter in drills:
            ex.append("T%s\n" % drills[diameter]['index'])
            for coord in drills[diameter]['locations']:
                ex.append(self._getPoint(coord))

        return ex

    def _createPreamble(self):
        """Standard program header: metric units, trailing zeros, absolute."""
        ex = []
        ex.append('M48\n')        # Beginning of a part program header
        ex.append('METRIC,TZ\n')  # Metric, trailing zeros
        ex.append('G90\n')        # Absolute mode
        ex.append('M71\n')        # Metric measuring mode
        return ex

    def _createPostamble(self):
        """Standard program footer."""
        ex = []
        ex.append('M30\n')  # End of Program, rewind
        return ex

    def _getLocation(self, path):
        """
        Returns the location of a path, factoring in all the transforms of
        its ancestors, and its own transform
        """
        location = Point()

        # We need to get the transforms of all ancestors that have
        # one in order to get the location correctly
        ancestors = path.xpath("ancestor::*[@transform]")
        for ancestor in ancestors:
            transform = ancestor.get('transform')
            transform_data = utils.parseTransform(transform)
            # Add them up
            location += transform_data['location']

        # Add the transform of the path itself
        transform = path.get('transform')
        if transform is not None:  # idiom: identity test, was '!= None'
            transform_data = utils.parseTransform(transform)
            location += transform_data['location']

        return location

    def _getPoint(self, point):
        """
        Converts a Point type into an Excellon coordinate

        NOTE(review): the y coordinate is negated -- presumably converting
        from SVG's downward y axis to board coordinates; verify against
        generated output.
        """
        return "X%.6fY%.6f\n" % (point.x, -point.y)
|
ddm/pcbmode
|
pcbmode/utils/excellon.py
|
Python
|
mit
| 4,661 | 0.00708 |
#!/usr/bin/env python
# -*- coding: utf-8; py-indent-offset:4 -*-
###############################################################################
#
# Copyright (C) 2015-2020 Daniel Rodriguez
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from datetime import datetime
import backtrader as bt
class MetaRollOver(bt.DataBase.__class__):
    """Metaclass for RollOver: copies the timeframe/compression of the
    first data feed handed to the constructor onto the new instance."""

    def __init__(cls, name, bases, dct):
        '''Class has already been created ... register'''
        # Initialize the class
        super(MetaRollOver, cls).__init__(name, bases, dct)

    def donew(cls, *args, **kwargs):
        '''Intercept const. to copy timeframe/compression from 1st data'''
        # Create the object and set the params in place
        _obj, args, kwargs = super(MetaRollOver, cls).donew(*args, **kwargs)

        if args:
            # Inherit clocking parameters from the first (active) feed.
            _obj.p.timeframe = args[0]._timeframe
            _obj.p.compression = args[0]._compression

        return _obj, args, kwargs
class RollOver(bt.with_metaclass(MetaRollOver, bt.DataBase)):
    '''Class that rolls over to the next future when a condition is met

    Params:

      - ``checkdate`` (default: ``None``)

        This must be a *callable* with the following signature::

          checkdate(dt, d):

        Where:

          - ``dt`` is a ``datetime.datetime`` object
          - ``d`` is the current data feed for the active future

        Expected Return Values:

          - ``True``: as long as the callable returns this, a switchover can
            happen to the next future

            If a commodity expires on the 3rd Friday of March, ``checkdate``
            could return ``True`` for the entire week in which the expiration
            takes place.

          - ``False``: the expiration cannot take place

      - ``checkcondition`` (default: ``None``)

        **Note**: This will only be called if ``checkdate`` has returned
        ``True``

        If ``None`` this will evaluate to ``True`` (execute roll over)
        internally

        Else this must be a *callable* with this signature::

          checkcondition(d0, d1)

        Where:

          - ``d0`` is the current data feed for the active future
          - ``d1`` is the data feed for the next expiration

        Expected Return Values:

          - ``True``: roll-over to the next future

            Following with the example from ``checkdate``, this could say that
            the roll-over can only happend if the *volume* from ``d0`` is
            already less than the volume from ``d1``

          - ``False``: the expiration cannot take place
    '''

    params = (
        # ('rolls', []), # array of futures to roll over
        ('checkdate', None),  # callable
        ('checkcondition', None),  # callable
    )

    def islive(self):
        '''Returns ``True`` to notify ``Cerebro`` that preloading and runonce
        should be deactivated'''
        return True

    def __init__(self, *args):
        # Positional args are the future feeds, in roll order.
        self._rolls = args

    def start(self):
        """Start all underlying feeds and set up the rolling state."""
        super(RollOver, self).start()
        for d in self._rolls:
            d.setenvironment(self._env)
            d._start()

        # put the references in a separate list to have pops
        self._ds = list(self._rolls)
        self._d = self._ds.pop(0) if self._ds else None
        self._dexp = None  # the just-expired feed, still being drained
        self._dts = [datetime.min for xx in self._ds]

    def stop(self):
        """Stop all underlying feeds."""
        super(RollOver, self).stop()
        for d in self._rolls:
            d.stop()

    def _gettz(self):
        '''To be overriden by subclasses which may auto-calculate the
        timezone'''
        if self._rolls:
            return self._rolls[0]._gettz()
        return bt.utils.date.Localizer(self.p.tz)

    def _checkdate(self, dt, d):
        # Delegate to the user-supplied callable; default: never roll.
        if self.p.checkdate is not None:
            return self.p.checkdate(dt, d)

        return False

    def _checkcondition(self, d0, d1):
        # Delegate to the user-supplied callable; default: always roll.
        if self.p.checkcondition is not None:
            return self.p.checkcondition(d0, d1)

        return True

    def _load(self):
        """Deliver the next bar from the active future, rolling when due."""
        while self._d is not None:
            _next = self._d.next()
            if _next is None:  # no values yet, more will come
                continue

            if _next is False:  # no values from current data src
                if self._ds:
                    # Active feed exhausted: promote the next future.
                    self._d = self._ds.pop(0)
                    self._dts.pop(0)
                else:
                    self._d = None
                continue

            dt0 = self._d.datetime.datetime()  # current dt for active data

            # Synchronize other datas using dt0
            for i, d_dt in enumerate(zip(self._ds, self._dts)):
                d, dt = d_dt
                while dt < dt0:
                    if d.next() is None:
                        continue
                    self._dts[i] = dt = d.datetime.datetime()

            # Move expired future as much as needed
            # NOTE(review): this loop advances the expired feed until it is
            # exhausted or catches up with dt0 -- relies on the feed
            # eventually returning False; confirm for live feeds.
            while self._dexp is not None:
                if not self._dexp.next():
                    self._dexp = None
                    break

                if self._dexp.datetime.datetime() < dt0:
                    continue

            if self._dexp is None and self._checkdate(dt0, self._d):
                # rule has been met ... check other factors only if 2 datas
                # still there
                if self._ds and self._checkcondition(self._d, self._ds[0]):
                    # Time to switch to next data
                    self._dexp = self._d
                    self._d = self._ds.pop(0)
                    self._dts.pop(0)

            # Fill the line and tell we die
            self.lines.datetime[0] = self._d.lines.datetime[0]
            self.lines.open[0] = self._d.lines.open[0]
            self.lines.high[0] = self._d.lines.high[0]
            self.lines.low[0] = self._d.lines.low[0]
            self.lines.close[0] = self._d.lines.close[0]
            self.lines.volume[0] = self._d.lines.volume[0]
            self.lines.openinterest[0] = self._d.lines.openinterest[0]
            return True

        # Out of the loop -> self._d is None, no data feed to return from
        return False
|
mementum/backtrader
|
backtrader/feeds/rollover.py
|
Python
|
gpl-3.0
| 6,892 | 0 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.7 on 2017-12-23 08:59
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: updates the 'source' field choices on the
    event log models (all choice lists must stay textually identical)."""

    dependencies = [
        ('events', '0003_auto_20171221_0336'),
    ]

    operations = [
        migrations.AlterField(
            model_name='dailyproductivitylog',
            name='source',
            field=models.CharField(choices=[('api', 'Api'), ('ios', 'Ios'), ('android', 'Android'), ('mobile', 'Mobile'), ('web', 'Web'), ('user_excel', 'User_Excel'), ('text_message', 'Text_Message')], max_length=50),
        ),
        migrations.AlterField(
            model_name='sleeplog',
            name='source',
            field=models.CharField(choices=[('api', 'Api'), ('ios', 'Ios'), ('android', 'Android'), ('mobile', 'Mobile'), ('web', 'Web'), ('user_excel', 'User_Excel'), ('text_message', 'Text_Message')], max_length=50),
        ),
        migrations.AlterField(
            model_name='supplementlog',
            name='source',
            field=models.CharField(choices=[('api', 'Api'), ('ios', 'Ios'), ('android', 'Android'), ('mobile', 'Mobile'), ('web', 'Web'), ('user_excel', 'User_Excel'), ('text_message', 'Text_Message')], default='web', max_length=50),
        ),
        migrations.AlterField(
            model_name='useractivitylog',
            name='source',
            field=models.CharField(choices=[('api', 'Api'), ('ios', 'Ios'), ('android', 'Android'), ('mobile', 'Mobile'), ('web', 'Web'), ('user_excel', 'User_Excel'), ('text_message', 'Text_Message')], default='web', max_length=50),
        ),
        migrations.AlterField(
            model_name='usermoodlog',
            name='source',
            field=models.CharField(choices=[('api', 'Api'), ('ios', 'Ios'), ('android', 'Android'), ('mobile', 'Mobile'), ('web', 'Web'), ('user_excel', 'User_Excel'), ('text_message', 'Text_Message')], default='web', max_length=50),
        ),
    ]
|
jeffshek/betterself
|
events/migrations/0004_auto_20171223_0859.py
|
Python
|
mit
| 1,984 | 0.00252 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
    """Initial migration for the 'saving' app: creates Movement and Tag and
    links them with a many-to-many relation."""

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Movement',
            fields=[
                ('id', models.AutoField(serialize=False, verbose_name='ID', primary_key=True, auto_created=True)),
                ('date_pub', models.DateField(verbose_name='Data inserimento', auto_now_add=True)),
                ('text', models.CharField(verbose_name='Descrizione', max_length=200)),
                ('amount', models.DecimalField(decimal_places=2, max_digits=10)),
                ('currency', models.CharField(choices=[('EUR', 'EUR'), ('USD', 'USD')], max_length=3)),
                ('owner', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Tag',
            fields=[
                ('id', models.AutoField(serialize=False, verbose_name='ID', primary_key=True, auto_created=True)),
                ('word', models.CharField(max_length=50)),
                ('slug', models.CharField(max_length=100)),
            ],
        ),
        migrations.AddField(
            model_name='movement',
            name='tag',
            field=models.ManyToManyField(to='saving.Tag'),
        ),
    ]
|
shelag/piggybank
|
saving/migrations/0001_initial.py
|
Python
|
mit
| 1,468 | 0.004087 |
###############################################################################
# Name: ed_cmdbar.py #
# Purpose: Creates a small slit panel that holds small controls for searching #
# and other actions. #
# Author: Cody Precord <cprecord@editra.org> #
# Copyright: (c) 2008 Cody Precord <staff@editra.org> #
# License: wxWindows License #
###############################################################################
"""
This class creates a custom panel that can hide and show different controls
based an id value. The panel is generally between 24-32 pixels in height but
can grow to fit the controls inserted in it. The the background is painted with
a gradient using system defined colors.
@summary: The buffers CommandBar control with search/goto line/command entry
"""
__author__ = "Cody Precord <cprecord@editra.org>"
__svnid__ = "$Id: ed_cmdbar.py 67402 2011-04-06 13:34:14Z CJP $"
__revision__ = "$Revision: 67402 $"
#--------------------------------------------------------------------------#
# Imports
import os
import sys
import glob
import re
import wx
# Local Imports
import util
import ed_glob
import ed_search
import ed_event
import ed_msg
import ebmlib
import eclib
from profiler import Profile_Get, Profile_Set
_ = wx.GetTranslation
#--------------------------------------------------------------------------#
# Close Button Bitmap
from extern.embeddedimage import PyEmbeddedImage
XButton = PyEmbeddedImage(
"iVBORw0KGgoAAAANSUhEUgAAAA4AAAAOCAIAAACQKrqGAAAAA3NCSVQICAjb4U/gAAAB6UlE"
"QVQokW2SvWsTYRjAn7tcctdLc7kmxtJqj8ZECIqi7eJHhywVxLWLSBctXQKFOhSE0IJKB0EH"
"wf/BwUkEBxEFFR0cSoei1ZaSJjQ56Zn0vDd3977v8zrcUTr0mZ6PH8+39Onxw97Xz0FnL2w1"
"4DhJnbLU4ZHs1Snpza0bk1cum0MFLvwoJmg/pmjs6bW7a5u7StDZM8Zu0v0W3W/HAGMAgJxF"
"pmYaxumc+nNLDlsNUBKceNJAKj27KI2OIWfImWKVcnMPuKr53QOmJsVfWz7sSZ+pqZWJ7L26"
"YpUUq5SfX1YrE4XZRR+pwikAKAAgBBMQkPevkuMVWdPz88sAIGs6+sR5+/IwlwIA0CXBrsM2"
"toPm/ZMrz2RNBwD0SaO+4G/9AACeG41R9o8wL6CuLwXs6Jow5Mz1OSJ3XMG4DAAiZIgidfb8"
"yOrzqC76RNb08Scv9HMXAAAoFyGNx0Kk2dt3I25nqbazVIvo/J05ABAsAMTETEYrX7pIbNv7"
"8iFZLLeePiKbG7TT9ta+y7lCY7UuM6oNGZ1mW3p9fXKqeq3/xz6wm8yNz8MRIyWRSg6amfTg"
"wHqzp+hW0XUcI3NCy6QBQGAYNRfVZYgJztxeH3LDilmd/vXxHVn/5m3/PvZd0mfKulU0q9P/"
"AeP28JG84F5KAAAAAElFTkSuQmCC")
#-----------------------------------------------------------------------------#
# Globals
# Window IDs for the bar buttons and the search option controls below.
ID_CLOSE_BUTTON = wx.NewId()
ID_SEARCH_NEXT = wx.NewId()
ID_SEARCH_PRE = wx.NewId()
ID_FIND_ALL = wx.NewId()
ID_MATCH_CASE = wx.NewId()
ID_WHOLE_WORD = wx.NewId()
ID_REGEX = wx.NewId()
#-----------------------------------------------------------------------------#
class CommandBarBase(eclib.ControlBar):
    """Base class for the slim command bars (search, goto line, command
    entry). Provides the shared close button, an optional right-click
    customization menu for toggling control visibility, and persistence of
    those visibility states via the user profile.
    """
    def __init__(self, parent):
        super(CommandBarBase, self).__init__(parent,
                                             style=eclib.CTRLBAR_STYLE_GRADIENT)
        if wx.Platform == '__WXGTK__':
            self.SetWindowStyle(eclib.CTRLBAR_STYLE_DEFAULT)

        self.SetVMargin(2, 2)

        # Attributes
        self._parent = parent
        self._menu = None           # lazily-built customization menu
        self._menu_enabled = True
        self.ctrl = None            # main control, set via SetControl
        self.close_b = eclib.PlateButton(self, ID_CLOSE_BUTTON,
                                         bmp=XButton.GetBitmap(),
                                         style=eclib.PB_STYLE_NOBG)

        # Setup
        self.AddControl(self.close_b, wx.ALIGN_LEFT)

        # Event Handlers
        self.Bind(wx.EVT_BUTTON, self.OnClose, self.close_b)
        self.Bind(wx.EVT_CONTEXT_MENU, self.OnContext)
        self.Bind(wx.EVT_MENU, self.OnContextMenu)

    def OnClose(self, evt):
        """Handles events from the buttons on the bar
        @param evt: Event that called this handler

        """
        e_id = evt.GetId()
        if e_id == ID_CLOSE_BUTTON:
            self.Hide()
        else:
            evt.Skip()

    def OnContext(self, evt):
        """Show the customization menu (built lazily on first use)"""
        if self._menu_enabled:
            if self._menu is None:
                # Lazy init the menu
                self._menu = wx.Menu(_("Customize"))
                # Ensure the label is disabled (wxMSW Bug)
                item = self._menu.GetMenuItems()[0]
                self._menu.Enable(item.GetId(), False)
                to_menu = list()
                for child in self.GetChildren():
                    if self.IsCustomizable(child):
                        to_menu.append(child)

                if len(to_menu):
                    to_menu.sort(key=wx.Window.GetLabel)
                    for item in to_menu:
                        if not item.GetLabel():
                            continue
                        self._menu.Append(item.GetId(),
                                          item.GetLabel(),
                                          kind=wx.ITEM_CHECK)
                        self._menu.Check(item.GetId(), item.IsShown())

            self.PopupMenu(self._menu)
        else:
            evt.Skip()

    def OnContextMenu(self, evt):
        """Hide and Show controls and persist the new layout"""
        e_id = evt.GetId()
        ctrl = self.FindWindowById(e_id)
        if ctrl is not None:
            self.ShowControl(ctrl.GetName(), not ctrl.IsShown())
            self.Layout()

            # Update the persistent configuration
            key = self.GetConfigKey()
            if key is not None:
                cfg = Profile_Get('CTRLBAR', default=dict())
                state = self.GetControlStates()
                cfg[key] = state

    def EnableMenu(self, enable=True):
        """Enable the popup customization menu
        @keyword enable: bool

        """
        self._menu_enabled = enable
        if not enable and self._menu is not None:
            self._menu.Destroy()
            self._menu = None

    def GetConfigKey(self):
        """Get the key to use for the layout config persistence.
        @return: string
        @note: override in subclasses

        """
        return None

    def GetControlStates(self):
        """Get the map of control name id's to their shown state True/False
        @return: dict()

        """
        state = dict()
        for child in self.GetChildren():
            if self.IsCustomizable(child):
                state[child.GetName()] = child.IsShown()
        return state

    def SetControlStates(self, state):
        """Set visibility state of the customizable controls
        @param state: dict(ctrl_name=bool)

        """
        # COMPAT FIX: was state.iteritems() (Python 2 only); items() works
        # on both Python 2 and 3.
        for name, show in state.items():
            self.ShowControl(name, show)
        self.Layout()

    def Hide(self):
        """Hides the control and notifies the parent
        @postcondition: commandbar is hidden
        @todo: don't reference nb directly here

        """
        super(CommandBarBase, self).Hide()
        self._parent.SendSizeEvent()
        nb = self._parent.GetNotebook()
        ctrl = nb.GetCurrentCtrl()
        if ctrl:
            ctrl.SetFocus()
        return True

    def ShowControl(self, ctrl_name, show=True):
        """Show/Hide a control (and the spacer that follows it)
        @param ctrl_name: string
        @note: assumes all left aligned controls

        """
        sizer = self.GetControlSizer()
        # IDIOM FIX: renamed the flag from 'next' (shadowed the builtin).
        found = False
        for item in sizer.GetChildren():
            if found:
                if item.IsSpacer():
                    item.Show(show)
                break
            if item.Window and item.Window.GetName() == ctrl_name:
                item.Show(show)
                found = True

    def IsCustomizable(self, ctrl):
        """Is the control of a type that can be customized
        @param ctrl: wx.Window
        @return: bool

        """
        ok = (ctrl is not self.close_b)
        ok = ok and (isinstance(ctrl, wx.CheckBox) or \
                     isinstance(ctrl, eclib.PlateButton))
        return ok

    def SetControl(self, ctrl):
        """Set the main control of this command bar
        @param ctrl: window

        """
        self.ctrl = ctrl

    def SetFocus(self):
        """Set the focus to the bar and its main control"""
        super(CommandBarBase, self).SetFocus()
        if self.ctrl is not None:
            self.ctrl.SetFocus()
#-----------------------------------------------------------------------------#
class SearchBar(CommandBarBase):
    """Commandbar for searching text in the current buffer."""
    def __init__(self, parent):
        super(SearchBar, self).__init__(parent)

        # Attributes
        self.SetControl(ed_search.EdSearchCtrl(self, wx.ID_ANY,
                                               menulen=5, size=(180, -1)))
        self._sctrl = self.ctrl.GetSearchController()

        # Setup
        f_lbl = wx.StaticText(self, label=_("Find") + u": ")
        t_bmp = wx.ArtProvider.GetBitmap(str(ed_glob.ID_DOWN), wx.ART_MENU)
        next_btn = eclib.PlateButton(self, ID_SEARCH_NEXT, _("Next"),
                                     t_bmp, style=eclib.PB_STYLE_NOBG,
                                     name="NextBtn")
        self.AddControl(f_lbl, wx.ALIGN_LEFT)
        self.AddControl(self.ctrl, wx.ALIGN_LEFT)
        self.AddControl(next_btn, wx.ALIGN_LEFT)

        t_bmp = wx.ArtProvider.GetBitmap(str(ed_glob.ID_UP), wx.ART_MENU)
        pre_btn = eclib.PlateButton(self, ID_SEARCH_PRE, _("Previous"),
                                    t_bmp, style=eclib.PB_STYLE_NOBG,
                                    name="PreBtn")
        self.AddControl(pre_btn, wx.ALIGN_LEFT)

        t_bmp = wx.ArtProvider.GetBitmap(str(ed_glob.ID_FIND), wx.ART_MENU)
        fa_btn = eclib.PlateButton(self, ID_FIND_ALL, _("Find All"),
                                   t_bmp, style=eclib.PB_STYLE_NOBG,
                                   name="FindAllBtn")
        self.AddControl(fa_btn)
        fa_btn.Show(False)  # Hide this button by default

        match_case = wx.CheckBox(self, ID_MATCH_CASE, _("Match Case"),
                                 name="MatchCase")
        match_case.SetValue(self.ctrl.IsMatchCase())
        self.AddControl(match_case, wx.ALIGN_LEFT)
        match_case.Show(False)  # Hide by default

        ww_cb = wx.CheckBox(self, ID_WHOLE_WORD,
                            _("Whole Word"), name="WholeWord")
        ww_cb.SetValue(self.ctrl.IsWholeWord())
        self.AddControl(ww_cb, wx.ALIGN_LEFT)

        regex_cb = wx.CheckBox(self, ID_REGEX, _("Regular Expression"),
                               name="RegEx")
        regex_cb.SetValue(self.ctrl.IsRegEx())
        self.AddControl(regex_cb, wx.ALIGN_LEFT)

        # HACK: workaround bug in mac control that resets size to
        #       that of the default variant after any text has been
        #       typed in it. Note it reports the best size as the default
        #       variant and causes layout issues. wxBUG
        if wx.Platform == '__WXMAC__':
            self.ctrl.SetSizeHints(180, 16, 180, 16)

        # Event Handlers
        self.Bind(wx.EVT_WINDOW_DESTROY, self.OnDestroy)
        self.Bind(wx.EVT_BUTTON, self.OnButton)
        self.Bind(wx.EVT_CHECKBOX, self.OnCheck)
        ed_msg.Subscribe(self.OnThemeChange, ed_msg.EDMSG_THEME_CHANGED)
        self._sctrl.RegisterClient(self)

        # Set user customizable layout
        state = Profile_Get('CTRLBAR', default=dict())
        cfg = state.get(self.GetConfigKey(), dict())
        self.SetControlStates(cfg)

    def OnDestroy(self, evt):
        """Cleanup message subscriptions when the bar is destroyed"""
        if evt.GetId() == self.GetId():
            ed_msg.Unsubscribe(self.OnThemeChange)
            self._sctrl.RemoveClient(self)

    def OnButton(self, evt):
        """Handle button clicks for the next/previous buttons
        @param evt: wx.CommandEvent

        """
        e_id = evt.GetId()
        if e_id in [ID_SEARCH_NEXT, ID_SEARCH_PRE]:
            self.ctrl.DoSearch(e_id == ID_SEARCH_NEXT)
        elif e_id == ID_FIND_ALL:
            self.ctrl.FindAll()
        else:
            evt.Skip()

    def OnCheck(self, evt):
        """Set search options for match case, regex, ect...
        @param evt: wx.CommandEvent

        """
        e_id = evt.GetId()
        if e_id in (ID_MATCH_CASE, ID_REGEX, ID_WHOLE_WORD):
            ctrl = self.FindWindowById(e_id)
            # IDIOM FIX: identity tests, was '!= None' comparisons.
            if ctrl is not None:
                if e_id == ID_MATCH_CASE:
                    flag = eclib.AFR_MATCHCASE
                elif e_id == ID_WHOLE_WORD:
                    flag = eclib.AFR_WHOLEWORD
                else:
                    flag = eclib.AFR_REGEX

                if self.ctrl is not None:
                    if ctrl.GetValue():
                        self.ctrl.SetSearchFlag(flag)
                    else:
                        self.ctrl.ClearSearchFlag(flag)
        else:
            evt.Skip()

    def NotifyOptionChanged(self, evt):
        """Callback for L{ed_search.SearchController} to notify of update
        to the find options.
        @param evt: eclib.finddlg.FindEvent

        """
        self.FindWindowById(ID_MATCH_CASE).SetValue(evt.IsMatchCase())
        self.FindWindowById(ID_REGEX).SetValue(evt.IsRegEx())
        self.FindWindowById(ID_WHOLE_WORD).SetValue(evt.IsWholeWord())

    def OnThemeChange(self, msg):
        """Update icons when the theme has changed
        @param msg: Message Object

        """
        # IDIOM FIX: renamed locals from 'next'/'pre' ('next' shadowed the
        # builtin).
        next_btn = self.FindWindowById(ID_SEARCH_NEXT)
        if next_btn:
            t_bmp = wx.ArtProvider.GetBitmap(str(ed_glob.ID_DOWN), wx.ART_MENU)
            next_btn.SetBitmapLabel(t_bmp)
            next_btn.SetBitmapHover(t_bmp)
            next_btn.Update()
            next_btn.Refresh()

        pre_btn = self.FindWindowById(ID_SEARCH_PRE)
        if pre_btn:
            t_bmp = wx.ArtProvider.GetBitmap(str(ed_glob.ID_UP), wx.ART_MENU)
            pre_btn.SetBitmapLabel(t_bmp)
            pre_btn.SetBitmapHover(t_bmp)
            pre_btn.Update()
            pre_btn.Refresh()

    def GetConfigKey(self):
        """Get the key to use for the layout config persistence.
        @return: string

        """
        return 'SearchBar'
#-----------------------------------------------------------------------------#
class CommandEntryBar(CommandBarBase):
    """Commandbar for editor command entry and execution."""
    def __init__(self, parent):
        super(CommandEntryBar, self).__init__(parent)

        # Attributes
        self.SetControl(CommandExecuter(self, wx.ID_ANY, size=(150, -1)))

        # Setup: "Command" label, the entry control, and an (initially
        # empty) info label on the right.
        cmd_lbl = wx.StaticText(self, label=_("Command") + ": ")
        self.info_lbl = wx.StaticText(self, label="")
        self.AddControl(cmd_lbl, wx.ALIGN_LEFT)
        self.AddControl(self.ctrl, 1, wx.ALIGN_LEFT)
        self.AddControl(self.info_lbl, wx.ALIGN_RIGHT)

        # HACK: workaround bug in mac control that resets size to
        #       that of the default variant after any text has been
        #       typed in it. Note it reports the best size as the default
        #       variant and causes layout issues. wxBUG
        if wx.Platform == '__WXMAC__':
            self.ctrl.SetSizeHints(200, 16, -1, 16)

        # No controls worth customizing -> disable the context menu
        self.EnableMenu(False)
#-----------------------------------------------------------------------------#
class GotoLineBar(CommandBarBase):
    """Commandbar for Goto Line function"""
    def __init__(self, parent):
        super(GotoLineBar, self).__init__(parent)

        # Attributes: the line-number entry jumps the notebook's current
        # buffer to the requested line.
        self.SetControl(LineCtrl(self, wx.ID_ANY,
                                 self._parent.nb.GetCurrentCtrl,
                                 size=(100, -1)))

        # Setup
        self.EnableMenu(False)
        go_lbl = wx.StaticText(self, label=_("Goto Line") + ": ")
        self.AddControl(go_lbl, wx.ALIGN_LEFT)
        self.AddControl(self.ctrl, wx.ALIGN_LEFT)

        # HACK: workaround bug in mac control that resets size to
        #       that of the default variant after any text has been
        #       typed in it. Note it reports the best size as the default
        #       variant and causes layout issues. wxBUG
        if wx.Platform == '__WXMAC__':
            self.ctrl.SetSizeHints(100, 16, 100, 16)
#-----------------------------------------------------------------------------#
class CommandExecuter(eclib.CommandEntryBase):
    """Part of the Vi emulation, opens a minibuffer to execute EX commands.
    @note: based on search ctrl so we get the nice rounded edges on wxmac.
    """
    # Command patterns: buffer navigation ([count]n/N), window navigation
    # ([count]nw/nW), write-then-go-to-buffer (w[count]n/N) and relative
    # line jumps (+N / -N).
    RE_GO_BUFFER = re.compile('[0-9]*[nN]{1,1}')
    RE_GO_WIN = re.compile('[0-9]*n[wW]{1,1}')
    RE_WGO_BUFFER = re.compile('w[0-9]*[nN]')
    RE_NGO_LINE = re.compile('[+-][0-9]+')

    def __init__(self, parent, id_, size=wx.DefaultSize):
        """Initializes the CommandExecuter
        @param parent: parent window (the command bar)
        @param id_: window id
        @keyword size: initial control size
        """
        super(CommandExecuter, self).__init__(parent, id_, size=size,
                                              style=wx.TE_PROCESS_ENTER|wx.WANTS_CHARS)

        # Attributes
        # Command history ring: list of commands (most recent first), the
        # current browse index, and the value typed before browsing began.
        self._history = dict(cmds=[''], index=-1, lastval='')
        if not hasattr(sys, 'frozen'):
            self._curdir = os.path.abspath(os.curdir) + os.sep
        else:
            # Frozen (bundled) app: fall back to the user's home directory
            self._curdir = wx.GetHomeDir() + os.sep

        # Platform-specific completion popup implementation
        if wx.Platform == '__WXMAC__':
            self._popup = PopupList(self)
            self.Bind(wx.EVT_SET_FOCUS, self.OnSetFocus)
            self.Bind(wx.EVT_KILL_FOCUS, self.OnKillFocus)
        else:
            self._popup = PopupWinList(self)

        self.Bind(wx.EVT_WINDOW_DESTROY, self.OnDestroy, self)
        self.Bind(ed_event.EVT_NOTIFY, self.OnPopupNotify)

        # Message handlers: keep the working directory in sync with the
        # active buffer.
        ed_msg.Subscribe(self._UpdateCwd, ed_msg.EDMSG_UI_NB_CHANGED)
        ed_msg.Subscribe(self._UpdateCwd, ed_msg.EDMSG_FILE_SAVED)

    def OnDestroy(self, evt):
        """Cleanup message subscriptions when this window is destroyed
        @param evt: wx.EVT_WINDOW_DESTROY
        """
        if evt.GetId() == self.GetId():
            ed_msg.Unsubscribe(self._UpdateCwd)
        evt.Skip()

    def _AdjustSize(self):
        """Checks width of text as its added and dynamically resizes
        the control as needed.
        @todo: re-enable after resizing issue can be resolved
        """
        pass
        # ext = self.GetTextExtent(self.GetValue())[0]
        # curr_w, curr_h = self.GetClientSizeTuple()
        # if ext > curr_w * .5:
        #     max_w = self.GetParent().GetClientSize().GetWidth() * .8
        #     nwidth = min(ext * 1.3, max_w)
        #     pwidth = self._popup.GetBestSize()[0]
        #     if pwidth > nwidth:
        #         nwidth = pwidth
        #     self.SetClientSize((nwidth, curr_h))
        #     self._popup.SetSize((nwidth, -1))
        #     self.GetParent().Layout()
        # elif ((curr_w > ext * 1.18) and curr_w > 150):
        #     nwidth = max(ext * 1.18, 150)
        #     self.SetClientSize((nwidth, curr_h))
        #     self.GetParent().Layout()
        # else:
        #     pass

    def _AdjustValue(self, val):
        """Adjust value of input string as autocomp provides new values
        @param val: val to use as base for adjustment
        """
        # Only the portion after the command word (e.g. 'cd ') is completed
        cval = self.GetValue().split(' ', 1)
        if val.startswith(cval[-1]) or val.startswith('~'):
            self.AppendText(val.replace(cval[-1], '', 1))
        else:
            self.SetValue(" ".join([cval[0], val]))
        self.SetInsertionPoint(self.GetLastPosition())

    def _UpdateCwd(self, msg):
        """Update the current working directory to that of the current
        buffer.
        @param msg: Message Object
        """
        # Only update if we are the currently active window
        tlp = self.GetTopLevelParent()
        if tlp.IsActive():
            ctrl = tlp.GetNotebook().GetCurrentCtrl()
            fname = ctrl.GetFileName()
            if len(fname):
                self._curdir = os.path.dirname(fname)

    def ChangeDir(self, cmd):
        """Change to a directory based on cd command
        @param cmd: cd path
        """
        path = cmd.replace('cd', '', 1).strip()
        if not os.path.isabs(path):
            if path.startswith('..'):
                path = os.path.abspath(path)
            elif path.startswith('~'):
                path = path.replace('~', wx.GetHomeDir(), 1)
            else:
                path = os.path.join(self._curdir, path)

        if os.path.exists(path) and os.path.isdir(path):
            if os.access(path, os.R_OK):
                os.chdir(path)
                self._curdir = os.path.abspath(os.path.curdir) + os.sep
            else:
                # Doesn't have permissions: report in status bar and beep
                ed_msg.PostMessage(ed_msg.EDMSG_UI_SB_TXT,
                                   (ed_glob.SB_INFO,
                                    _("Can't change directory to: %s") % path))
                wx.Bell()
                self.Clear()
        else:
            # Invalid path
            self.Clear()
            wx.Bell()

    def CommandPush(self, cmd):
        """Push a command to the stack popping as necessary to
        keep stack size less than MAX (currently 25 commands).
        @param cmd: command string to push
        @todo: redo this to be more like the code in my terminal project
        """
        cmd = cmd.strip()
        if not len(cmd):
            return

        # Evict the oldest entry before inserting, and skip consecutive
        # duplicates of the most recent command.
        if len(self._history['cmds']) > 25:
            self._history['cmds'].pop()

        if cmd != self._history['cmds'][0]:
            self._history['cmds'].insert(0, cmd)

        self._history['index'] = -1

    def EditCommand(self, cmd):
        """Perform an edit related command
        @param cmd: command string to execute
        """
        # e fname: edit file
        cmd = cmd[1:].strip()
        frame = self.GetTopLevelParent()
        cmd = ebmlib.GetPathFromURI(cmd)
        if not os.path.isabs(cmd):
            cmd = os.path.join(self._curdir, cmd)

        if ebmlib.PathExists(cmd):
            frame.DoOpen(ed_glob.ID_COMMAND_LINE_OPEN, cmd)
        else:
            # Path does not exist yet: open a new empty page for it
            frame.nb.OpenPage(ebmlib.GetPathName(cmd), ebmlib.GetFileName(cmd))

    def ExecuteCommand(self, cmd_str):
        """Interprets and executes a command then hides the control
        @param cmd_str: Command string to execute
        """
        frame = self.GetTopLevelParent()
        cmd = cmd_str.strip().lstrip(':')
        if cmd in ['x', 'ZZ']:
            cmd = 'wq'

        if cmd.startswith(u'w'):
            # Any 'w' command saves the current buffer first
            frame.OnSave(wx.MenuEvent(wx.wxEVT_COMMAND_MENU_SELECTED,
                                      ed_glob.ID_SAVE))
            if self.RE_WGO_BUFFER.match(cmd):
                self.GoBuffer(cmd[1:])
            elif cmd == 'wq':
                self.Quit()
        elif cmd.startswith(u'e '):
            self.EditCommand(cmd)
        elif cmd.rstrip() == u'e!':
            # Revert buffer to last saved state
            ctrl = frame.nb.GetCurrentCtrl()
            ctrl.RevertToSaved()
        elif self.RE_GO_WIN.match(cmd):
            self.GoWindow(cmd)
        elif re.match(self.RE_GO_BUFFER, cmd):
            self.GoBuffer(cmd)
        elif cmd.isdigit() or self.RE_NGO_LINE.match(cmd):
            ctrl = frame.nb.GetCurrentCtrl()
            cline = ctrl.GetCurrentLine()
            if cmd[0] in '+-':
                # Relative jump: parse the offset arithmetically instead of
                # eval()'ing user-typed input (bug fix: eval on user input).
                offset = int(cmd[1:])
                line = cline + offset if cmd[0] == '+' else cline - offset
            else:
                # Absolute jump: commands are 1-based, buffer is 0-based
                line = int(cmd) - 1
            ctrl.GotoLine(line)
        elif cmd.startswith('cd '):
            self.ChangeDir(cmd)
        elif cmd == 'q':
            self.Quit()
        else:
            # Unrecognized command: beep and leave the bar visible
            wx.Bell()
            return

        self.CommandPush(cmd_str)
        self.GetParent().Hide()

    def GetHistCommand(self, pre=True):
        """Look up a command from the history of recent commands
        @param pre: Get previous (default) or get Next
        @note: pre moves right in stack, next moves left in stack
        """
        val = self.GetValue().strip()
        # Remember what the user was typing so Down can return to it
        if val not in self._history['cmds']:
            self._history['lastval'] = val

        if pre:
            if self._history['index'] < len(self._history['cmds']) - 1\
               and self._history['index'] < 25:
                self._history['index'] += 1

            index = self._history['index']
            cmd = self._history['cmds'][index]
        else:
            if self._history['index'] > -1:
                self._history['index'] -= 1

            index = self._history['index']
            if index == -1:
                # Walked off the front of history: restore in-progress text
                cmd = self._history['lastval']
            else:
                cmd = self._history['cmds'][index]

        self.SetValue(cmd)
        self.SelectAll()

    def GoBuffer(self, cmd):
        """Go to next/previous buffer in notebook
        @param cmd: cmd string [0-9]*[nN]
        """
        count = cmd[0:-1]
        cmd = cmd[-1]
        if count.isdigit():
            count = int(count)
        else:
            count = 1

        frame = self.GetTopLevelParent()
        numpage = frame.nb.GetPageCount()
        for x in xrange(min(count, numpage)):
            cpage = frame.nb.GetPageIndex(frame.nb.GetCurrentPage())
            # Stop at the first/last page instead of wrapping around
            if (cpage == 0 and cmd == 'N') or \
               (cpage + 1 == numpage and cmd == 'n'):
                break
            frame.nb.AdvanceSelection(cmd == 'n')

    def GoWindow(self, cmd):
        """Go to next/previous open window
        @param cmd: cmd string [0-9]*n[wW]
        """
        count = cmd[0:-1]
        cmd = cmd[-1]
        if count.isdigit():
            count = int(count)
        else:
            count = 1

        wins = wx.GetApp().GetMainWindows()
        pid = self.GetTopLevelParent().GetId()
        win = 0
        # Locate this window's index in the application's window list
        # (fix: removed dead store of the window id into widx)
        for nwin in xrange(len(wins)):
            if pid == wins[nwin].GetId():
                win = nwin
                break

        # 'W' moves forward, 'w' moves backward; clamp to the valid range
        if cmd == 'W':
            widx = win + count
        else:
            widx = win - count

        if widx < 0:
            widx = 0
        elif widx >= len(wins):
            widx = len(wins) - 1

        self.GetParent().Hide()
        wins[widx].Raise()
        wx.CallAfter(wins[widx].nb.GetCurrentCtrl().SetFocus)

    def GetPaths(self, path, files=False):
        """Get a list of paths that are part of the given path by
        default it will only return directories.
        @param path: (partial) path to complete
        @keyword files: Get list of files too
        @return: sorted list of unique completion candidates
        """
        def append_slash(path):
            """Helper function that appends a slash to the path
            if it's a directory.
            """
            if os.path.isdir(path) and not path.endswith(os.sep):
                return path + os.sep
            return path

        curdir = self._curdir
        head, tail = os.path.split(path)
        head = os.path.expanduser(head)
        head = os.path.expandvars(head)
        head = os.path.join(curdir, head)
        if not os.path.isdir(head):
            return []

        # Return empty list if user does not have
        # read access to the directory
        if not os.access(head, os.R_OK):
            ed_msg.PostMessage(ed_msg.EDMSG_UI_SB_TXT,
                               (ed_glob.SB_INFO,
                                _("Access Denied: %s") % head))
            wx.Bell() # Beep to alert
            return list()

        # We expanded head, so trim the suggestion list of its head
        # so we can add the tail of the suggestion back to the original head
        try:
            candidates = [os.path.basename(p) for p in os.listdir(head)
                          if p.startswith(tail)]
            candidates = [append_slash(os.path.join(os.path.dirname(path), cand))
                          for cand in candidates]
            if not files:
                candidates = [cand for cand in candidates if os.path.isdir(cand)]
        except OSError:
            ed_msg.PostMessage(ed_msg.EDMSG_UI_SB_TXT,
                               (ed_glob.SB_INFO, _("Invalid Path")))
            candidates = list()

        return sorted(list(set(candidates)))

    def ListDir(self):
        """List the next directory from the current cmd path"""
        cmd = self.GetValue()
        if cmd.startswith('cd '):
            cstr = 'cd '
        elif cmd.startswith('e '):
            cstr = 'e '
        else:
            # Completion only applies to 'cd' and 'e' commands
            return

        # 'e' completes files as well as directories; 'cd' only directories
        cmd = cmd.replace(cstr, u'', 1).strip()
        paths = self.GetPaths(cmd, cstr == 'e ')
        self._popup.SetChoices(paths)
        if len(paths):
            self._popup.SetupPosition(self)
            if not self._popup.IsShown():
                self._popup.Show()

        self.SetInsertionPoint(self.GetLastPosition())

    def OnEnter(self, evt):
        """Get the currently entered command string and execute it.
        @postcondition: ctrl is cleared and command is executed
        """
        if self._popup.HasSuggestions() and self._popup.HasSelection():
            # Accept the highlighted completion instead of executing
            psel = self._popup.GetSelection()
            if self.GetValue().split(' ', 1)[-1].strip() != psel:
                self._AdjustValue(psel)
                self._popup.Hide()
                return

        cmd = self.GetValue()
        self.Clear()
        self.ExecuteCommand(cmd)
        if self._popup.IsShown():
            self._popup.Hide()

    def OnKeyDown(self, evt):
        """Records the key sequence that has been entered and
        performs actions based on that key sequence.
        @param evt: event that called this handler
        """
        e_key = evt.GetKeyCode()
        cmd = self.GetValue()
        if e_key == wx.WXK_UP:
            # Up/Down cycle the popup when visible, otherwise the history
            if self._popup.HasSuggestions():
                self._popup.AdvanceSelection(False)
            else:
                self.GetHistCommand(pre=True)
        elif e_key == wx.WXK_DOWN:
            if self._popup.HasSuggestions():
                self._popup.AdvanceSelection(True)
            else:
                self.GetHistCommand(pre=False)
        elif e_key == wx.WXK_SPACE and not len(cmd):
            # Swallow space key when command is empty
            pass
        elif e_key == wx.WXK_TAB:
            # Provide Tab Completion or swallow key
            if cmd.startswith('cd ') or cmd.startswith('e '):
                if self._popup.HasSuggestions():
                    self._AdjustValue(self._popup.GetSelection())
                self.ListDir()
            else:
                pass
        elif e_key == wx.WXK_ESCAPE:
            # First Escape dismisses the popup, second dismisses the bar
            if self._popup.IsShown():
                self._popup.Hide()
            else:
                self.Clear()
                self.GetParent().Hide()
        else:
            evt.Skip()

    def OnKeyUp(self, evt):
        """Adjust size as needed when characters are entered
        @param evt: event that called this handler
        """
        e_key = evt.GetKeyCode()
        if e_key == wx.WXK_ESCAPE:
            evt.Skip()
            return

        # Show the cwd hint in the bar's info label for bare commands
        val = self.GetValue()
        cwd_info = ""
        if val.strip() in ['cwd', 'e', 'cd']:
            cwd_info = " " + _(u"cwd: ") + self._curdir
        self.Parent.info_lbl.SetLabel(cwd_info)

        if self._popup.IsShown():
            if not len(val):
                self._popup.Hide()
            else:
                wx.CallAfter(self.UpdateAutoComp)
        else:
            if self._popup.HasSuggestions():
                self._AdjustValue(self._popup.GetSelection())
            self.ListDir()

        self._AdjustSize()
        evt.Skip()

    def OnPopupNotify(self, evt):
        """Receive the selections from the popup list
        @param evt: event that called this handler
        """
        val = evt.GetValue()
        self._AdjustValue(val)

    def OnKillFocus(self, evt):
        """Hide the popup when we lose focus
        @param evt: event that called this handler
        """
        self._popup.Hide()
        evt.Skip()

    def OnSetFocus(self, evt):
        """Ensure caret is at end when focus is reset
        @param evt: event that called this handler
        """
        self.SetInsertionPoint(self.GetLastPosition())
        evt.Skip()

    def RestoreFocus(self):
        """Restore focus and cursor position
        @postcondition: ctrl has focus and cursor is moved to last position
        """
        self.SetInsertionPoint(self.GetLastPosition())
        self.SetFocus()

    def Quit(self):
        """Tell the editor to exit
        @postcondition: Editor begins exit, confirming file saves
        """
        wx.PostEvent(self.GetTopLevelParent(),
                     wx.CloseEvent(wx.wxEVT_CLOSE_WINDOW))

    def SetValue(self, value):
        """Overrides the controls default function to allow for automatic
        resizing of the control when text is added.
        @param value: string to set value of control to
        """
        super(CommandExecuter, self).SetValue(value)
        self._AdjustSize()

    def UpdateAutoComp(self):
        """Update the autocomp list for paths that best match current value"""
        self.ListDir()

    def WriteCommand(self, cstr):
        """Perform a file write related command
        @param cstr: The command string to execute
        @note: not yet implemented; planned commands below
        """
        # wn: write and edit next
        # wN: write and edit previous
        # wq: write and quit
#-----------------------------------------------------------------------------#
class LineCtrl(eclib.CommandEntryBase):
    """Custom integer entry control that the command bar uses to jump
    to a given line in the current document.
    """

    def __init__(self, parent, id_, get_doc, size=wx.DefaultSize):
        """Initializes the LineCtrl control and its attributes.
        @param get_doc: callback method for retrieving a reference to the
                        current document.
        """
        super(LineCtrl, self).__init__(parent, id_, u"", size=size,
                                       style=wx.TE_PROCESS_ENTER,
                                       validator=util.IntValidator(0, 65535))

        # Attributes
        self._last = 0          # placeholder for last requested line
        self.GetDoc = get_doc   # resolves the active buffer on demand

    def OnEnter(self, evt):
        """Processes the entered line number
        @param evt: Event that called this handler
        @type evt: wx.EVT_TEXT_ENTER
        """
        value = self.GetValue()
        if not value.isdigit():
            return

        # Displayed numbering is 1-based; buffer lines are 0-based.
        # Clamp the target to the document's line count.
        doc = self.GetDoc()
        target = min(int(value) - 1, doc.GetLineCount())
        doc.GotoLine(target)
        doc.SetFocus()
        self.GetParent().Hide()

    def OnKeyUp(self, evt):
        """Handle keyup events: Escape dismisses the hosting bar."""
        if evt.GetEventType() != wx.wxEVT_KEY_UP:
            evt.Skip()
            return

        if evt.GetKeyCode() == wx.WXK_ESCAPE:
            # TODO change to more safely determine the context
            # Currently control is only used in command bar
            self.GetParent().Hide()
        else:
            evt.Skip()
#-----------------------------------------------------------------------------#
# TODO: merge the common parts of these two classes into a single base class
class PopupListBase(object):
    """Functionality shared between the GTK/MSW and Mac popup lists"""

    def AdvanceSelection(self, next=True):
        """Advance the list selection
        @keyword next: goto the next or previous selection
        @type next: bool
        """
        idx = self._list.GetSelection() + (1 if next else -1)
        # Only move when the new index stays within the list bounds
        if 0 <= idx < self._list.GetCount():
            self._list.SetSelection(idx)

    def GetSelection(self):
        """Get the string that is currently selected in the list
        @return: string selection
        """
        return self._list.GetStringSelection()

    def HasSelection(self):
        """Tells whether anything in the list is selected"""
        return self._list.GetSelection() != wx.NOT_FOUND

    def HasSuggestions(self):
        """Tell whether the list is showing suggestions"""
        return self.IsShown() and self.ListCount() > 0

    def ListCount(self):
        """Number of elements in the popup list"""
        return self._list.GetCount()

    def GetListCtrl(self):
        """Get the ListBox control of the popupwindow"""
        return self._list

    def GetChoices(self):
        """Get the items as a list
        @return: list of strings
        """
        return self._list.GetStrings()

    def SetSelection(self, index):
        """Set the selection in the list by index
        @param index: zero based index to set selection by
        """
        self._list.SetSelection(index)
class PopupList(wx.MiniFrame, PopupListBase):
    """Popup window with a listbox in it.

    Used as the completion popup on wxMac (see CommandExecuter.__init__);
    other platforms use PopupWinList instead.
    """

    def __init__(self, parent, choices=list(), pos=wx.DefaultPosition):
        # Floating, taskbar-less frame; borderless popup style on Mac,
        # simple border elsewhere.
        style = wx.FRAME_NO_TASKBAR | wx.FRAME_FLOAT_ON_PARENT
        if wx.Platform == '__WXMAC__':
            style = style | wx.BORDER_NONE | wx.POPUP_WINDOW
        else:
            style = style | wx.SIMPLE_BORDER

        wx.MiniFrame.__init__(self, parent, pos=pos, style=style)
        PopupListBase.__init__(self)

        # Attributes
        # NOTE(review): the LC_* flags are wx.ListCtrl styles passed to a
        # wx.ListBox -- presumably ignored by the ListBox; verify against
        # the wx documentation.
        self._list = wx.ListBox(self, choices=choices,
                                style=wx.LC_REPORT | wx.LC_SINGLE_SEL |
                                      wx.LC_NO_HEADER | wx.NO_BORDER)

        # Layout
        self._list.SetWindowVariant(wx.WINDOW_VARIANT_SMALL)
        self.SetWindowVariant(wx.WINDOW_VARIANT_SMALL)
        sizer = wx.BoxSizer(wx.HORIZONTAL)
        sizer.Add(self._list, 1, wx.EXPAND)
        self.SetSizer(sizer)
        # Cap the popup height at roughly six rows of text
        txt_h = self.GetTextExtent('/')[1]
        self.SetMaxSize((-1, txt_h * 6))
        self.SetAutoLayout(True)

        # Event Handlers
        # Forward raw char events to the parent text control so typing
        # continues there while the popup is focused.
        self.Bind(wx.EVT_CHAR, lambda evt: parent.GetEventHandler().ProcessEvent(evt))
        self.Bind(wx.EVT_SET_FOCUS, self.OnFocus)
        self.Bind(wx.EVT_LISTBOX_DCLICK, self.OnSelection)
        self.Bind(wx.EVT_SIZE, self.OnSize)
        self._list.Bind(wx.EVT_KEY_UP, self.OnKeyUp)
        self._list.SetFocus()
        self.Hide()

    def __PostEvent(self):
        """Post notification of selection to parent
        @postcondition: selected string is posted to parent
        """
        val = self._list.GetStringSelection()
        evt = ed_event.NotificationEvent(ed_event.edEVT_NOTIFY,
                                         self.GetId(), val, self._list)
        wx.PostEvent(self.GetParent(), evt)
        self.ActivateParent()

    def ActivateParent(self):
        """Activate the parent window
        @postcondition: parent window is raised
        """
        parent = self.GetParent()
        parent.Raise()
        parent.SetFocus()

    def OnFocus(self, evt):
        """Raise and reset the focus to the parent window whenever
        we get focus.
        @param evt: event that called this handler
        """
        self.ActivateParent()
        self.GetParent().SetFocus()
        evt.Skip()

    def OnKeyUp(self, evt):
        """Process key up events in the control
        @param evt: event that called this handler
        """
        # Return accepts the current selection; all other keys propagate
        if evt.GetKeyCode() == wx.WXK_RETURN:
            self.__PostEvent()
        else:
            evt.Skip()

    def OnSelection(self, evt):
        """Handle a selection in list by posting the result to
        the parent.
        @param evt: Event that called this handler
        """
        self.__PostEvent()

    def OnSize(self, evt):
        """Resize the listbox"""
        # Widen the client area to account for the vertical scrollbar
        csz = self.GetClientSize()
        csz.SetWidth(csz.x + wx.SystemSettings.GetMetric(wx.SYS_VSCROLL_X))
        self._list.SetSize(csz)
        evt.Skip()

    def Show(self, show=True):
        """Adjust size of popup and then show it
        @keyword show: Should the window be shown or not
        """
        res = super(PopupList, self).Show(show)

        if res and show:
            self.ActivateParent()
            # Refresh on Mac to avoid paint artifacts behind the popup
            if wx.Platform == '__WXMAC__':
                self.GetParent().Refresh(False)

        return res

    def SetChoices(self, choices):
        """Set the available choices that are shown in the list
        @param choices: list of strings
        """
        # Preserve the selection index where possible, else reset to 0
        selection = self._list.GetSelection()
        self._list.SetItems(choices)
        count = self._list.GetCount()
        if selection == wx.NOT_FOUND or selection >= count:
            selection = 0
        if count > 0:
            self._list.SetSelection(selection)

    def SetStringSelection(self, text):
        """Set the list selection by using a string value
        @param text: string to select in list
        """
        self._list.SetStringSelection(text)

    def SetupPosition(self, cmd_ex):
        """Sets size and position of widget
        @param cmd_ex: CommandExecuter window
        """
        # Anchor the popup just below the text following the command word
        cmd = cmd_ex.GetValue()
        cmd = cmd.split(u' ', 1)[0]
        xpos = cmd_ex.GetTextExtent(cmd + u' ')[0]
        pos = cmd_ex.GetScreenPosition().Get()
        csize = cmd_ex.GetSize()
        self.SetPosition((pos[0] + xpos, pos[1] + csize[1]))
        self.ActivateParent()
#----------------------------------------------------------------------------#
class PopupWinList(wx.PopupWindow, PopupListBase):
    """Popuplist for Windows/GTK (wxMac uses PopupList instead)"""

    def __init__(self, parent, choices=list(), pos=wx.DefaultPosition):
        """Create the popup window and its list control"""
        wx.PopupWindow.__init__(self, parent)
        PopupListBase.__init__(self)

        # Attributes
        # NOTE(review): LC_* flags are wx.ListCtrl styles passed to a
        # wx.ListBox -- presumably ignored; verify against the wx docs.
        self._list = wx.ListBox(self, choices=choices, pos=(0, 0),
                                style=wx.LC_REPORT | wx.LC_SINGLE_SEL |
                                      wx.LC_NO_HEADER)

        # Layout
        sizer = wx.BoxSizer(wx.HORIZONTAL)
        sizer.Add(self._list, 0, wx.EXPAND)
        self.SetSizer(sizer)
        # Cap the popup height at roughly six rows of text
        txt_h = self.GetTextExtent('/')[1]
        self.SetMaxSize((-1, txt_h * 6))
        self.SetAutoLayout(True)

        # Event Handlers
        self.Bind(wx.EVT_SIZE, self.OnSize)

    def OnSize(self, evt):
        """Resize the list box to the correct size to fit."""
        # Widen the client area to account for the vertical scrollbar
        csz = self.GetClientSize()
        csz.SetWidth(csz.x + wx.SystemSettings.GetMetric(wx.SYS_VSCROLL_X))
        self._list.SetSize(csz)
        evt.Skip()

    def SetupPosition(self, cmd_ex):
        """Sets size and position of widget
        @param cmd_ex: CommandExecuter window
        """
        # Anchor the popup just below the text following the command word
        cmd = cmd_ex.GetValue()
        cmd = cmd.split(u' ', 1)[0]
        pos = cmd_ex.GetScreenPosition().Get()
        csize = cmd_ex.GetSize()
        xpos = cmd_ex.GetTextExtent(cmd)[0]
        self._list.SetInitialSize()
        self.SetInitialSize()
        self.SetPosition((pos[0] + xpos, pos[1] + csize[1]))

    def SetChoices(self, choices):
        """Set the available choices that are shown in the list
        @param choices: list of strings
        """
        # Preserve the selection index where possible, else reset to 0
        selection = self._list.GetSelection()
        self._list.SetItems(choices)
        count = self._list.GetCount()
        if selection == wx.NOT_FOUND or selection >= count:
            selection = 0
        if count > 0:
            self._list.SetSelection(selection)

    def Show(self, show=True):
        """Adjust size of popup and then show it
        @keyword show: Should the window be shown or not
        """
        res = super(PopupWinList, self).Show(show)

        # Ensure the child list is visible and sized before display
        self._list.Show()
        self._list.SetInitialSize()
        self.SetInitialSize()

        return res
|
beiko-lab/gengis
|
bin/Lib/site-packages/wx-2.8-msw-unicode/wx/tools/Editra/src/ed_cmdbar.py
|
Python
|
gpl-3.0
| 43,285 | 0.000924 |
__author__ = 'marcprengemann'
|
winterDroid/android-drawable-importer-intellij-plugin
|
json_generator/__init__.py
|
Python
|
apache-2.0
| 30 | 0 |
#!/usr/bin/env python3
# Copyright (c) 2016-2019 The Starwels developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test Hierarchical Deterministic wallet function."""
from test_framework.test_framework import StarwelsTestFramework
from test_framework.util import (
assert_equal,
connect_nodes_bi,
)
import shutil
import os
class WalletHDTest(StarwelsTestFramework):
    """Functional test of Hierarchical Deterministic (HD) wallet behavior:
    deterministic key derivation, backup/restore, and rescan recovery.
    """

    def set_test_params(self):
        # Two nodes on a fresh chain; node1 runs with an empty keypool so
        # every address is derived on demand.
        self.setup_clean_chain = True
        self.num_nodes = 2
        self.extra_args = [[], ['-keypool=0']]

    def run_test(self):
        tmpdir = self.options.tmpdir

        # Make sure can't switch off usehd after wallet creation
        self.stop_node(1)
        self.assert_start_raises_init_error(1, ['-usehd=0'], 'already existing HD wallet')
        self.start_node(1)
        connect_nodes_bi(self.nodes, 0, 1)

        # Make sure we use hd, keep masterkeyid (40 hex chars = hash160)
        masterkeyid = self.nodes[1].getwalletinfo()['hdmasterkeyid']
        assert_equal(len(masterkeyid), 40)

        # create an internal key
        change_addr = self.nodes[1].getrawchangeaddress()
        change_addrV = self.nodes[1].validateaddress(change_addr)
        assert_equal(change_addrV["hdkeypath"], "m/0'/1'/0'") #first internal child key

        # Import a non-HD private key in the HD wallet
        non_hd_add = self.nodes[0].getnewaddress()
        self.nodes[1].importprivkey(self.nodes[0].dumpprivkey(non_hd_add))

        # This should be enough to keep the master key and the non-HD key
        self.nodes[1].backupwallet(tmpdir + "/hd.bak")
        #self.nodes[1].dumpwallet(tmpdir + "/hd.dump")

        # Derive some HD addresses and remember the last
        # Also send funds to each add
        self.nodes[0].generate(101)
        hd_add = None
        num_hd_adds = 300
        for i in range(num_hd_adds):
            hd_add = self.nodes[1].getnewaddress()
            hd_info = self.nodes[1].validateaddress(hd_add)
            # External chain path: m/0'/0'/i'
            assert_equal(hd_info["hdkeypath"], "m/0'/0'/"+str(i)+"'")
            assert_equal(hd_info["hdmasterkeyid"], masterkeyid)
            self.nodes[0].sendtoaddress(hd_add, 1)
            self.nodes[0].generate(1)
        self.nodes[0].sendtoaddress(non_hd_add, 1)
        self.nodes[0].generate(1)

        # create an internal key (again)
        change_addr = self.nodes[1].getrawchangeaddress()
        change_addrV = self.nodes[1].validateaddress(change_addr)
        assert_equal(change_addrV["hdkeypath"], "m/0'/1'/1'") #second internal child key

        self.sync_all()
        assert_equal(self.nodes[1].getbalance(), num_hd_adds + 1)

        self.log.info("Restore backup ...")
        self.stop_node(1)
        # we need to delete the complete regtest directory
        # otherwise node1 would auto-recover all funds in flag the keypool keys as used
        shutil.rmtree(os.path.join(tmpdir, "node1/regtest/blocks"))
        shutil.rmtree(os.path.join(tmpdir, "node1/regtest/chainstate"))
        shutil.copyfile(os.path.join(tmpdir, "hd.bak"), os.path.join(tmpdir, "node1/regtest/wallets/wallet.dat"))
        self.start_node(1)

        # Assert that derivation is deterministic
        hd_add_2 = None
        for _ in range(num_hd_adds):
            hd_add_2 = self.nodes[1].getnewaddress()
            hd_info_2 = self.nodes[1].validateaddress(hd_add_2)
            assert_equal(hd_info_2["hdkeypath"], "m/0'/0'/"+str(_)+"'")
            assert_equal(hd_info_2["hdmasterkeyid"], masterkeyid)
        assert_equal(hd_add, hd_add_2)
        connect_nodes_bi(self.nodes, 0, 1)
        self.sync_all()

        # Needs rescan
        self.stop_node(1)
        self.start_node(1, extra_args=self.extra_args[1] + ['-rescan'])
        assert_equal(self.nodes[1].getbalance(), num_hd_adds + 1)

        # Try a RPC based rescan
        self.stop_node(1)
        shutil.rmtree(os.path.join(tmpdir, "node1/regtest/blocks"))
        shutil.rmtree(os.path.join(tmpdir, "node1/regtest/chainstate"))
        # NOTE(review): this copies to node1/regtest/wallet.dat while the
        # earlier restore used node1/regtest/wallets/wallet.dat -- confirm
        # both locations are honored by the node version under test.
        shutil.copyfile(os.path.join(tmpdir, "hd.bak"), os.path.join(tmpdir, "node1/regtest/wallet.dat"))
        self.start_node(1, extra_args=self.extra_args[1])
        connect_nodes_bi(self.nodes, 0, 1)
        self.sync_all()
        out = self.nodes[1].rescanblockchain(0, 1)
        assert_equal(out['start_height'], 0)
        assert_equal(out['stop_height'], 1)
        out = self.nodes[1].rescanblockchain()
        assert_equal(out['start_height'], 0)
        assert_equal(out['stop_height'], self.nodes[1].getblockcount())
        assert_equal(self.nodes[1].getbalance(), num_hd_adds + 1)

        # send a tx and make sure its using the internal chain for the changeoutput
        txid = self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 1)
        outs = self.nodes[1].decoderawtransaction(self.nodes[1].gettransaction(txid)['hex'])['vout']
        keypath = ""
        for out in outs:
            if out['value'] != 1:
                # The non-1 output is the change; it must come from m/0'/1'
                keypath = self.nodes[1].validateaddress(out['scriptPubKey']['addresses'][0])['hdkeypath']

        assert_equal(keypath[0:7], "m/0'/1'")
# Standard functional-test entry point
if __name__ == '__main__':
    WalletHDTest().main()
|
starwels/starwels
|
test/functional/wallet_hd.py
|
Python
|
mit
| 5,199 | 0.003847 |
# -*- coding: utf-8 -*-
"""
This Example will show you how to use CallbackData
"""
from telebot.callback_data import CallbackData, CallbackDataFilter
from telebot import types
from telebot.async_telebot import AsyncTeleBot
from telebot.asyncio_filters import AdvancedCustomFilter
# Bot token placeholder -- replace with a real token before running
API_TOKEN = 'TOKEN'

# Demo catalog rendered as inline keyboard buttons
PRODUCTS = [
    {'id': '0', 'name': 'xiaomi mi 10', 'price': 400},
    {'id': '1', 'name': 'samsung s20', 'price': 800},
    {'id': '2', 'name': 'iphone 13', 'price': 1300}
]

bot = AsyncTeleBot(API_TOKEN)

# Factory that builds/parses callback_data strings of the form
# 'products:<product_id>'
products_factory = CallbackData('product_id', prefix='products')
def products_keyboard():
    """Build an inline keyboard with one row per product in PRODUCTS."""
    rows = []
    for product in PRODUCTS:
        button = types.InlineKeyboardButton(
            text=product['name'],
            callback_data=products_factory.new(product_id=product["id"])
        )
        rows.append([button])
    return types.InlineKeyboardMarkup(keyboard=rows)
def back_keyboard():
    """Build a single-button keyboard that navigates back to the list."""
    back_btn = types.InlineKeyboardButton(text='⬅', callback_data='back')
    return types.InlineKeyboardMarkup(keyboard=[[back_btn]])
class ProductsCallbackFilter(AdvancedCustomFilter):
    """Custom filter matching callback queries against a CallbackDataFilter
    (registered under the 'config' keyword used by the handlers below).
    """
    key = 'config'

    async def check(self, call: types.CallbackQuery, config: CallbackDataFilter):
        # Delegate matching to the filter built by the CallbackData factory
        return config.check(query=call)
@bot.message_handler(commands=['products'])
async def products_command_handler(message: types.Message):
    """Reply to /products with the product-list inline keyboard."""
    await bot.send_message(message.chat.id, 'Products:', reply_markup=products_keyboard())
# Only product with field - product_id = 2
@bot.callback_query_handler(func=None, config=products_factory.filter(product_id='2'))
async def product_one_callback(call: types.CallbackQuery):
    """Special-case handler: product id 2 is reported as unavailable."""
    await bot.answer_callback_query(callback_query_id=call.id, text='Not available :(', show_alert=True)
# Any other products
@bot.callback_query_handler(func=None, config=products_factory.filter())
async def products_callback(call: types.CallbackQuery):
    """Edit the message to show details for the selected product."""
    data = products_factory.parse(callback_data=call.data)
    product = PRODUCTS[int(data['product_id'])]
    text = (f"Product name: {product['name']}\n"
            f"Product price: {product['price']}")
    await bot.edit_message_text(chat_id=call.message.chat.id,
                                message_id=call.message.message_id,
                                text=text, reply_markup=back_keyboard())
@bot.callback_query_handler(func=lambda c: c.data == 'back')
async def back_callback(call: types.CallbackQuery):
    """Return from a product-details view back to the product list."""
    await bot.edit_message_text(chat_id=call.message.chat.id, message_id=call.message.message_id,
                                text='Products:', reply_markup=products_keyboard())
# Register the custom filter so the 'config=' kwarg on handlers is honored,
# then start long polling.
bot.add_custom_filter(ProductsCallbackFilter())

import asyncio
asyncio.run(bot.polling())
|
eternnoir/pyTelegramBotAPI
|
examples/asynchronous_telebot/callback_data_examples/simple_products_example.py
|
Python
|
gpl-2.0
| 2,831 | 0.003535 |
from warpnet_framework.warpnet_client import *
from warpnet_framework.warpnet_common_params import *
from warpnet_experiment_structs import *
from twisted.internet import reactor
from datetime import *
from numpy import log10, linspace
import time
import sys
# Modulation settings per trial: [modOrderHeader, modOrderPayload,
# pktGenPeriod, ?] -- the 4th entry is unused in this script as shown;
# TODO confirm its meaning against the struct definitions.
mods = [[2,2,2100,78-1]]
pktLens = [1412]; #range(1412, 91, -240) #[1440:-120:120]-28

time_on = 5*60          # seconds of traffic per trial
time_off = 0            # idle gap between trials (seconds)
numItrs = 1             # number of experiment iterations
fileName_offset = 50    # base suffix for result/log file names

#cfo = 2**20
cfo = 2**17             # artificial Tx carrier frequency offset (fixed point)
txGain = 55             # transmit gain setting
minChanMag_D = 20       # minimum pilot channel magnitude at the receiver
class ScriptMaster:
def startup(self):
stderr_log = open("exp_err.log", "a")
stderr_log.write("\r\n####################################################################\r\n")
stderr_log.write("%s started at %s\r\n" % (sys.argv[0], datetime.now()))
stderr_log.write("####################################################################\r\n\r\n")
stderr_log.flush()
sys.stderr = stderr_log
er_log = MyDataLogger('results/twoNode_realCFO_v%d_logging.txt' % (fileName_offset))
er_log.log("%s" % (datetime.now()) )
er_log.log("CFO: %d, Time on: %d, time off: %d, numIttrs: %d, fn_offset: %d\r\n" % (cfo, time_on, time_off, numItrs, fileName_offset))
er_log.log("Continuous test of actual CFO on emulator kits\r\n")
registerWithServer()
nodes = dict()
#WARP Nodes
createNode(nodes, Node(0, NODE_PCAP))
createNode(nodes, Node(2, NODE_PCAP))
#BER processor "node"
createNode(nodes, Node(98, NODE_PCAP)) #PHY logger
connectToServer(nodes)
controlStruct = ControlStruct()
nodes[0].addStruct('controlStruct', controlStruct)
nodes[2].addStruct('controlStruct', controlStruct)
phyCtrl0 = PHYctrlStruct()
phyCtrl1 = PHYctrlStruct()
nodes[0].addStruct('phyCtrlStruct', phyCtrl0)
nodes[2].addStruct('phyCtrlStruct', phyCtrl1)
cmdStructStart = CommandStruct(COMMANDID_STARTTRIAL, 0)
nodes[0].addStruct('cmdStructStart', cmdStructStart)
cmdStructStop = CommandStruct(COMMANDID_STOPTRIAL, 0)
nodes[0].addStruct('cmdStructStop', cmdStructStop)
cmdStructResetPER = CommandStruct(COMMANDID_RESET_PER, 0)
nodes[0].addStruct('cmdStructResetPER', cmdStructResetPER)
nodes[2].addStruct('cmdStructResetPER', cmdStructResetPER)
perStruct0 = ObservePERStruct()
perStruct1 = ObservePERStruct()
nodes[0].addStruct('perStruct', perStruct0)
nodes[2].addStruct('perStruct', perStruct1)
logParams = LogParams()
nodes[98].addStruct('logParams', logParams)
sendRegistrations(nodes)
controlStruct.packetGeneratorPeriod = mods[0][2]
controlStruct.packetGeneratorLength = pktLens[0]
controlStruct.channel = 9
controlStruct.txPower = txGain
controlStruct.modOrderHeader = mods[0][0]
controlStruct.modOrderPayload = mods[0][1]
#PHYCtrol params:
#param0: txStartOut delay
#param1: artificial txCFO
#param2: minPilotChanMag
#param3:
# [0-0x01]: PHYCTRL_BER_EN: enable BER reporting
# [1-0x02]: PHYCTRL_CFO_EN: enable CFO reporting
# [2-0x04]: PHYCTRL_PHYDUMP_EN: enable Rx PHY dumping
# [3-0x08]: PHYTRCL_EXTPKTDET_EN: use only ext pkt det
# [4-0x10]: PHYCTRL_COOP_EN: 0=nonCoop, 1=coopMode
# [5-0x20]: PHYCTRL_CFO_CORR_EN: 0=bypass CFO correction, 1=enable CFO correction
# [6-0x40]: PHYCTRL_SWAP_ANT: 0=AntA, 1=AntA_Swapped
#param4:
# [ 7:0]: src re-Tx delay
# [ 7:0]: relay AF Tx delay (only used when in COOP_TESTING)
# [15:8]: relay DF Tx delay (only used when in COOP_TESTING)
#param5: (0 ignores)
# [17: 0]: AGC IIR coef FB
#param6: (0 ignores)
# [31:16]: H_BA minEstMag (UFix16_15)
# [15: 0]: H_AA minEstMag (UFix16_15)
#param7: (0 ignores)
# [27:16]: AF blank stop
# [11: 0]: AF blank start
#param8: (0 ignores)
# [17: 0]: AGC IIR coef Gain
#param9: (Tx pkt types)
# [31: 0]: OR'd combination of PHYCTRL_TX_*
phyCtrl0.param0 = 32+12
phyCtrl0.param1 = cfo #(2**19 ~ 1.2e-4)
phyCtrl0.param2 = 0xFFF
# phyCtrl0.param3 = (PHYCTRL_COOP_EN | PHYCTRL_BER_EN)
phyCtrl0.param3 = (0) #PHYCTRL_COOP_EN)
# phyCtrl0.param4 = (251-2) #v21 timing; #######reTxDly/FFToffset: 251/12, 249/10
phyCtrl0.param4 = 255 #v22 timing
phyCtrl0.param5 = 0
phyCtrl0.param6 = 0
phyCtrl0.param7 = 0
phyCtrl0.param8 = 0
# phyCtrl0.param9 = (PHYCTRL_TX_NC | PHYCTRL_TX_DF | PHYCTRL_TX_AF | PHYCTRL_TX_AFGH | PHYCTRL_TX_DFGH | PHYCTRL_TX_NCMHOP)
phyCtrl0.param9 = (PHYCTRL_TX_NC)
phyCtrl1.param0 = 0
phyCtrl1.param1 = 0
phyCtrl1.param2 = minChanMag_D
# phyCtrl1.param3 = (PHYCTRL_CFO_CORR_EN | PHYCTRL_PHYDUMP_EN)
phyCtrl1.param3 = (PHYCTRL_PHYDUMP_EN)
phyCtrl1.param4 = 0
phyCtrl1.param5 = 0x20000
phyCtrl1.param6 = 1000 | (1000<<16)
phyCtrl1.param7 = 0
phyCtrl1.param8 = 0x20000
phyCtrl1.param9 = 0
nodes[0].sendToNode('phyCtrlStruct')
nodes[2].sendToNode('phyCtrlStruct')
nodes[0].sendToNode('controlStruct')
nodes[2].sendToNode('controlStruct')
nodes[0].sendToNode('cmdStructResetPER')
nodes[2].sendToNode('cmdStructResetPER')
trialInd = -1 #Increment before first trial, which should be trialNum=0
pktLen = pktLens[0];
#Experiment Loops
for ittr in range(1,numItrs+1):
print("Starting iteration %d of %d at %s" % (ittr, numItrs, datetime.now().strftime("%H:%M:%S")))
trialInd += 1
#Stop any traffic that might be running
nodes[0].sendToNode('cmdStructStop')
logParams.fileSuffix = fileName_offset+trialInd
logParams.param0 = ittr
logParams.param1 = 0
logParams.param2 = 0
logParams.param3 = 0
nodes[98].sendToNode('logParams')
#Reset the PER counters at all nodes
nodes[0].sendToNode('cmdStructResetPER')
nodes[2].sendToNode('cmdStructResetPER')
#Start the trial
nodes[0].sendToNode('cmdStructStart')
#Run until minTime elapses
time.sleep(time_on)
nodes[0].sendToNode('cmdStructStop')
time.sleep(time_off)
if not reactor.running:
return
print("############################################")
print("############# Experiment Done! #############")
print("############################################")
reactor.callFromThread(reactor.stop)
# Wire up the experiment driver and hand control to the Twisted reactor.
sm = ScriptMaster()
stdio.StandardIO(CmdReader()) #if interactive shell is needed
# sm.startup runs once the client connects to the WARPnet server.
factory = WARPnetClient(sm.startup);
# NOTE(review): assumes the WARPnet server listens locally on 10101 -- confirm.
reactor.connectTCP('localhost', 10101, factory)
# Blocks until reactor.stop() is called at the end of the experiment.
reactor.run()
|
shailcoolboy/Warp-Trinity
|
ResearchApps/Measurement/warpnet_coprocessors/phy_logger/examples/twoNode_cfoLogging.py
|
Python
|
bsd-2-clause
| 6,164 | 0.034069 |
import os
from cement.core import backend, handler, output
from cement.utils import test, shell
from scilifelab.pm import PmApp
from data import setup_data_files
from empty_files import setup_empty_files
## Set default configuration
# Anchor all test data paths at this file's directory.
filedir = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
# One defaults section per configuration area consumed by the app.
config_defaults = backend.defaults('production', 'archive', 'config', 'project','log', 'db')
# Filesystem layout used by the test fixtures.
config_defaults['production']['root'] = os.path.join(filedir, "data", "production")
config_defaults['archive']['root'] = os.path.join(filedir, "data", "archive")
config_defaults['project']['root'] = os.path.join(filedir, "data", "projects")
config_defaults['project']['repos'] = os.path.join(filedir, "data", "repos")
config_defaults['config']['ignore'] = ["slurm*", "tmp*"]
config_defaults['log']['level'] = "INFO"
config_defaults['log']['file'] = os.path.join(filedir, "data", "log", "pm.log")
# Database connection settings pointing at dedicated test databases.
config_defaults['db']['url'] = "localhost"
config_defaults['db']['user'] = "u"
config_defaults['db']['password'] = "p"
config_defaults['db']['samples'] = "samples-test"
config_defaults['db']['flowcells'] = "flowcells-test"
config_defaults['db']['projects'] = "projects-test"
def safe_makedir(dname):
    """Create directory *dname* (including parents) if it does not exist.

    :param dname: path of the directory to create
    :returns: *dname*, whether the directory was created or already existed
    :raises OSError: if creation fails for any reason other than the
        directory already existing (e.g. permission denied)
    """
    if not os.path.exists(dname):
        try:
            os.makedirs(dname)
        except OSError:
            # A concurrent process may have created the directory between the
            # exists() check and makedirs(); only re-raise genuine failures.
            if not os.path.isdir(dname):
                raise
    else:
        # Single-argument print() behaves identically on Python 2 and 3;
        # the original Python 2 print statement is a SyntaxError under Py3.
        print("Directory %s already exists" % dname)
    return dname
## Output handler for tests
class PmTestOutputHandler(output.CementOutputHandler):
    """Output handler used by the tests: dumps rendered data to stdout.

    Each non-empty value in *data* is read via ``getvalue()``, i.e. it is
    assumed to be a StringIO-like buffer -- TODO confirm against callers.
    """
    class Meta:
        # Label under which cement registers this handler.
        label = 'pmtest'

    def render(self, data, template=None):
        """Print every non-empty entry of *data* as ``key => value``."""
        for key in data:
            if data[key]:
                # Single-argument print() is valid on both Python 2 and 3;
                # the original Python 2 print statement breaks under Py3.
                print("{} => {}".format(key, data[key].getvalue()))
## Testing app
class PmTestApp(PmApp):
    # App subclass wired for tests: no CLI arguments, no config files read
    # from disk -- configuration comes solely from the module-level defaults.
    class Meta:
        argv = []
        config_files = []
        config_defaults = config_defaults
        output_handler = PmTestOutputHandler
## Main pm test
class PmTest(test.CementTestCase):
    """Base test case for pm: creates fixture data and runs the app."""
    app_class = PmTestApp
    app = None
    # Files a test run is expected to produce; populated by subclasses.
    OUTPUT_FILES = []
    def setUp(self):
        # Create the on-disk fixtures the tests operate on.
        setup_data_files()
        setup_empty_files()
    def _run_app(self):
        # Run self.app inside its logging context; always close the app so
        # repeated runs in one test session do not leak handlers.
        try:
            self.app.setup()
            with self.app.log.log_setup.applicationbound():
                self.app.run()
            self.app.render(self.app._output_data)
        finally:
            self.app.close()
|
SciLifeLab/scilifelab
|
tests/pm/test_default.py
|
Python
|
mit
| 2,421 | 0.008674 |
# Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import pickle
from argparse import ArgumentParser
from unittest import mock
import pytest
from pytorch_lightning import Trainer
from pytorch_lightning.loggers import WandbLogger
from pytorch_lightning.utilities.exceptions import MisconfigurationException
from tests.helpers import BoringModel
@mock.patch("pytorch_lightning.loggers.wandb.wandb")
def test_wandb_logger_init(wandb):
    """Verify that basic functionality of wandb logger works.
    Wandb doesn't work well with pytest so we have to mock it out here."""
    # test wandb.init called when there is no W&B run
    wandb.run = None
    logger = WandbLogger(
        name="test_name", save_dir="test_save_dir", version="test_id", project="test_project", resume="never"
    )
    logger.log_metrics({"acc": 1.0})
    # Logger kwargs must be forwarded to wandb.init verbatim, with
    # version -> id and save_dir -> dir renames.
    wandb.init.assert_called_once_with(
        name="test_name", dir="test_save_dir", id="test_id", project="test_project", resume="never", anonymous=None
    )
    wandb.init().log.assert_called_once_with({"acc": 1.0})
    # test wandb.init and setting logger experiment externally
    wandb.run = None
    run = wandb.init()
    logger = WandbLogger(experiment=run)
    assert logger.experiment
    # test wandb.init not called if there is a W&B run
    wandb.init().log.reset_mock()
    wandb.init.reset_mock()
    wandb.run = wandb.init()
    logger = WandbLogger()
    # verify default resume value
    assert logger._wandb_init["resume"] == "allow"
    with pytest.warns(UserWarning, match="There is a wandb run already in progress"):
        _ = logger.experiment
    logger.log_metrics({"acc": 1.0}, step=3)
    # Only one init since reset_mock: the `wandb.run = wandb.init()` above.
    # The logger itself must not re-init when a run already exists.
    wandb.init.assert_called_once()
    # The trainer global step is logged alongside the metric.
    wandb.init().log.assert_called_once_with({"acc": 1.0, "trainer/global_step": 3})
    # continue training on same W&B run and offset step
    logger.finalize("success")
    logger.log_metrics({"acc": 1.0}, step=6)
    wandb.init().log.assert_called_with({"acc": 1.0, "trainer/global_step": 6})
    # log hyper parameters
    # None is stringified, nested dicts are flattened with '/' separators.
    logger.log_hyperparams({"test": None, "nested": {"a": 1}, "b": [2, 3, 4]})
    wandb.init().config.update.assert_called_once_with(
        {"test": "None", "nested/a": 1, "b": [2, 3, 4]}, allow_val_change=True
    )
    # watch a model
    logger.watch("model", "log", 10, False)
    wandb.init().watch.assert_called_once_with("model", log="log", log_freq=10, log_graph=False)
    assert logger.name == wandb.init().project_name()
    assert logger.version == wandb.init().id
@mock.patch("pytorch_lightning.loggers.wandb.wandb")
def test_wandb_pickle(wandb, tmpdir):
    """
    Verify that pickling trainer with wandb logger works.
    Wandb doesn't work well with pytest so we have to mock it out here.
    """
    # Stand-in for the object returned by wandb.init().
    class Experiment:
        id = "the_id"
        step = 0
        dir = "wandb"
        def project_name(self):
            return "the_project_name"
    wandb.run = None
    wandb.init.return_value = Experiment()
    logger = WandbLogger(id="the_id", offline=True)
    trainer = Trainer(default_root_dir=tmpdir, max_epochs=1, logger=logger)
    # Access the experiment to ensure it's created
    assert trainer.logger.experiment, "missing experiment"
    assert trainer.log_dir == logger.save_dir
    # Round-trip the whole trainer through pickle; the logger must survive.
    pkl_bytes = pickle.dumps(trainer)
    trainer2 = pickle.loads(pkl_bytes)
    # NOTE(review): WANDB_MODE is presumably set by offline=True above --
    # confirm; the env var is cleaned up at the end of this test.
    assert os.environ["WANDB_MODE"] == "dryrun"
    assert trainer2.logger.__class__.__name__ == WandbLogger.__name__
    assert trainer2.logger.experiment, "missing experiment"
    wandb.init.assert_called()
    # The configured run id must be passed through to wandb.init.
    assert "id" in wandb.init.call_args[1]
    assert wandb.init.call_args[1]["id"] == "the_id"
    del os.environ["WANDB_MODE"]
@mock.patch("pytorch_lightning.loggers.wandb.wandb")
def test_wandb_logger_dirs_creation(wandb, tmpdir):
    """Test that the logger creates the folders and files in the right place."""
    logger = WandbLogger(save_dir=str(tmpdir), offline=True)
    # Before the experiment is touched, no run metadata exists.
    assert logger.version is None
    assert logger.name is None
    # mock return values of experiment
    wandb.run = None
    logger.experiment.id = "1"
    logger.experiment.project_name.return_value = "project"
    # Accessing the experiment repeatedly must be idempotent.
    for _ in range(2):
        _ = logger.experiment
    assert logger.version == "1"
    assert logger.name == "project"
    # Creating the logger alone must not create any files in save_dir.
    assert str(tmpdir) == logger.save_dir
    assert not os.listdir(tmpdir)
    version = logger.version
    model = BoringModel()
    trainer = Trainer(default_root_dir=tmpdir, logger=logger, max_epochs=1, limit_train_batches=3, limit_val_batches=3)
    assert trainer.log_dir == logger.save_dir
    trainer.fit(model)
    # Checkpoints land under <save_dir>/<project>/<version>/checkpoints.
    assert trainer.checkpoint_callback.dirpath == str(tmpdir / "project" / version / "checkpoints")
    assert set(os.listdir(trainer.checkpoint_callback.dirpath)) == {"epoch=0-step=2.ckpt"}
    assert trainer.log_dir == logger.save_dir
@mock.patch("pytorch_lightning.loggers.wandb.wandb")
def test_wandb_log_model(wandb, tmpdir):
    """Test that the logger creates the folders and files in the right place."""
    wandb.run = None
    model = BoringModel()
    # test log_model=True
    # With log_model=True only the final/best checkpoint is uploaded once.
    logger = WandbLogger(log_model=True)
    logger.experiment.id = "1"
    logger.experiment.project_name.return_value = "project"
    trainer = Trainer(default_root_dir=tmpdir, logger=logger, max_epochs=2, limit_train_batches=3, limit_val_batches=3)
    trainer.fit(model)
    wandb.init().log_artifact.assert_called_once()
    # test log_model='all'
    # With 'all', one artifact per epoch is expected (2 epochs -> 2 uploads).
    wandb.init().log_artifact.reset_mock()
    wandb.init.reset_mock()
    logger = WandbLogger(log_model="all")
    logger.experiment.id = "1"
    logger.experiment.project_name.return_value = "project"
    trainer = Trainer(default_root_dir=tmpdir, logger=logger, max_epochs=2, limit_train_batches=3, limit_val_batches=3)
    trainer.fit(model)
    assert wandb.init().log_artifact.call_count == 2
    # test log_model=False
    wandb.init().log_artifact.reset_mock()
    wandb.init.reset_mock()
    logger = WandbLogger(log_model=False)
    logger.experiment.id = "1"
    logger.experiment.project_name.return_value = "project"
    trainer = Trainer(default_root_dir=tmpdir, logger=logger, max_epochs=2, limit_train_batches=3, limit_val_batches=3)
    trainer.fit(model)
    assert not wandb.init().log_artifact.called
    # test correct metadata
    # Force the new-wandb code path that attaches checkpoint metadata.
    import pytorch_lightning.loggers.wandb as pl_wandb
    pl_wandb._WANDB_GREATER_EQUAL_0_10_22 = True
    wandb.init().log_artifact.reset_mock()
    wandb.init.reset_mock()
    wandb.Artifact.reset_mock()
    logger = pl_wandb.WandbLogger(log_model=True)
    logger.experiment.id = "1"
    logger.experiment.project_name.return_value = "project"
    trainer = Trainer(default_root_dir=tmpdir, logger=logger, max_epochs=2, limit_train_batches=3, limit_val_batches=3)
    trainer.fit(model)
    # The artifact name embeds the run id; metadata mirrors ModelCheckpoint state.
    wandb.Artifact.assert_called_once_with(
        name="model-1",
        type="model",
        metadata={
            "score": None,
            "original_filename": "epoch=1-step=5-v3.ckpt",
            "ModelCheckpoint": {
                "monitor": None,
                "mode": "min",
                "save_last": None,
                "save_top_k": 1,
                "save_weights_only": False,
                "_every_n_train_steps": 0,
            },
        },
    )
def test_wandb_sanitize_callable_params(tmpdir):
    """
    Callback functions are not serializable. Therefore, we give them a chance
    to return something and, if the returned type is not accepted, use None.
    """
    opt = "--max_epochs 1".split(" ")
    parser = ArgumentParser()
    parser = Trainer.add_argparse_args(parent_parser=parser)
    params = parser.parse_args(opt)
    # A callable returning a plain value: sanitized to that value.
    def return_something():
        return "something"
    params.something = return_something
    # A callable returning another callable: sanitized to its __name__.
    def wrapper_something():
        return return_something
    params.wrapper_something_wo_name = lambda: lambda: "1"
    params.wrapper_something = wrapper_something
    params = WandbLogger._convert_params(params)
    params = WandbLogger._flatten_dict(params)
    params = WandbLogger._sanitize_callable_params(params)
    assert params["gpus"] == "None"
    assert params["something"] == "something"
    assert params["wrapper_something"] == "wrapper_something"
    assert params["wrapper_something_wo_name"] == "<lambda>"
@mock.patch("pytorch_lightning.loggers.wandb.wandb")
def test_wandb_logger_offline_log_model(wandb, tmpdir):
    """Constructing an offline logger with log_model=True must be rejected."""
    expected_message = "checkpoints cannot be uploaded in offline mode"
    with pytest.raises(MisconfigurationException, match=expected_message):
        WandbLogger(save_dir=str(tmpdir), offline=True, log_model=True)
|
williamFalcon/pytorch-lightning
|
tests/loggers/test_wandb.py
|
Python
|
apache-2.0
| 9,157 | 0.001747 |
# -*- coding: utf-8 -*-
#########################################################################
#
# Copyright (C) 2017 OSGeo
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
import sys
import requests
from django.core.management.base import BaseCommand
from geonode.geoserver.helpers import gs_catalog, ogc_server_settings
from geonode.layers.models import Layer
from geonode.people.models import Profile
def fix_layer_in_gwc(layer):
    """Re-POST the GeoWebCache (GWC) XML config for *layer*, adding the
    image/png8 mime format to the advertised formats.

    Fetches the layer's GWC layer XML over the REST API with the GeoServer
    admin credentials, patches the <mimeFormats> block, and posts it back.
    """
    # Single-argument print() is valid on both Python 2 and 3; the original
    # Python 2 print statements are SyntaxErrors under Py3.
    print('Fixing %s in GWC' % layer.alternate)
    headers = {'Content-Type': 'application/xml'}
    url = ogc_server_settings.public_url
    user = gs_catalog.username
    password = gs_catalog.password
    url_xml = "%sgwc/rest/layers/%s.xml" % (url, layer.alternate)
    print(url_xml)
    resp = requests.get(url=url_xml, auth=(user, password))
    xml = resp.content
    # The replacement string must match GWC's serialization exactly
    # (whitespace-sensitive); newlines are stripped before re-posting.
    xml_fixed = xml.replace('<mimeFormats>\n <string>image/png</string>\n </mimeFormats>', '<mimeFormats>\n <string>image/png</string>\n <string>image/png8</string>\n </mimeFormats>')
    data = xml_fixed.replace('\n', '')
    print(requests.post(url_xml, data=data, headers=headers, auth=(user, password)).text)
def is_gs_resource_valid(layer):
    """Return True if *layer* has a backing resource in GeoServer.

    The resource is looked up by name/store/workspace through the GeoServer
    catalog; a missing resource marks the GeoNode layer as broken.
    """
    gs_resource = gs_catalog.get_resource(
        layer.name,
        store=layer.store,
        workspace=layer.workspace)
    # bool() collapses the original if/else-on-truthiness into one expression.
    return bool(gs_resource)
class Command(BaseCommand):
    """
    Fixes migrated WorldMap layers (from 1.2 to 2.8.x).
    This includes:
    1) layer.save to generate syles, links and thumbnails and sync with sync_geofence
    2) fixes GWC
    The command detects also broken GeoNode layers (layer without a resource in GeoServer)
    """
    help = 'Fixes migrated WorldMap layers (from 1.2 to 2.8.x)'

    def add_arguments(self, parser):
        parser.add_argument(
            '--layername',
            dest='layername',
            default=None,
            help='Filter by a layername.',
        )
        parser.add_argument(
            '--owner',
            dest='owner',
            default=None,
            help='Filter by a owner.',
        )
        # Fix: handle() reads options['remove'] but the flag was never
        # declared, so every run hit a broken layer crashed with a KeyError.
        parser.add_argument(
            '--remove',
            dest='remove',
            action='store_true',
            default=False,
            help='Also delete layers detected as broken.',
        )

    def handle(self, **options):
        """Re-save every (optionally filtered) layer that has a valid
        GeoServer resource; collect -- and optionally delete -- broken ones."""
        if options['layername']:
            layers = Layer.objects.filter(name__icontains=options['layername'])
        else:
            layers = Layer.objects.all()
        if options['owner']:
            layers = layers.filter(owner=Profile.objects.filter(username=options['owner']))
        layers_count = layers.count()
        count = 0
        layer_errors = []
        for layer in layers:
            count += 1
            try:
                print('Fixing layer %s/%s: %s owned by %s' % (count,
                                                              layers_count,
                                                              layer.alternate,
                                                              layer.owner.username))
                if is_gs_resource_valid(layer):
                    print('Saving %s layer' % layer.alternate)
                    # save() regenerates styles, links and thumbnails and
                    # syncs GeoFence; then refresh the layer's GWC entry.
                    layer.save()
                    fix_layer_in_gwc(layer)
                else:
                    print('Layer %s is broken' % layer.alternate)
                    layer_errors.append(layer)
                    if options['remove']:
                        print('Removing this layer...')
                        layer.delete()
            except Exception:
                # Keep processing remaining layers on per-layer failures, but
                # do not swallow KeyboardInterrupt/SystemExit as the original
                # bare 'except:' did.
                print("Unexpected error: %s" % (sys.exc_info()[0],))
        print('\n***** Layers with errors: %s in a total of %s *****' % (len(layer_errors), layers_count))
        for layer_error in layer_errors:
            # Fix: the original printed the loop variable 'layer' (the last
            # layer processed) instead of 'layer_error'.
            print('%s by %s' % (layer_error.alternate, layer_error.owner.username))
|
cga-harvard/worldmap
|
worldmap/management/commands/fix_migrated_layers.py
|
Python
|
gpl-3.0
| 4,316 | 0.001854 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
A collection of helper functions used by the CLI and its Plugins.
"""
import imp
import importlib
import ipaddress
import json
import os
import re
import textwrap
import urllib.parse
from kazoo.client import KazooClient
from cli.exceptions import CLIException
def import_modules(package_paths, module_type):
    """
    Looks for python packages under `package_paths` and imports
    them as modules. Returns a dictionary of the basename of the
    `package_paths` to the imported modules.

    :param package_paths: filesystem paths of plugin packages
    :param module_type: namespace segment under "cli." (e.g. "plugins")
    :returns: dict mapping package basename -> imported module object
    """
    modules = {}
    for package_path in package_paths:
        # We put the imported module into the namespace of
        # "mesos.<module_type>.<>" to keep it from cluttering up
        # the import namespace elsewhere.
        package_name = os.path.basename(package_path)
        package_dir = os.path.dirname(package_path)
        module_name = "cli." + module_type + "." + package_name
        try:
            module = importlib.import_module(module_name)
        except Exception:
            # Fall back to loading directly from the package directory when
            # the dotted import fails. NOTE(review): `imp` is deprecated
            # since Python 3.4 -- importlib.util is the modern replacement,
            # but its loading semantics differ subtly; confirm before changing.
            obj, filename, data = imp.find_module(package_name, [package_dir])
            module = imp.load_module(module_name, obj, filename, data)
        modules[package_name] = module
    return modules
def get_module(modules, import_path):
    """
    Given a modules dictionary returned by `import_modules()`,
    return a reference to the module at `import_path` relative
    to the base module. For example, get_module(modules, "example.stuff")
    will return a reference to the "stuff" module inside the
    imported "example" plugin.

    :param modules: dict mapping top-level names to module objects
    :param import_path: dotted path, e.g. "example.stuff.thing"
    :raises CLIException: if any component of the path cannot be resolved
    """
    parts = import_path.split('.')
    try:
        module = modules[parts[0]]
        # Fix: walk the attribute chain one component at a time. The
        # original called getattr(module, "b.c") for paths deeper than two
        # levels, which always failed -- attribute names cannot contain dots.
        for part in parts[1:]:
            module = getattr(module, part)
    except Exception as exception:
        raise CLIException("Unable to get module: {error}"
                           .format(error=str(exception)))
    return module
def completions(comp_words, current_word, argv):
    """
    Compute shell-autocomplete candidates.

    Returns the candidate word list while completion is still possible,
    an empty list when nothing can match, and None once the command word
    has been fully typed. Note that the standard helper flags are appended
    to *comp_words* in place.
    """
    comp_words += ["-h", "--help", "--version"]
    # Nothing typed yet: offer every candidate.
    if not argv:
        return comp_words
    head_is_command = argv[0] in comp_words
    if len(argv) == 1:
        # Still typing the first word: keep offering candidates.
        if current_word:
            return comp_words
        # First word finished: either it is a known command (done) or
        # nothing can complete it.
        if head_is_command:
            return None
        return []
    # More than one word: the outcome is decided by the first word alone.
    if head_is_command:
        return None
    return []
def format_commands_help(cmds):
    """
    Render a two-column help listing for *cmds*, aligning every short-help
    string two spaces after the longest command name.
    """
    width = max(len(name) for name in cmds)
    lines = []
    for name in sorted(cmds):
        entry = cmds[name]
        # For the top-level entry point the value is the short help itself;
        # plugins supply a sub-dictionary holding a "short_help" field.
        if isinstance(entry, dict):
            entry = entry["short_help"]
        gap = " " * (width - len(name) + 2)
        lines.append("  %s%s%s\n" % (name, gap, entry))
    return "".join(lines)
def format_subcommands_help(cmd):
    """
    Format one plugin subcommand for display.

    :param cmd: dict with "arguments", "short_help", "long_help" and
        "flags" keys. The "-h --help" flag is injected into cmd["flags"]
        as a side effect (preserved from the original implementation).
    :returns: tuple (arguments, short_help, long_help, flag_string)
    """
    arguments = " ".join(cmd["arguments"])
    short_help = cmd["short_help"]
    long_help = textwrap.dedent(cmd["long_help"].rstrip())
    # Re-indent every line of the long help by two spaces.
    long_help = "  " + "\n  ".join(long_help.lstrip().split('\n'))
    flags = cmd["flags"]
    flags["-h --help"] = "Show this screen."
    flag_string = ""
    # Fix: the original guard was `list(flags.keys()) != 0`, comparing a
    # list against an int -- always True. The intended check is "any flags".
    if flags:
        longest_flag_name = max(flags.keys(), key=len)
        for flag in sorted(flags.keys()):
            num_spaces = len(longest_flag_name) - len(flag) + 2
            flag_string += "  %s%s%s\n" % (flag, " " * num_spaces, flags[flag])
        flag_string = flag_string.rstrip()
    return (arguments, short_help, long_help, flag_string)
def join_plugin_paths(settings, config):
    """
    Combine the built-in plugin paths from *settings* with the plugin
    paths declared in the user's config file.
    """
    builtin = settings.PLUGINS
    try:
        extra = config.plugins()
    except Exception as exception:
        raise CLIException("Error: {error}.".format(error=str(exception)))
    return builtin + extra
def sanitize_address(address):
    """
    Sanitize an address, ensuring that it has a format recognizable by the CLI.

    Adds an implicit "http" scheme when none is given, then validates
    scheme, hostname, and port.

    :param address: e.g. "example.com", "http://10.0.0.1:5050"
    :returns: the address with an explicit http/https scheme
    :raises CLIException: on unparseable/invalid addresses, IPv6 hosts,
        IPv4 hosts without a port, or out-of-range ports
    """
    # Try and parse the address to make sure it is parseable.
    try:
        parsed = urllib.parse.urlparse(address)
    except Exception as exception:
        raise CLIException("Unable to parse address: {error}"
                           .format(error=str(exception)))
    # Since we allow addresses to be specified without an
    # explicit scheme, some fields in the parsed address may
    # be missing. Patch it up to force an implicit HTTP scheme.
    if parsed.scheme == "" and parsed.netloc == "":
        address = "http://{addr}".format(addr=address)
    elif parsed.scheme == "" and parsed.netloc != "":
        address = "http:{addr}".format(addr=address)
    # Re-parse so all expected fields are present and valid.
    try:
        parsed = urllib.parse.urlparse(address)
    except Exception as exception:
        raise CLIException("Unable to parse address: {error}"
                           .format(error=str(exception)))
    # We only support HTTP and HTTPS schemes.
    if parsed.scheme != "http" and parsed.scheme != "https":
        raise CLIException("Invalid scheme '{scheme}' in address"
                           .format(scheme=parsed.scheme))
    # There must be a hostname present.
    if parsed.hostname == "":
        raise CLIException("Missing hostname in address")
    # We do not support IPv6 in the hostname (yet).
    # Fix: the original raised CLIException *inside* a try whose
    # `except Exception: pass` swallowed it, so IPv6 addresses were
    # silently accepted. Detect first, then raise outside the try.
    try:
        ipaddress.IPv6Address(parsed.hostname)
        is_ip_v6_address = True
    except ValueError:
        is_ip_v6_address = False
    if is_ip_v6_address:
        raise CLIException("IPv6 addresses are unsupported")
    # We either accept IPv4 addresses, or DNS names as the hostname. In the
    # check below we try and parse the hostname as an IPv4 address; if this
    # does not succeed, we assume the hostname is formatted as a DNS name.
    try:
        ipaddress.IPv4Address(parsed.hostname)
        valid_ip_v4_address = True
    except ValueError:
        valid_ip_v4_address = False
    # If we have an IPv4 address then we require a port to be specified.
    if valid_ip_v4_address and parsed.port is None:
        raise CLIException("Addresses formatted as IP must contain a port")
    # We allow ports for both IPv4 addresses and DNS
    # names, but they must be in a specific range.
    if parsed.port and (parsed.port < 0 or parsed.port > 65535):
        raise CLIException("Port '{port}' is out of range"
                           .format(port=parsed.port))
    return address
def zookeeper_resolve_leader(addresses, path):
    """
    Resolve the leader using a znode path. ZooKeeper imposes a total
    order on the elements of the queue, guaranteeing that the
    oldest element of the queue is the first one. We can
    thus return the first address we get from ZooKeeper.

    :param addresses: list of "host:port" ZooKeeper endpoints
    :param path: znode path under which masters register
    :returns: "ip:port" of the leading master
    :raises CLIException: on any ZooKeeper, JSON, or resolution failure
    """
    hosts = ",".join(addresses)
    try:
        zk = KazooClient(hosts=hosts)
        zk.start()
    except Exception as exception:
        raise CLIException("Unable to initialize Zookeeper Client: {error}"
                           .format(error=exception))
    try:
        children = zk.get_children(path)
    except Exception as exception:
        raise CLIException("Unable to get children of {zk_path}: {error}"
                           .format(zk_path=path, error=exception))
    # Sorting the sequential znode names yields registration order;
    # the first one with a usable address is the current leader.
    masters = sorted(
        # 'json.info' is the prefix for master nodes.
        child for child in children if child.startswith("json.info")
    )
    address = ""
    for master in masters:
        try:
            node_path = "{path}/{node}".format(path=path, node=master)
            json_data, _ = zk.get(node_path)
        except Exception as exception:
            raise CLIException("Unable to get the value of '{node}': {error}"
                               .format(node=node_path, error=exception))
        try:
            data = json.loads(json_data)
        except Exception as exception:
            raise CLIException("Could not load JSON from '{data}': {error}"
                               .format(data=data, error=str(exception)))
        # Take the first (oldest) node that advertises a complete address.
        if ("address" in data and "ip" in data["address"] and
                "port" in data["address"]):
            address = "{ip}:{port}".format(ip=data["address"]["ip"],
                                           port=data["address"]["port"])
            break
    # Always shut the client down, even when no leader was found.
    try:
        zk.stop()
    except Exception as exception:
        raise CLIException("Unable to stop Zookeeper Client: {error}"
                           .format(error=exception))
    if not address:
        raise CLIException("Unable to resolve the leading"
                           " master using ZooKeeper")
    return address
class Table():
    """
    A simple fixed-width table for terminal output.

    The first stored row holds the column headers; subsequent rows are
    data. Column widths grow to fit the widest cell seen, and every cell
    is rendered with two trailing spaces of padding.
    """
    def __init__(self, columns):
        """
        Create a table whose header row is *columns* (a list of names).
        """
        if not isinstance(columns, list):
            raise CLIException("Column headers must be supplied as a list")
        for header in columns:
            # Runs of repeated whitespace inside a header would confuse
            # the column detection done by parse() below.
            if re.search(r"(\s)\1{2,}", header):
                raise CLIException("Column headers cannot have more"
                                   " than one space between words")
        self.table = [columns]
        self.padding = [len(header) for header in columns]

    def __getitem__(self, index):
        # Hand back a copy so callers cannot mutate the stored row.
        return list(self.table[index])

    def dimensions(self):
        """
        Return (<num-rows>, <num-columns>), header row included.
        """
        return (len(self.table), len(self.table[0]))

    def add_row(self, row):
        """
        Append a data row; entries must line up with the header columns.
        """
        if len(row) != len(self.table[0]):
            raise CLIException("Number of entries and columns do not match!")
        # Widen any column whose new cell is the longest seen so far.
        self.padding = [max(width, len(cell))
                        for width, cell in zip(self.padding, row)]
        self.table.append(row)

    def __str__(self):
        """
        Render all rows, padding each cell to its column width plus two.
        """
        rendered = []
        for row in self.table:
            cells = ["%s%s" % (cell, " " * (self.padding[col] - len(cell) + 2))
                     for col, cell in enumerate(row)]
            rendered.append("".join(cells))
        return "\n".join(rendered)

    @staticmethod
    def parse(string):
        """
        Rebuild a `Table` from a string previously produced by __str__.
        """
        lines = string.split("\n")
        # Locate each column header (and its start offset) in the first
        # line; headers contain at most single spaces between words.
        header_matches = re.finditer(r"([\w\d]+\s?[\w\d]+)+", lines[0])
        columns = [(match.start(), match.group()) for match in header_matches]
        rebuilt = Table([header for _, header in columns])
        offsets = [start for start, _ in columns]
        # Slice every subsequent line at the header offsets to recover cells.
        for line in lines[1:]:
            row = []
            for position, start in enumerate(offsets):
                if position + 1 < len(offsets):
                    cell = line[start:offsets[position + 1]]
                else:
                    cell = line[start:]
                row.append(str(cell.strip()))
            rebuilt.add_row(row)
        return rebuilt
|
Gilbert88/mesos
|
src/python/cli_new/lib/cli/util.py
|
Python
|
apache-2.0
| 13,337 | 0 |
from collections import OrderedDict
from purl import URL as PURL
def URL(base, path, segments=None, defaults=None):
    """
    Build a URL object whose path segments can be read and written by name.

    A fresh copy of the Segments class is created per call so that the
    dynamically attached per-segment properties never leak between
    different URL instances.
    """
    # Clone the Segments class (same bases, same dict) for this URL only.
    segment_class = type(Segments.__name__, Segments.__bases__,
                         dict(Segments.__dict__))
    if segments is None:
        segments = []
    if defaults is None:
        defaults = []
    # Attach one getter/setter property per named segment.
    for name in segments:
        setattr(segment_class, name, segment_class._segment(name))
    return segment_class(base, path, segments, defaults)
class Segments(object):
    """
    URL segment handler, not intended for direct use. The URL is constructed by
    joining base, path and segments.
    """
    def __init__(self, base, path, segments, defaults):
        # Preserve the base URL
        self.base = PURL(base, path=path)
        # Map the segments and defaults lists to an ordered dict
        # (segment order must be stable so build() appends them in order).
        self.segments = OrderedDict(zip(segments, defaults))
    def build(self):
        """Return a purl URL with the named segment values appended to the base path."""
        # Join base segments and segments
        segments = self.base.path_segments() + tuple(self.segments.values())
        # Create a new URL with the segments replaced
        url = self.base.path_segments(segments)
        return url
    def full_path(self):
        """Return the built URL without scheme and host.

        NOTE(review): this strips host and scheme by plain string
        replacement and then drops 4 leading characters (presumably the
        '://' residue plus one) -- fragile; verify against purl's
        as_string() output format before relying on edge cases.
        """
        full_path = self.build().as_string()
        full_path = full_path.replace(self.base.host(), '')
        full_path = full_path.replace(self.base.scheme(), '')
        return full_path[4:]
    def __str__(self):
        return self.build().as_string()
    def _get_segment(self, segment):
        # Current value of one named segment.
        return self.segments[segment]
    def _set_segment(self, segment, value):
        # Overwrite the value of one named segment.
        self.segments[segment] = value
    @classmethod
    def _segment(cls, segment):
        """
        Returns a property capable of setting and getting a segment.
        """
        return property(
            fget=lambda x: cls._get_segment(x, segment),
            fset=lambda x, v: cls._set_segment(x, segment, v),
        )
|
dr4ke616/LazyTorrent
|
application/lib/the_pirate_bay/utils.py
|
Python
|
gpl-3.0
| 2,327 | 0 |
#!/usr/bin/python2.7
# -*- encoding: UTF-8 -*-
# gps_test created on 15/8/22 上午12:44
# Copyright 2014 offbye@gmail.com
"""
"""
__author__ = ['"Xitao":<offbye@gmail.com>']
import gps3
# Open a socket to the local gpsd daemon and a fix accumulator.
gps_connection = gps3.GPSDSocket()
gps_fix = gps3.Fix()
try:
    # Stream reports from gpsd; iteration presumably yields falsy values
    # until fresh data arrives -- TODO confirm against gps3 docs.
    for new_data in gps_connection:
        if new_data:
            gps_fix.refresh(new_data)
            # TPV = time/position/velocity report; SKY = satellite/DOP report.
            print(gps_fix.TPV['time'])
            print(gps_fix.TPV['lat'])
            print(gps_fix.TPV['lon'])
            print(gps_fix.SKY['gdop'])
except KeyboardInterrupt:
    # Ctrl-C: close the gpsd socket and exit cleanly.
    gps_connection.close()
    print('\nTerminated by user\nGood Bye.\n')
|
offbye/PiBoat
|
pyboat/gps_test.py
|
Python
|
apache-2.0
| 604 | 0.003333 |
from .models import EmailUser
class EmailOrPhoneModelBackend:
    """Authentication backend that accepts either an email address or a
    phone number in the username field."""

    def authenticate(self, username=None, password=None):
        """Return the matching user when the password checks out, else None."""
        # An '@' marks the credential as an email; anything else is a phone.
        if '@' in username:
            lookup = {'email__iexact': username}
        else:
            lookup = {'phone': username}
        try:
            candidate = EmailUser.objects.get(**lookup)
            if candidate.check_password(password):
                return candidate
        except EmailUser.DoesNotExist:
            return None

    def get_user(self, user_id):
        """Return the user with primary key *user_id*, or None if absent."""
        try:
            return EmailUser.objects.get(pk=user_id)
        except EmailUser.DoesNotExist:
            return None
|
pannkotsky/groupmate
|
backend/apps/users/login_backend.py
|
Python
|
mit
| 618 | 0 |
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
    # Point Django at this project's settings unless already configured.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "DjangoServer.settings")
    try:
        from django.core.management import execute_from_command_line
    except ImportError:
        # The above import may fail for some other reason. Ensure that the
        # issue is really that Django is missing to avoid masking other
        # exceptions on Python 2.
        try:
            import django
        except ImportError:
            raise ImportError(
                "Couldn't import Django. Are you sure it's installed and "
                "available on your PYTHONPATH environment variable? Did you "
                "forget to activate a virtual environment?"
            )
        raise
    # Dispatch to the management command named on the command line
    # (runserver, migrate, ...).
    execute_from_command_line(sys.argv)
|
AcroManiac/AcroLink
|
Server/DjangoServer/manage.py
|
Python
|
gpl-3.0
| 810 | 0 |
import unittest
from tests.test_basic import BaseTestCase
from datetime import timedelta, datetime, tzinfo
class UTC(tzinfo):
    """Concrete tzinfo for Coordinated Universal Time: zero offset, no DST."""

    # Shared zero offset used by both utcoffset() and dst().
    _ZERO = timedelta(0)

    def utcoffset(self, dt):
        return self._ZERO

    def tzname(self, dt):
        return "UTC"

    def dst(self, dt):
        return self._ZERO
class UtilTestCase(BaseTestCase):
    """
    Tests utils
    """
    def test_parse_iso_8601_time_str(self):
        """
        At times, Amazon hands us a timestamp with no microseconds.
        """
        # Local import deliberately shadows the module-level
        # `from datetime import ...` names with the datetime module itself.
        import datetime
        from route53.util import parse_iso_8601_time_str
        # No fractional seconds: microseconds must default to 0.
        self.assertEqual(parse_iso_8601_time_str('2013-07-28T01:00:01Z'),
                         datetime.datetime(2013, 7, 28, 1, 0, 1, 0, \
                             tzinfo=UTC()))
        # '.001' seconds == 1000 microseconds.
        self.assertEqual(parse_iso_8601_time_str('2013-07-28T01:00:01.001Z'),
                         datetime.datetime(2013, 7, 28, 1, 0, 1, 1000, \
                             tzinfo=UTC()))
|
EricSchles/python-route53
|
tests/test_util.py
|
Python
|
mit
| 916 | 0.009825 |
# This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Commerce Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
import os
def get_test_requirements_from_tox_ini(path):
    """
    Collect the testing dependencies listed in <path>/tox.ini between the
    '# BEGIN testing deps' and '# END testing deps' marker comments.
    """
    requirements = []
    in_deps_section = False
    with open(os.path.join(path, 'tox.ini'), 'rt') as tox_file:
        for line in tox_file:
            stripped = line.strip()
            if stripped == '# BEGIN testing deps':
                in_deps_section = True
            # Leaving the section: explicit END marker, or any line that
            # starts a new unindented ini key/section.
            elif stripped == '# END testing deps' or not line[0].isspace():
                in_deps_section = False
            elif in_deps_section:
                requirements.append(stripped)
    return requirements
def get_long_description(path):
    """
    Read the long description text from the given file.

    :param path: path of the description file, or a falsy value
    :return: file contents as a string, or None when no path is given
    """
    if not path:
        return None
    with open(path, 'rt') as description_file:
        return description_file.read()
|
suutari/shoop
|
shuup_setup_utils/parsing.py
|
Python
|
agpl-3.0
| 928 | 0 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Handlers for placement API.
Individual handlers are associated with URL paths in the
ROUTE_DECLARATIONS dictionary. At the top level each key is a Routes
compliant path. The value of that key is a dictionary mapping
individual HTTP request methods to a Python function representing a
simple WSGI application for satisfying that request.
The ``make_map`` method processes ROUTE_DECLARATIONS to create a
Routes.Mapper, including automatic handlers to respond with a
405 when a request is made against a valid URL with an invalid
method.
"""
import routes
import webob
from oslo_log import log as logging
from nova.api.openstack.placement.handlers import aggregate
from nova.api.openstack.placement.handlers import allocation
from nova.api.openstack.placement.handlers import allocation_candidate
from nova.api.openstack.placement.handlers import inventory
from nova.api.openstack.placement.handlers import resource_class
from nova.api.openstack.placement.handlers import resource_provider
from nova.api.openstack.placement.handlers import root
from nova.api.openstack.placement.handlers import trait
from nova.api.openstack.placement.handlers import usage
from nova.api.openstack.placement import policy
from nova.api.openstack.placement import util
from nova import exception
from nova.i18n import _
LOG = logging.getLogger(__name__)
# URLs and Handlers
# NOTE(cdent): When adding URLs here, do not use regex patterns in
# the path parameters (e.g. {uuid:[0-9a-zA-Z-]+}) as that will lead
# to 404s that are controlled outside of the individual resources
# and thus do not include specific information on the why of the 404.
# Mapping of URL path -> {HTTP method -> handler callable}, consumed by
# make_map() below to build the Routes mapper for the service.
ROUTE_DECLARATIONS = {
    '/': {
        'GET': root.home,
    },
    # NOTE(cdent): This allows '/placement/' and '/placement' to
    # both work as the root of the service, which we probably want
    # for those situations where the service is mounted under a
    # prefix (as it is in devstack). While weird, an empty string is
    # a legit key in a dictionary and matches as desired in Routes.
    '': {
        'GET': root.home,
    },
    # Custom resource classes.
    '/resource_classes': {
        'GET': resource_class.list_resource_classes,
        'POST': resource_class.create_resource_class
    },
    '/resource_classes/{name}': {
        'GET': resource_class.get_resource_class,
        'PUT': resource_class.update_resource_class,
        'DELETE': resource_class.delete_resource_class,
    },
    # Resource providers and their nested inventories, usages,
    # aggregates, allocations and traits.
    '/resource_providers': {
        'GET': resource_provider.list_resource_providers,
        'POST': resource_provider.create_resource_provider
    },
    '/resource_providers/{uuid}': {
        'GET': resource_provider.get_resource_provider,
        'DELETE': resource_provider.delete_resource_provider,
        'PUT': resource_provider.update_resource_provider
    },
    '/resource_providers/{uuid}/inventories': {
        'GET': inventory.get_inventories,
        'POST': inventory.create_inventory,
        'PUT': inventory.set_inventories,
        'DELETE': inventory.delete_inventories
    },
    '/resource_providers/{uuid}/inventories/{resource_class}': {
        'GET': inventory.get_inventory,
        'PUT': inventory.update_inventory,
        'DELETE': inventory.delete_inventory
    },
    '/resource_providers/{uuid}/usages': {
        'GET': usage.list_usages
    },
    '/resource_providers/{uuid}/aggregates': {
        'GET': aggregate.get_aggregates,
        'PUT': aggregate.set_aggregates
    },
    '/resource_providers/{uuid}/allocations': {
        'GET': allocation.list_for_resource_provider,
    },
    # Allocations viewed from the consumer side.
    '/allocations/{consumer_uuid}': {
        'GET': allocation.list_for_consumer,
        'PUT': allocation.set_allocations,
        'DELETE': allocation.delete_allocations,
    },
    '/allocation_candidates': {
        'GET': allocation_candidate.list_allocation_candidates,
    },
    # Traits, standalone and per resource provider.
    '/traits': {
        'GET': trait.list_traits,
    },
    '/traits/{name}': {
        'GET': trait.get_trait,
        'PUT': trait.put_trait,
        'DELETE': trait.delete_trait,
    },
    '/resource_providers/{uuid}/traits': {
        'GET': trait.list_traits_for_resource_provider,
        'PUT': trait.update_traits_for_resource_provider,
        'DELETE': trait.delete_traits_for_resource_provider
    },
    '/usages': {
        'GET': usage.get_total_usages,
    },
}
def dispatch(environ, start_response, mapper):
    """Route the current request to its handler.

    Raise a 404 when no route matches.  A route that matches the URL
    but not the method dispatches to the ``handle_405`` action that
    ``make_map`` registers for every path.
    """
    match = mapper.match(environ=environ)
    if match is None:
        raise webob.exc.HTTPNotFound(
            json_formatter=util.json_error_formatter)
    # Routes guarantees 'action' is present on any successful match.
    handler = match.pop('action')
    # Expose the remaining match data to handlers as WSGI routing args.
    environ['wsgiorg.routing_args'] = ((), match)
    return handler(environ, start_response)
def handle_405(environ, start_response):
    """Return a 405 response when the method is not allowed.

    When the routing args carry a ``_methods`` entry, emit an ``allow``
    header listing the methods the URL does support.
    """
    allowed = util.wsgi_path_item(environ, '_methods')
    headers = {}
    if allowed:
        # Ensure allow header is a python 2 or 3 native string (thus
        # not unicode in python 2 but stay a string in python 3); the
        # Routes routing table stores the value as unicode on py2.
        headers['allow'] = str(allowed)
    # Use the exception class as the WSGI application rather than
    # raising it, so the response is returned to the caller directly.
    response = webob.exc.HTTPMethodNotAllowed(
        _('The method specified is not allowed for this resource.'),
        headers=headers, json_formatter=util.json_error_formatter)
    return response(environ, start_response)
def make_map(declarations):
    """Build a routes.Mapper from the route declaration dictionary.

    Each (path, handler) pair is registered per HTTP method; a final
    catch-all per path dispatches to ``handle_405`` carrying the list
    of methods the path does support.
    """
    mapper = routes.Mapper()
    for route, targets in declarations.items():
        methods = []
        for method, target in targets.items():
            mapper.connect(route, action=target,
                           conditions=dict(method=[method]))
            methods.append(method)
        # Fallback connection matching any other method on this path.
        mapper.connect(route, action=handle_405, _methods=', '.join(methods))
    return mapper
class PlacementHandler(object):
    """Serve the Placement API.

    Dispatch to handlers defined in ROUTE_DECLARATIONS.
    """
    def __init__(self, **local_config):
        # NOTE(cdent): Local config currently unused.
        self._map = make_map(ROUTE_DECLARATIONS)
    def __call__(self, environ, start_response):
        """WSGI entry point: authorize, validate headers, dispatch."""
        # All requests but '/' require admin.
        if environ['PATH_INFO'] != '/':
            context = environ['placement.context']
            # TODO(cdent): Using is_admin everywhere (except /) is
            # insufficiently flexible for future use case but is
            # convenient for initial exploration.
            if not policy.placement_authorize(context, 'placement'):
                raise webob.exc.HTTPForbidden(
                    _('admin required'),
                    json_formatter=util.json_error_formatter)
        # Check that an incoming request with a content-length header
        # that is an integer > 0 and not empty, also has a content-type
        # header that is not empty. If not raise a 400.
        clen = environ.get('CONTENT_LENGTH')
        try:
            if clen and (int(clen) > 0) and not environ.get('CONTENT_TYPE'):
                raise webob.exc.HTTPBadRequest(
                    _('content-type header required when content-length > 0'),
                    json_formatter=util.json_error_formatter)
        # FIX: the bound name was unused ('as exc' removed, flake8 F841).
        except ValueError:
            # int(clen) failed: the header was present but not numeric.
            raise webob.exc.HTTPBadRequest(
                _('content-length header must be an integer'),
                json_formatter=util.json_error_formatter)
        try:
            return dispatch(environ, start_response, self._map)
        # Trap the NotFound exceptions raised by the objects used
        # with the API and transform them into webob.exc.HTTPNotFound.
        except exception.NotFound as exc:
            raise webob.exc.HTTPNotFound(
                exc, json_formatter=util.json_error_formatter)
        # Trap the HTTPNotFound that can be raised by dispatch()
        # when no route is found. The exception is passed through to
        # the FaultWrap middleware without causing an alarming log
        # message.
        except webob.exc.HTTPNotFound:
            raise
        # FIX: the bound name was unused ('as exc' removed, flake8 F841);
        # LOG.exception already records the active exception.
        except Exception:
            LOG.exception("Uncaught exception")
            raise
|
jianghuaw/nova
|
nova/api/openstack/placement/handler.py
|
Python
|
apache-2.0
| 9,268 | 0 |
# -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Utilities for console input and output.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import codecs
import locale
import re
import math
import multiprocessing
import os
import struct
import sys
import threading
import time
try:
import fcntl
import termios
import signal
_CAN_RESIZE_TERMINAL = True
except ImportError:
_CAN_RESIZE_TERMINAL = False
from ..extern import six
from ..extern.six.moves import range
from .. import conf
from .misc import isiterable
from .decorators import classproperty
__all__ = [
'isatty', 'color_print', 'human_time', 'human_file_size',
'ProgressBar', 'Spinner', 'print_code_line', 'ProgressBarOrSpinner',
'terminal_size']
_DEFAULT_ENCODING = 'utf-8'
class _IPython(object):
    """Singleton class given access to IPython streams, etc."""
    @classproperty
    def get_ipython(cls):
        # NOTE: if IPython is not installed the local name is never
        # bound, so the 'return' below raises NameError.  The
        # OutStream property relies on that NameError to detect the
        # missing-IPython case.
        try:
            from IPython import get_ipython
        except ImportError:
            pass
        return get_ipython
    @classproperty
    def OutStream(cls):
        # Resolve and cache the OutStream class on first access; the
        # cached value stays None when IPython is absent or too old.
        if not hasattr(cls, '_OutStream'):
            cls._OutStream = None
            try:
                cls.get_ipython()
            except NameError:
                # IPython itself is not importable.
                return None
            # Probe the import locations used by successive IPython
            # releases, newest first.
            try:
                from ipykernel.iostream import OutStream
            except ImportError:
                try:
                    from IPython.zmq.iostream import OutStream
                except ImportError:
                    from IPython import version_info
                    if version_info[0] >= 4:
                        return None
                    try:
                        from IPython.kernel.zmq.iostream import OutStream
                    except ImportError:
                        return None
            cls._OutStream = OutStream
        return cls._OutStream
    @classproperty
    def ipyio(cls):
        # Cache the IPython.utils.io module (or None) on first access.
        if not hasattr(cls, '_ipyio'):
            try:
                from IPython.utils import io
            except ImportError:
                cls._ipyio = None
            else:
                cls._ipyio = io
        return cls._ipyio
    @classproperty
    def IOStream(cls):
        if cls.ipyio is None:
            return None
        else:
            return cls.ipyio.IOStream
    @classmethod
    def get_stream(cls, stream):
        # stream is 'stdout' or 'stderr'; returns the matching
        # attribute of IPython.utils.io.
        return getattr(cls.ipyio, stream)
def _get_stdout(stderr=False):
    """
    Return the stream to use for standard output (or standard error
    when ``stderr`` is `True`).

    Typically this is just `sys.stdout`, but when running under
    IPython on Windows the correct stream may instead be the IPython
    console stream (only when the sys stream is a TTY).
    """
    name = 'stderr' if stderr else 'stdout'
    sys_stream = getattr(sys, name)
    if not isatty(sys_stream) or _IPython.OutStream is None:
        return sys_stream
    # Our system stream is a tty and we're running inside IPython.
    ipyio_stream = _IPython.get_stream(name)
    if ipyio_stream is not None and isatty(ipyio_stream):
        # Use the IPython console output stream.
        return ipyio_stream
    # sys.stdout was redirected to some other non-TTY stream (a file,
    # perhaps), so just use it directly.
    return sys_stream
def isatty(file):
    """
    Returns `True` if ``file`` is a tty.

    Most built-in Python file-like objects have an `isatty` member,
    but some user-defined types may not, so this assumes those are not
    ttys.
    """
    # Never report a tty outside the main thread/process so progress
    # bars and spinners stay silent in workers.
    if (multiprocessing.current_process().name != 'MainProcess' or
            threading.current_thread().getName() != 'MainThread'):
        return False
    if hasattr(file, 'isatty'):
        return file.isatty()
    # Use two isinstance calls to only evaluate IOStream when necessary.
    if (_IPython.OutStream is None or
        (not isinstance(file, _IPython.OutStream) and
         not isinstance(file, _IPython.IOStream))):
        return False
    # File is an IPython OutStream or IOStream.  Check whether:
    # - File name is 'stdout'; or
    # - File wraps a Console
    if getattr(file, 'name', None) == 'stdout':
        return True
    if hasattr(file, 'stream'):
        # On Windows, in IPython 2 the standard I/O streams will wrap
        # pyreadline.Console objects if pyreadline is available; this should
        # be considered a TTY.
        try:
            # BUG FIX: the module is 'pyreadline', not 'pyreadyline'.
            # The typo made this import always fail, so the Console
            # check silently returned False on every platform.
            from pyreadline.console import Console as PyreadlineConsole
        except ImportError:
            return False
        return isinstance(file.stream, PyreadlineConsole)
    return False
def terminal_size(file=None):
    """
    Returns a tuple (height, width) containing the height and width of
    the terminal.

    This function will look for the width and height in multiple areas
    before falling back on the width and height in astropy's
    configuration.
    """
    if file is None:
        file = _get_stdout()
    try:
        # Ask the kernel for the window size via the TIOCGWINSZ ioctl.
        # NOTE: when fcntl/termios failed to import at module scope
        # (_CAN_RESIZE_TERMINAL is False) this line raises NameError,
        # which the broad 'except Exception' below deliberately
        # catches — do not narrow that except clause.
        s = struct.pack(str("HHHH"), 0, 0, 0, 0)
        x = fcntl.ioctl(file, termios.TIOCGWINSZ, s)
        (lines, width, xpixels, ypixels) = struct.unpack(str("HHHH"), x)
        # Leave a margin so output does not press against the edges.
        if lines > 12:
            lines -= 6
        if width > 10:
            width -= 1
        if lines <= 0 or width <= 0:
            raise Exception('unable to get terminal size')
        return (lines, width)
    except Exception:
        try:
            # see if POSIX standard variables will work
            return (int(os.environ.get('LINES')),
                    int(os.environ.get('COLUMNS')))
        except TypeError:
            # fall back on configuration variables, or if not
            # set, (25, 80)
            lines = conf.max_lines
            width = conf.max_width
            if lines is None:
                lines = 25
            if width is None:
                width = 80
            return lines, width
def _color_text(text, color):
"""
Returns a string wrapped in ANSI color codes for coloring the
text in a terminal::
colored_text = color_text('Here is a message', 'blue')
This won't actually effect the text until it is printed to the
terminal.
Parameters
----------
text : str
The string to return, bounded by the color codes.
color : str
An ANSI terminal color name. Must be one of:
black, red, green, brown, blue, magenta, cyan, lightgrey,
default, darkgrey, lightred, lightgreen, yellow, lightblue,
lightmagenta, lightcyan, white, or '' (the empty string).
"""
color_mapping = {
'black': '0;30',
'red': '0;31',
'green': '0;32',
'brown': '0;33',
'blue': '0;34',
'magenta': '0;35',
'cyan': '0;36',
'lightgrey': '0;37',
'default': '0;39',
'darkgrey': '1;30',
'lightred': '1;31',
'lightgreen': '1;32',
'yellow': '1;33',
'lightblue': '1;34',
'lightmagenta': '1;35',
'lightcyan': '1;36',
'white': '1;37'}
if sys.platform == 'win32' and _IPython.OutStream is None:
# On Windows do not colorize text unless in IPython
return text
color_code = color_mapping.get(color, '0;39')
return '\033[{0}m{1}\033[0m'.format(color_code, text)
def _decode_preferred_encoding(s):
"""Decode the supplied byte string using the preferred encoding
for the locale (`locale.getpreferredencoding`) or, if the default encoding
is invalid, fall back first on utf-8, then on latin-1 if the message cannot
be decoded with utf-8.
"""
enc = locale.getpreferredencoding()
try:
try:
return s.decode(enc)
except LookupError:
enc = _DEFAULT_ENCODING
return s.decode(enc)
except UnicodeDecodeError:
return s.decode('latin-1')
def _write_with_fallback(s, write, fileobj):
    """Write the supplied string with the given write function like
    ``write(s)``, but use a writer for the locale's preferred encoding in case
    of a UnicodeEncodeError. Failing that attempt to write with 'utf-8' or
    'latin-1'.

    Returns the write function that succeeded, so the caller can reuse
    it for subsequent writes without repeating the fallback dance.
    """
    if (_IPython.IOStream is not None and
            isinstance(fileobj, _IPython.IOStream)):
        # If the output stream is an IPython.utils.io.IOStream object that's
        # not going to be very helpful to us since it doesn't raise any
        # exceptions when an error occurs writing to its underlying stream.
        # There's no advantage to us using IOStream.write directly though;
        # instead just write directly to its underlying stream:
        write = fileobj.stream.write
    try:
        write(s)
        return write
    except UnicodeEncodeError:
        # Let's try the next approach...
        pass
    # Wrap the file object in a codec writer for the locale's
    # preferred encoding (or utf-8 when that codec is unknown).
    enc = locale.getpreferredencoding()
    try:
        Writer = codecs.getwriter(enc)
    except LookupError:
        Writer = codecs.getwriter(_DEFAULT_ENCODING)
    f = Writer(fileobj)
    write = f.write
    try:
        write(s)
        return write
    except UnicodeEncodeError:
        # Last resort: latin-1 writer.
        Writer = codecs.getwriter('latin-1')
        f = Writer(fileobj)
        write = f.write
    # If this doesn't work let the exception bubble up; I'm out of ideas
    write(s)
    return write
def color_print(*args, **kwargs):
    """
    Prints colors and styles to the terminal using ANSI escape
    sequences.
    ::
        color_print('This is the color ', 'default', 'GREEN', 'green')

    Parameters
    ----------
    positional args : str
        The positional arguments come in pairs (*msg*, *color*), where
        *msg* is the string to display and *color* is the color to
        display it in.
        *color* is an ANSI terminal color name. Must be one of:
        black, red, green, brown, blue, magenta, cyan, lightgrey,
        default, darkgrey, lightred, lightgreen, yellow, lightblue,
        lightmagenta, lightcyan, white, or '' (the empty string).
    file : writeable file-like object, optional
        Where to write to. Defaults to `sys.stdout`. If file is not
        a tty (as determined by calling its `isatty` member, if one
        exists), no coloring will be included.
    end : str, optional
        The ending of the message. Defaults to ``\\n``. The end will
        be printed after resetting any color or font state.
    """
    file = kwargs.get('file', _get_stdout())
    end = kwargs.get('end', '\n')
    write = file.write
    if isatty(file) and conf.use_color:
        # Colorized path: walk the (msg, color) pairs; a trailing
        # unpaired msg is printed uncolored.
        for i in range(0, len(args), 2):
            msg = args[i]
            if i + 1 == len(args):
                color = ''
            else:
                color = args[i + 1]
            if color:
                msg = _color_text(msg, color)
            # Some file objects support writing unicode sensibly on some Python
            # versions; if this fails try creating a writer using the locale's
            # preferred encoding. If that fails too give up.
            if six.PY2 and isinstance(msg, bytes):
                msg = _decode_preferred_encoding(msg)
            # Keep whichever write function succeeded for later pairs.
            write = _write_with_fallback(msg, write, file)
        write(end)
    else:
        # Plain path: emit the messages with no escape sequences.
        for i in range(0, len(args), 2):
            msg = args[i]
            if six.PY2 and isinstance(msg, bytes):
                # Support decoding bytes to unicode on Python 2; use the
                # preferred encoding for the locale (which is *sometimes*
                # sensible)
                msg = _decode_preferred_encoding(msg)
            write(msg)
        write(end)
def strip_ansi_codes(s):
    """
    Remove ANSI color codes from the string.
    """
    ansi_pattern = re.compile('\033\\[([0-9]+)(;[0-9]+)*m')
    return ansi_pattern.sub('', s)
def human_time(seconds):
    """
    Return a short, fixed-width, human-friendly representation of a
    duration.

    Depending on the number of seconds given, the result uses the two
    largest applicable units, e.g.::

        1w 3d
        2d 4h
        1h 5m
        1m 4s
        15s

    Will be in color if console coloring is turned on.

    Parameters
    ----------
    seconds : int
        The number of seconds to represent

    Returns
    -------
    time : str
        A human-friendly representation of the given number of seconds.
    """
    # Unit suffixes paired with their size in seconds, largest first.
    units = [
        ('y', 60 * 60 * 24 * 7 * 52),
        ('w', 60 * 60 * 24 * 7),
        ('d', 60 * 60 * 24),
        ('h', 60 * 60),
        ('m', 60),
        ('s', 1),
    ]
    seconds = int(seconds)
    if seconds < 60:
        return ' {0:2d}s'.format(seconds)
    # Walk adjacent unit pairs; emit the first unit large enough to
    # divide the duration, with the remainder in the next-smaller unit.
    for (unit1, limit1), (unit2, limit2) in zip(units, units[1:]):
        if seconds >= limit1:
            return '{0:2d}{1}{2:2d}{3}'.format(
                seconds // limit1, unit1,
                (seconds % limit1) // limit2, unit2)
    return ' ~inf'
def human_file_size(size):
    """
    Return a short human-friendly string for a file size.

    For example, depending on the number of bytes given, the result
    can be one of::

        256b
        64k
        1.1G

    Parameters
    ----------
    size : int
        The size of the file (in bytes)

    Returns
    -------
    size : str
        A human-friendly representation of the size of the file
    """
    if hasattr(size, 'unit'):
        # Import units only if necessary because the import takes a
        # significant time [#4649]
        from .. import units as u
        size = size.to(u.byte).value
    suffixes = ' kMGTPEZY'
    # Decimal scale: each step is a factor of 1000.
    if size == 0:
        num_scale = 0
    else:
        num_scale = int(math.floor(math.log(size) / math.log(1000)))
    if num_scale > 7:
        suffix = '?'
    else:
        suffix = suffixes[num_scale]
    divisor = int(math.pow(1000, num_scale))
    value = size / divisor
    str_value = str(value)
    if suffix == ' ':
        # Plain bytes: drop the fractional part entirely.
        str_value = str_value[:str_value.index('.')]
    elif str_value[2] == '.':
        str_value = str_value[:2]
    else:
        str_value = str_value[:3]
    return "{0:>3s}{1}".format(str_value, suffix)
class ProgressBar(six.Iterator):
    """
    A class to display a progress bar in the terminal.

    It is designed to be used either with the ``with`` statement::

        with ProgressBar(len(items)) as bar:
            for item in enumerate(items):
                bar.update()

    or as a generator::

        for item in ProgressBar(items):
            item.process()
    """
    def __init__(self, total_or_items, ipython_widget=False, file=None):
        """
        Parameters
        ----------
        total_or_items : int or sequence
            If an int, the number of increments in the process being
            tracked. If a sequence, the items to iterate over.
        ipython_widget : bool, optional
            If `True`, the progress bar will display as an IPython
            notebook widget.
        file : writable file-like object, optional
            The file to write the progress bar to. Defaults to
            `sys.stdout`. If ``file`` is not a tty (as determined by
            calling its `isatty` member, if any, or special case hacks
            to detect the IPython console), the progress bar will be
            completely silent.
        """
        if file is None:
            file = _get_stdout()
        # Non-tty console output: replace update() with a no-op so the
        # bar is completely silent.
        if not ipython_widget and not isatty(file):
            self.update = self._silent_update
            self._silent = True
        else:
            self._silent = False
        if isiterable(total_or_items):
            self._items = iter(total_or_items)
            self._total = len(total_or_items)
        else:
            try:
                self._total = int(total_or_items)
            except TypeError:
                raise TypeError("First argument must be int or sequence")
            else:
                self._items = iter(range(self._total))
        self._file = file
        self._start_time = time.time()
        self._human_total = human_file_size(self._total)
        self._ipython_widget = ipython_widget
        self._signal_set = False
        if not ipython_widget:
            # Track terminal resizes (SIGWINCH) so the bar width stays
            # correct; only possible when fcntl/termios/signal imported.
            self._should_handle_resize = (
                _CAN_RESIZE_TERMINAL and self._file.isatty())
            self._handle_resize()
            if self._should_handle_resize:
                signal.signal(signal.SIGWINCH, self._handle_resize)
                self._signal_set = True
            self.update(0)
            # NOTE(review): in the ipython_widget case _current_value
            # is never initialized here; update() with no argument
            # before any explicit update would fail — confirm callers
            # always pass an initial value in that mode.
    def _handle_resize(self, signum=None, frame=None):
        # Recompute the drawable bar width; 37 columns are reserved
        # for the counters, percentage and ETA text.
        terminal_width = terminal_size(self._file)[1]
        self._bar_length = terminal_width - 37
    def __enter__(self):
        return self
    def __exit__(self, exc_type, exc_value, traceback):
        # On clean exit, snap the bar to 100% and restore the default
        # SIGWINCH handler if we installed ours.
        if not self._silent:
            if exc_type is None:
                self.update(self._total)
            self._file.write('\n')
            self._file.flush()
            if self._signal_set:
                signal.signal(signal.SIGWINCH, signal.SIG_DFL)
    def __iter__(self):
        return self
    def __next__(self):
        # Generator protocol: advance the wrapped iterator and tick
        # the bar; finalize the display when the items run out.
        try:
            rv = next(self._items)
        except StopIteration:
            self.__exit__(None, None, None)
            raise
        else:
            self.update()
            return rv
    def update(self, value=None):
        """
        Update progress bar via the console or notebook accordingly.
        """
        # Update self.value
        if value is None:
            value = self._current_value + 1
        self._current_value = value
        # Choose the appropriate environment
        if self._ipython_widget:
            self._update_ipython_widget(value)
        else:
            self._update_console(value)
    def _update_console(self, value=None):
        """
        Update the progress bar to the given value (out of the total
        given to the constructor).
        """
        if self._total == 0:
            frac = 1.0
        else:
            frac = float(value) / float(self._total)
        file = self._file
        write = file.write
        # Clamp the filled portion to the bar length when over 100%.
        if frac > 1:
            bar_fill = int(self._bar_length)
        else:
            bar_fill = int(float(self._bar_length) * frac)
        write('\r|')
        color_print('=' * bar_fill, 'blue', file=file, end='')
        if bar_fill < self._bar_length:
            color_print('>', 'green', file=file, end='')
            write('-' * (self._bar_length - bar_fill - 1))
        write('|')
        # Elapsed time when done, ETA while in progress, nothing at 0.
        if value >= self._total:
            t = time.time() - self._start_time
            prefix = '     '
        elif value <= 0:
            t = None
            prefix = ''
        else:
            t = ((time.time() - self._start_time) * (1.0 - frac)) / frac
            prefix = ' ETA '
        write(' {0:>4s}/{1:>4s}'.format(
            human_file_size(value),
            self._human_total))
        write(' ({:>6.2%})'.format(frac))
        write(prefix)
        if t is not None:
            write(human_time(t))
        self._file.flush()
    def _update_ipython_widget(self, value=None):
        """
        Update the progress bar to the given value (out of a total
        given to the constructor).

        This method is for use in the IPython notebook 2+.
        """
        # Create and display an empty progress bar widget,
        # if none exists.
        if not hasattr(self, '_widget'):
            # Import only if an IPython widget, i.e., widget in iPython NB
            from IPython import version_info
            if version_info[0] < 4:
                from IPython.html import widgets
                self._widget = widgets.FloatProgressWidget()
            else:
                _IPython.get_ipython()
                from ipywidgets import widgets
                self._widget = widgets.FloatProgress()
            from IPython.display import display
            display(self._widget)
            self._widget.value = 0
        # Calculate percent completion, and update progress bar
        frac = (value/self._total)
        self._widget.value = frac * 100
        self._widget.description =' ({:>6.2%})'.format(frac)
    def _silent_update(self, value=None):
        # No-op replacement for update() on non-tty output.
        pass
    @classmethod
    def map(cls, function, items, multiprocess=False, file=None, step=100):
        """
        Does a `map` operation while displaying a progress bar with
        percentage complete.
        ::

            def work(i):
                print(i)

            ProgressBar.map(work, range(50))

        Parameters
        ----------
        function : function
            Function to call for each step
        items : sequence
            Sequence where each element is a tuple of arguments to pass to
            *function*.
        multiprocess : bool, optional
            If `True`, use the `multiprocessing` module to distribute each
            task to a different processor core.
        file : writeable file-like object, optional
            The file to write the progress bar to. Defaults to
            `sys.stdout`. If ``file`` is not a tty (as determined by
            calling its `isatty` member, if any), the scrollbar will
            be completely silent.
        step : int, optional
            Update the progress bar at least every *step* steps (default: 100).
            If ``multiprocess`` is `True`, this will affect the size
            of the chunks of ``items`` that are submitted as separate tasks
            to the process pool. A large step size may make the job
            complete faster if ``items`` is very long.
        """
        results = []
        if file is None:
            file = _get_stdout()
        with cls(len(items), file=file) as bar:
            # Refresh at most once per bar cell, but no less often
            # than every `step` items.
            default_step = max(int(float(len(items)) / bar._bar_length), 1)
            chunksize = min(default_step, step)
            if not multiprocess:
                for i, item in enumerate(items):
                    results.append(function(item))
                    if (i % chunksize) == 0:
                        bar.update(i)
            else:
                # NOTE: imap_unordered yields results as they finish,
                # so `results` is NOT ordered like `items` here.
                p = multiprocessing.Pool()
                for i, result in enumerate(
                        p.imap_unordered(function, items, chunksize=chunksize)):
                    bar.update(i)
                    results.append(result)
                p.close()
                p.join()
        return results
class Spinner(object):
    """
    A class to display a spinner in the terminal.

    It is designed to be used with the ``with`` statement::

        with Spinner("Reticulating splines", "green") as s:
            for item in enumerate(items):
                s.next()
    """
    # Frame characters; the unicode set is preferred when the
    # configuration enables unicode output.
    _default_unicode_chars = "◓◑◒◐"
    _default_ascii_chars = "-/|\\"
    def __init__(self, msg, color='default', file=None, step=1,
                 chars=None):
        """
        Parameters
        ----------
        msg : str
            The message to print
        color : str, optional
            An ANSI terminal color name. Must be one of: black, red,
            green, brown, blue, magenta, cyan, lightgrey, default,
            darkgrey, lightred, lightgreen, yellow, lightblue,
            lightmagenta, lightcyan, white.
        file : writeable file-like object, optional
            The file to write the spinner to. Defaults to
            `sys.stdout`. If ``file`` is not a tty (as determined by
            calling its `isatty` member, if any, or special case hacks
            to detect the IPython console), the spinner will be
            completely silent.
        step : int, optional
            Only update the spinner every *step* steps
        chars : str, optional
            The character sequence to use for the spinner
        """
        if file is None:
            file = _get_stdout()
        self._msg = msg
        self._color = color
        self._file = file
        self._step = step
        if chars is None:
            if conf.unicode_output:
                chars = self._default_unicode_chars
            else:
                chars = self._default_ascii_chars
        self._chars = chars
        self._silent = not isatty(file)
    def _iterator(self):
        # Infinite generator that redraws the spinner each time it is
        # advanced; falls back to ASCII frames if the unicode ones
        # cannot be written to the stream.
        chars = self._chars
        index = 0
        file = self._file
        write = file.write
        flush = file.flush
        try_fallback = True
        while True:
            write('\r')
            color_print(self._msg, self._color, file=file, end='')
            write(' ')
            try:
                if try_fallback:
                    # May return a replacement write function bound to
                    # an encoding-aware writer.
                    write = _write_with_fallback(chars[index], write, file)
                else:
                    write(chars[index])
            except UnicodeError:
                # If even _write_with_fallback failed for any reason just give
                # up on trying to use the unicode characters
                chars = self._default_ascii_chars
                write(chars[index])
                try_fallback = False  # No good will come of using this again
            flush()
            yield
            # Swallow `step - 1` extra advances before moving to the
            # next frame.
            for i in range(self._step):
                yield
            index = (index + 1) % len(chars)
    def __enter__(self):
        if self._silent:
            return self._silent_iterator()
        else:
            return self._iterator()
    def __exit__(self, exc_type, exc_value, traceback):
        # Redraw the message one last time with a final status tag.
        file = self._file
        write = file.write
        flush = file.flush
        if not self._silent:
            write('\r')
            color_print(self._msg, self._color, file=file, end='')
        if exc_type is None:
            color_print(' [Done]', 'green', file=file)
        else:
            color_print(' [Failed]', 'red', file=file)
        flush()
    def _silent_iterator(self):
        # Non-tty variant: print the message once, then do nothing.
        color_print(self._msg, self._color, file=self._file, end='')
        self._file.flush()
        while True:
            yield
class ProgressBarOrSpinner(object):
    """
    Display either a `ProgressBar` or a `Spinner` depending on whether
    the total size of the operation is known.

    It is designed to be used with the ``with`` statement::

        if file.has_length():
            length = file.get_length()
        else:
            length = None
        bytes_read = 0
        with ProgressBarOrSpinner(length) as bar:
            while file.read(blocksize):
                bytes_read += blocksize
                bar.update(bytes_read)
    """
    def __init__(self, total, msg, color='default', file=None):
        """
        Parameters
        ----------
        total : int or None
            If an int, the number of increments in the process being
            tracked and a `ProgressBar` is displayed. If `None`, a
            `Spinner` is displayed.
        msg : str
            The message to display above the `ProgressBar` or
            alongside the `Spinner`.
        color : str, optional
            The color of ``msg``, if any. Must be an ANSI terminal
            color name. Must be one of: black, red, green, brown,
            blue, magenta, cyan, lightgrey, default, darkgrey,
            lightred, lightgreen, yellow, lightblue, lightmagenta,
            lightcyan, white.
        file : writable file-like object, optional
            The file to write to. Defaults to `sys.stdout`. If
            ``file`` is not a tty (as determined by calling its `isatty`
            member, if any), only ``msg`` will be displayed: the
            `ProgressBar` or `Spinner` will be silent.
        """
        if file is None:
            file = _get_stdout()
        # A spinner is used when the total is unknown, and also on
        # non-tty streams (where ProgressBar would misbehave).
        use_spinner = total is None or not isatty(file)
        self._is_spinner = use_spinner
        if use_spinner:
            self._obj = Spinner(msg, color=color, file=file)
        else:
            color_print(msg, color, file=file)
            self._obj = ProgressBar(total, file=file)

    def __enter__(self):
        self._iter = self._obj.__enter__()
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        return self._obj.__exit__(exc_type, exc_value, traceback)

    def update(self, value):
        """
        Update the progress bar to the given value (out of the total
        given to the constructor); a spinner simply advances a frame.
        """
        if self._is_spinner:
            next(self._iter)
        else:
            self._obj.update(value)
def print_code_line(line, col=None, file=None, tabwidth=8, width=70):
    """
    Prints a line of source code, highlighting a particular character
    position in the line. Useful for displaying the context of error
    messages.

    If the line is more than ``width`` characters, the line is truncated
    accordingly and '…' characters are inserted at the front and/or
    end.

    It looks like this::

        there_is_a_syntax_error_here :
                                     ^

    Parameters
    ----------
    line : unicode
        The line of code to display
    col : int, optional
        The character in the line to highlight. ``col`` must be less
        than ``len(line)``.
    file : writeable file-like object, optional
        Where to write to. Defaults to `sys.stdout`.
    tabwidth : int, optional
        The number of spaces per tab (``'\\t'``) character. Default
        is 8. All tabs will be converted to spaces to ensure that the
        caret lines up with the correct column.
    width : int, optional
        The width of the display, beyond which the line will be
        truncated. Defaults to 70 (this matches the default in the
        standard library's `textwrap` module).
    """
    if file is None:
        file = _get_stdout()
    if conf.unicode_output:
        ellipsis = '…'
    else:
        ellipsis = '...'
    write = file.write
    if col is not None:
        # NOTE(review): assert is stripped under `python -O`; callers
        # must not rely on this for input validation.
        assert col < len(line)
        # Account for tab expansion to the left of the caret position.
        ntabs = line[:col].count('\t')
        col += ntabs * (tabwidth - 1)
    line = line.rstrip('\n')
    line = line.replace('\t', ' ' * tabwidth)
    if col is not None and col > width:
        # Caret falls beyond the display width: trim the front of the
        # line and shift the caret column accordingly.
        # NOTE(review): 'new_col' is computed, then unconditionally
        # overwritten with 'col' two lines below — the truncation
        # arithmetic looks suspicious; confirm intended behavior.
        new_col = min(width // 2, len(line) - col)
        offset = col - new_col
        line = line[offset + len(ellipsis):]
        width -= len(ellipsis)
        new_col = col
        col -= offset
        color_print(ellipsis, 'darkgrey', file=file, end='')
    if len(line) > width:
        # Truncate the tail and mark it with a trailing ellipsis.
        write(line[:width - len(ellipsis)])
        color_print(ellipsis, 'darkgrey', file=file)
    else:
        write(line)
        write('\n')
    if col is not None:
        # Caret line pointing at the highlighted column.
        write(' ' * col)
        color_print('^', 'red', file=file)
# The following four Getch* classes implement unbuffered character reading from
# stdin on Windows, linux, MacOSX. This is taken directly from ActiveState
# Code Recipes:
# http://code.activestate.com/recipes/134892-getch-like-unbuffered-character-reading-from-stdin/
#
class Getch(object):
    """Get a single character from standard input without screen echo.
    Returns
    -------
    char : str (one character)
    """
    def __init__(self):
        # Probe the platform backends in order of preference.  Each
        # backend raises at construction time when its platform modules
        # are unavailable, which tells us to fall back to the next one.
        candidates = ((_GetchWindows, ImportError),
                      (_GetchMacCarbon, (ImportError, AttributeError)))
        for factory, failure in candidates:
            try:
                self.impl = factory()
                break
            except failure:
                continue
        else:
            # Generic termios/tty implementation for other Unixes.
            self.impl = _GetchUnix()
    def __call__(self):
        # Delegate to whichever backend was selected at construction.
        return self.impl()
class _GetchUnix(object):
def __init__(self):
import tty # pylint: disable=W0611
import sys # pylint: disable=W0611
# import termios now or else you'll get the Unix
# version on the Mac
import termios # pylint: disable=W0611
def __call__(self):
import sys
import tty
import termios
fd = sys.stdin.fileno()
old_settings = termios.tcgetattr(fd)
try:
tty.setraw(sys.stdin.fileno())
ch = sys.stdin.read(1)
finally:
termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
return ch
class _GetchWindows(object):
    """Read a single raw character from stdin via the Windows console API."""
    def __init__(self):
        # Importing msvcrt makes construction fail with ImportError on
        # non-Windows platforms, letting Getch fall back to another
        # backend.  Keep a reference so __call__ need not re-import.
        import msvcrt
        self._msvcrt = msvcrt
    def __call__(self):
        return self._msvcrt.getch()
class _GetchMacCarbon(object):
    """
    A function which returns the current ASCII key that is down;
    if no ASCII key is down, the null string is returned. The
    page http://www.mactech.com/macintosh-c/chap02-1.html was
    very helpful in figuring out how to do this.

    NOTE: relies on the legacy ``Carbon`` bindings, which only exist in
    Python 2 on classic Mac OS X builds.
    """
    def __init__(self):
        # Probe for the Carbon event submodule; construction fails on
        # platforms where it is missing so Getch can fall back.
        import Carbon
        Carbon.Evt # see if it has this (in Unix, it doesn't)
    def __call__(self):
        import Carbon
        # Poll (non-blocking) for a pending key-down event.
        if Carbon.Evt.EventAvail(0x0008)[0] == 0: # 0x0008 is the keyDownMask
            return ''
        else:
            #
            # The event contains the following info:
            # (what,msg,when,where,mod)=Carbon.Evt.GetNextEvent(0x0008)[1]
            #
            # The message (msg) contains the ASCII char which is
            # extracted with the 0x000000FF charCodeMask; this
            # number is converted to an ASCII character with chr() and
            # returned
            #
            (what, msg, when, where, mod) = Carbon.Evt.GetNextEvent(0x0008)[1]
            return chr(msg & 0x000000FF)
|
joergdietrich/astropy
|
astropy/utils/console.py
|
Python
|
bsd-3-clause
| 33,248 | 0.000211 |
#
# Copyright 2013 eNovance <licensing@enovance.com>
#
# Authors: Mehdi Abaakouk <mehdi.abaakouk@enovance.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import itertools
from ceilometer.alarm import evaluator
from ceilometer.i18n import _
from ceilometer.openstack.common import log
LOG = log.getLogger(__name__)
COMPARATORS = {'and': all, 'or': any}
class CombinationEvaluator(evaluator.Evaluator):
    """Evaluator for 'combination' alarms, whose state is derived by
    and/or-combining the states of a set of underlying alarms."""
    def _get_alarm_state(self, alarm_id):
        # Fetch the current state of one underlying alarm; ``None``
        # signals that the state could not be retrieved.
        try:
            alarm = self._client.alarms.get(alarm_id)
        except Exception:
            LOG.exception(_('alarm retrieval failed'))
            return None
        return alarm.state
    def _sufficient_states(self, alarm, states):
        """Check for the sufficiency of the data for evaluation.
        Ensure that there is sufficient data for evaluation,
        transitioning to unknown otherwise.

        :param states: list of (alarm_id, state) pairs for the
            underlying alarms.
        :returns: True when the combination alarm can be evaluated.
        """
        # note(sileht): alarm can be evaluated only with
        # stable state of other alarm
        alarms_missing_states = [alarm_id for alarm_id, state in states
                                 if not state or state == evaluator.UNKNOWN]
        sufficient = len(alarms_missing_states) == 0
        if not sufficient and alarm.rule['operator'] == 'or':
            # if operator is 'or' and there is one alarm, then the combinated
            # alarm's state should be 'alarm'
            sufficient = bool([alarm_id for alarm_id, state in states
                               if state == evaluator.ALARM])
        if not sufficient and alarm.state != evaluator.UNKNOWN:
            # Not enough data: push the combination alarm to UNKNOWN,
            # recording which underlying alarms were missing states.
            reason = (_('Alarms %(alarm_ids)s'
                        ' are in unknown state') %
                      {'alarm_ids': ",".join(alarms_missing_states)})
            reason_data = self._reason_data(alarms_missing_states)
            self._refresh(alarm, evaluator.UNKNOWN, reason, reason_data)
        return sufficient
    @staticmethod
    def _reason_data(alarm_ids):
        """Create a reason data dictionary for this evaluator type."""
        return {'type': 'combination', 'alarm_ids': alarm_ids}
    @classmethod
    def _reason(cls, alarm, state, underlying_states):
        """Fabricate reason string."""
        # A transition occurred if the newly computed state differs from
        # the alarm's current state; wording differs accordingly.
        transition = alarm.state != state
        alarms_to_report = [alarm_id for alarm_id, alarm_state
                            in underlying_states
                            if alarm_state == state]
        reason_data = cls._reason_data(alarms_to_report)
        if transition:
            return (_('Transition to %(state)s due to alarms'
                      ' %(alarm_ids)s in state %(state)s') %
                    {'state': state,
                     'alarm_ids': ",".join(alarms_to_report)}), reason_data
        return (_('Remaining as %(state)s due to alarms'
                  ' %(alarm_ids)s in state %(state)s') %
                {'state': state,
                 'alarm_ids': ",".join(alarms_to_report)}), reason_data
    def _transition(self, alarm, underlying_states):
        """Transition alarm state if necessary."""
        op = alarm.rule['operator']
        # 'and' -> all underlying alarms must be in ALARM; 'or' -> any.
        if COMPARATORS[op](s == evaluator.ALARM
                           for __, s in underlying_states):
            state = evaluator.ALARM
        else:
            state = evaluator.OK
        continuous = alarm.repeat_actions
        reason, reason_data = self._reason(alarm, state, underlying_states)
        # Refresh on a state change, or on every evaluation when the
        # alarm is configured to repeat its actions.
        if alarm.state != state or continuous:
            self._refresh(alarm, state, reason, reason_data)
    def evaluate(self, alarm):
        """Evaluate one combination alarm end-to-end."""
        if not self.within_time_constraint(alarm):
            LOG.debug(_('Attempted to evaluate alarm %s, but it is not '
                        'within its time constraint.') % alarm.alarm_id)
            return
        # Python 2 code (``itertools.imap``): here ``zip`` returns a
        # list, so ``states`` can safely be iterated twice below.
        states = zip(alarm.rule['alarm_ids'],
                     itertools.imap(self._get_alarm_state,
                                    alarm.rule['alarm_ids']))
        if self._sufficient_states(alarm, states):
            self._transition(alarm, states)
|
yanheven/ceilometer
|
ceilometer/alarm/evaluator/combination.py
|
Python
|
apache-2.0
| 4,511 | 0 |
class Solution(object):
    def findMaxConsecutiveOnes(self, nums):
        """Return the length of the longest run of 1s obtainable from
        *nums* when at most one 0 may be flipped to a 1.

        :type nums: List[int]
        :rtype: int
        """
        # Sliding window containing at most one zero: advance the right
        # edge, and shrink from the left whenever a second zero enters.
        best = 0
        left = 0
        zeros_in_window = 0
        for right, value in enumerate(nums):
            if value == 0:
                zeros_in_window += 1
            while zeros_in_window > 1:
                if nums[left] == 0:
                    zeros_in_window -= 1
                left += 1
            best = max(best, right - left + 1)
        return best
|
daicang/Leetcode-solutions
|
487-max-consecutive-ones-ii.py
|
Python
|
mit
| 773 | 0.006468 |
from data_collection.management.commands import BaseXpressDemocracyClubCsvImporter
class Command(BaseXpressDemocracyClubCsvImporter):
    """Import polling-station and address data for Bradford council
    (GSS code E08000032) for the 2017-06-08 UK general election from an
    Xpress / Democracy Club tab-separated export."""
    # GSS code of the council this importer covers.
    council_id = 'E08000032'
    # NOTE(review): the doubled ".tsvJune2017.tsv" suffix looks odd but is
    # presumably the literal name of the uploaded file — confirm against
    # the data drop before "fixing" it.
    addresses_name = 'parl.2017-06-08/Version 1/Democracy_Club__08June2017.tsvJune2017.tsv'
    stations_name = 'parl.2017-06-08/Version 1/Democracy_Club__08June2017.tsvJune2017.tsv'
    elections = ['parl.2017-06-08']
    # The source export is tab-separated rather than comma-separated.
    csv_delimiter = '\t'
|
chris48s/UK-Polling-Stations
|
polling_stations/apps/data_collection/management/commands/import_bradford.py
|
Python
|
bsd-3-clause
| 408 | 0.009804 |
# -*- coding: utf-8 -*-
##############################################################################
##############################################################################
{
    # Human-readable module name shown in the OpenERP/Odoo apps list.
    # Fixed typo: 'Netixis' -> 'Natixis', matching the summary, the
    # website and every data file in this module.
    'name': 'Account Banking Natixis Direct Debit',
    'summary': 'Create Natixis files for Direct Debit',
    'version': '7.0.0.2.0',
    'license': 'AGPL-3',
    'author': "Mind And Go",
    'website': "http://www.mind-and-go.com",
    'category': 'Banking addons',
    # Modules that must be installed before this one.
    'depends': [
        'account_direct_debit',
        'account_banking_pain_base',
        'account_payment_partner',
        'account'
    ],
    # Python libraries required at runtime but not shipped with OpenERP.
    'external_dependencies': {
        'python': ['unidecode', 'lxml'],
    },
    # XML/CSV resources loaded when the module is installed or updated.
    'data': [
        'views/account_banking_natixis_view.xml',
        'views/company_view.xml',
        'wizard/export_natixis_view.xml',
        'security/ir.model.access.csv',
        'views/invoice.xml',
        'data/payment_type_natixis.xml',
        'views/account_payment_view.xml',
        'views/partner.xml',
        'views/natixis_file_sequence.xml',
    ],
    'demo': ['sepa_direct_debit_demo.xml'],
    'description': '''
    Module to export direct debit payment orders in Natixis TXT file format.
    ''',
    'active': False,
    'installable': True,
}
|
noemis-fr/custom
|
account_banking_natixis_direct_debit/__openerp__.py
|
Python
|
gpl-3.0
| 1,248 | 0 |
# Authors: Alexandre Gramfort <alexandre.gramfort@inria.fr>
# Mathieu Blondel <mathieu@mblondel.org>
# Olivier Grisel <olivier.grisel@ensta.org>
# Andreas Mueller <amueller@ais.uni-bonn.de>
# Joel Nothman <joel.nothman@gmail.com>
# Hamzeh Alsalhi <ha258@cornell.edu>
# License: BSD 3 clause
from collections import defaultdict
import itertools
import array
import warnings
import numpy as np
import scipy.sparse as sp
from ..base import BaseEstimator, TransformerMixin
from ..utils.fixes import np_version
from ..utils.fixes import sparse_min_max
from ..utils.fixes import astype
from ..utils.fixes import in1d
from ..utils import deprecated, column_or_1d
from ..utils.validation import check_array
from ..utils.multiclass import unique_labels
from ..utils.multiclass import type_of_target
from ..externals import six
zip = six.moves.zip
map = six.moves.map
__all__ = [
'label_binarize',
'LabelBinarizer',
'LabelEncoder',
]
def _check_numpy_unicode_bug(labels):
    """Check that user is not subject to an old numpy bug
    Fixed in master before 1.7.0:
      https://github.com/numpy/numpy/pull/243
    """
    # Only NumPy builds older than 1.7.0 are affected, and only when the
    # labels array holds unicode data (dtype kind 'U').
    affected_numpy = np_version[:3] < (1, 7, 0)
    if affected_numpy and labels.dtype.kind == 'U':
        raise RuntimeError("NumPy < 1.7.0 does not implement searchsorted"
                           " on unicode data correctly. Please upgrade"
                           " NumPy to use LabelEncoder with unicode inputs.")
class LabelEncoder(BaseEstimator, TransformerMixin):
    """Encode labels with value between 0 and n_classes-1.
    Attributes
    ----------
    classes_ : array of shape (n_class,)
        Holds the label for each class.
    Examples
    --------
    `LabelEncoder` can be used to normalize labels.
    >>> from sklearn import preprocessing
    >>> le = preprocessing.LabelEncoder()
    >>> le.fit([1, 2, 2, 6])
    LabelEncoder()
    >>> le.classes_
    array([1, 2, 6])
    >>> le.transform([1, 1, 2, 6]) #doctest: +ELLIPSIS
    array([0, 0, 1, 2]...)
    >>> le.inverse_transform([0, 0, 1, 2])
    array([1, 1, 2, 6])
    It can also be used to transform non-numerical labels (as long as they are
    hashable and comparable) to numerical labels.
    >>> le = preprocessing.LabelEncoder()
    >>> le.fit(["paris", "paris", "tokyo", "amsterdam"])
    LabelEncoder()
    >>> list(le.classes_)
    ['amsterdam', 'paris', 'tokyo']
    >>> le.transform(["tokyo", "tokyo", "paris"]) #doctest: +ELLIPSIS
    array([2, 2, 1]...)
    >>> list(le.inverse_transform([2, 2, 1]))
    ['tokyo', 'tokyo', 'paris']
    """
    def _check_fitted(self):
        # ``classes_`` only exists after ``fit``/``fit_transform``.
        if not hasattr(self, "classes_"):
            raise ValueError("LabelEncoder was not fitted yet.")
    def fit(self, y):
        """Fit label encoder
        Parameters
        ----------
        y : array-like of shape (n_samples,)
            Target values.
        Returns
        -------
        self : returns an instance of self.
        """
        y = column_or_1d(y, warn=True)
        _check_numpy_unicode_bug(y)
        # The sorted unique labels define the integer encoding.
        self.classes_ = np.unique(y)
        return self
    def fit_transform(self, y):
        """Fit label encoder and return encoded labels
        Parameters
        ----------
        y : array-like of shape [n_samples]
            Target values.
        Returns
        -------
        y : array-like of shape [n_samples]
        """
        y = column_or_1d(y, warn=True)
        _check_numpy_unicode_bug(y)
        # ``return_inverse`` gives the encoded labels in one pass.
        self.classes_, encoded = np.unique(y, return_inverse=True)
        return encoded
    def transform(self, y):
        """Transform labels to normalized encoding.
        Parameters
        ----------
        y : array-like of shape [n_samples]
            Target values.
        Returns
        -------
        y : array-like of shape [n_samples]
        """
        self._check_fitted()
        seen = np.unique(y)
        _check_numpy_unicode_bug(seen)
        # Reject any label that was not observed during ``fit``.
        unseen = np.setdiff1d(seen, self.classes_)
        if len(unseen) > 0:
            raise ValueError("y contains new labels: %s" % str(unseen))
        return np.searchsorted(self.classes_, y)
    def inverse_transform(self, y):
        """Transform labels back to original encoding.
        Parameters
        ----------
        y : numpy array of shape [n_samples]
            Target values.
        Returns
        -------
        y : numpy array of shape [n_samples]
        """
        self._check_fitted()
        # Fancy-index the stored classes with the encoded values.
        indices = np.asarray(y)
        return self.classes_[indices]
class LabelBinarizer(BaseEstimator, TransformerMixin):
    """Binarize labels in a one-vs-all fashion
    Several regression and binary classification algorithms are
    available in the scikit. A simple way to extend these algorithms
    to the multi-class classification case is to use the so-called
    one-vs-all scheme.
    At learning time, this simply consists in learning one regressor
    or binary classifier per class. In doing so, one needs to convert
    multi-class labels to binary labels (belong or does not belong
    to the class). LabelBinarizer makes this process easy with the
    transform method.
    At prediction time, one assigns the class for which the corresponding
    model gave the greatest confidence. LabelBinarizer makes this easy
    with the inverse_transform method.
    Parameters
    ----------
    neg_label : int (default: 0)
        Value with which negative labels must be encoded.
    pos_label : int (default: 1)
        Value with which positive labels must be encoded.
    sparse_output : boolean (default: False)
        True if the returned array from transform is desired to be in sparse
        CSR format.
    Attributes
    ----------
    classes_ : array of shape [n_class]
        Holds the label for each class.
    y_type_ : str,
        Represents the type of the target data as evaluated by
        utils.multiclass.type_of_target. Possible type are 'continuous',
        'continuous-multioutput', 'binary', 'multiclass',
        'mutliclass-multioutput', 'multilabel-sequences',
        'multilabel-indicator', and 'unknown'.
    multilabel_ : boolean
        True if the transformer was fitted on a multilabel rather than a
        multiclass set of labels. The multilabel_ attribute is deprecated
        and will be removed in 0.18
    sparse_input_ : boolean,
        True if the input data to transform is given as a sparse matrix, False
        otherwise.
    indicator_matrix_ : str
        'sparse' when the input data to tansform is a multilable-indicator and
        is sparse, None otherwise. The indicator_matrix_ attribute is
        deprecated as of version 0.16 and will be removed in 0.18
    Examples
    --------
    >>> from sklearn import preprocessing
    >>> lb = preprocessing.LabelBinarizer()
    >>> lb.fit([1, 2, 6, 4, 2])
    LabelBinarizer(neg_label=0, pos_label=1, sparse_output=False)
    >>> lb.classes_
    array([1, 2, 4, 6])
    >>> lb.transform([1, 6])
    array([[1, 0, 0, 0],
           [0, 0, 0, 1]])
    Binary targets transform to a column vector
    >>> lb = preprocessing.LabelBinarizer()
    >>> lb.fit_transform(['yes', 'no', 'no', 'yes'])
    array([[1],
           [0],
           [0],
           [1]])
    Passing a 2D matrix for multilabel classification
    >>> import numpy as np
    >>> lb.fit(np.array([[0, 1, 1], [1, 0, 0]]))
    LabelBinarizer(neg_label=0, pos_label=1, sparse_output=False)
    >>> lb.classes_
    array([0, 1, 2])
    >>> lb.transform([0, 1, 2, 1])
    array([[1, 0, 0],
           [0, 1, 0],
           [0, 0, 1],
           [0, 1, 0]])
    See also
    --------
    label_binarize : function to perform the transform operation of
        LabelBinarizer with fixed classes.
    """
    def __init__(self, neg_label=0, pos_label=1, sparse_output=False):
        # Validate the encoding values up front: the negative value must
        # be strictly below the positive one, and sparse output can only
        # represent an implicit zero negative label.
        if neg_label >= pos_label:
            raise ValueError("neg_label={0} must be strictly less than "
                             "pos_label={1}.".format(neg_label, pos_label))
        if sparse_output and (pos_label == 0 or neg_label != 0):
            raise ValueError("Sparse binarization is only supported with non "
                             "zero pos_label and zero neg_label, got "
                             "pos_label={0} and neg_label={1}"
                             "".format(pos_label, neg_label))
        self.neg_label = neg_label
        self.pos_label = pos_label
        self.sparse_output = sparse_output
    @property
    @deprecated("Attribute indicator_matrix_ is deprecated and will be "
                "removed in 0.17. Use 'y_type_ == 'multilabel-indicator'' "
                "instead")
    def indicator_matrix_(self):
        # Backwards-compatibility shim; use ``y_type_`` directly instead.
        return self.y_type_ == 'multilabel-indicator'
    @property
    @deprecated("Attribute multilabel_ is deprecated and will be removed "
                "in 0.17. Use 'y_type_.startswith('multilabel')' "
                "instead")
    def multilabel_(self):
        # Backwards-compatibility shim; use ``y_type_`` directly instead.
        return self.y_type_.startswith('multilabel')
    def _check_fitted(self):
        # ``classes_`` is only set by a successful call to ``fit``.
        if not hasattr(self, "classes_"):
            raise ValueError("LabelBinarizer was not fitted yet.")
    def fit(self, y):
        """Fit label binarizer
        Parameters
        ----------
        y : numpy array of shape (n_samples,) or (n_samples, n_classes)
            Target values. The 2-d matrix should only contain 0 and 1,
            represents multilabel classification.
        Returns
        -------
        self : returns an instance of self.
        """
        self.y_type_ = type_of_target(y)
        if 'multioutput' in self.y_type_:
            raise ValueError("Multioutput target data is not supported with "
                             "label binarization")
        # Remember whether the fit input was sparse so inverse_transform
        # can return a matching container type.
        self.sparse_input_ = sp.issparse(y)
        self.classes_ = unique_labels(y)
        return self
    def transform(self, y):
        """Transform multi-class labels to binary labels
        The output of transform is sometimes referred to by some authors as the
        1-of-K coding scheme.
        Parameters
        ----------
        y : numpy array or sparse matrix of shape (n_samples,) or
            (n_samples, n_classes) Target values. The 2-d matrix should only
            contain 0 and 1, represents multilabel classification. Sparse
            matrix can be CSR, CSC, COO, DOK, or LIL.
        Returns
        -------
        Y : numpy array or CSR matrix of shape [n_samples, n_classes]
            Shape will be [n_samples, 1] for binary problems.
        """
        self._check_fitted()
        y_is_multilabel = type_of_target(y).startswith('multilabel')
        if y_is_multilabel and not self.y_type_.startswith('multilabel'):
            raise ValueError("The object was not fitted with multilabel"
                             " input.")
        # Delegate the actual binarization to the module-level helper,
        # using the classes and encoding recorded at fit time.
        return label_binarize(y, self.classes_,
                              pos_label=self.pos_label,
                              neg_label=self.neg_label,
                              sparse_output=self.sparse_output)
    def inverse_transform(self, Y, threshold=None):
        """Transform binary labels back to multi-class labels
        Parameters
        ----------
        Y : numpy array or sparse matrix with shape [n_samples, n_classes]
            Target values. All sparse matrices are converted to CSR before
            inverse transformation.
        threshold : float or None
            Threshold used in the binary and multi-label cases.
            Use 0 when:
                - Y contains the output of decision_function (classifier)
            Use 0.5 when:
                - Y contains the output of predict_proba
            If None, the threshold is assumed to be half way between
            neg_label and pos_label.
        Returns
        -------
        y : numpy array or CSR matrix of shape [n_samples] Target values.
        Notes
        -----
        In the case when the binary labels are fractional
        (probabilistic), inverse_transform chooses the class with the
        greatest value. Typically, this allows to use the output of a
        linear model's decision_function method directly as the input
        of inverse_transform.
        """
        self._check_fitted()
        if threshold is None:
            # Midpoint of the two encoding values, e.g. 0.5 for the
            # default neg_label=0 / pos_label=1 encoding.
            threshold = (self.pos_label + self.neg_label) / 2.
        if self.y_type_ == "multiclass":
            y_inv = _inverse_binarize_multiclass(Y, self.classes_)
        else:
            y_inv = _inverse_binarize_thresholding(Y, self.y_type_,
                                                   self.classes_, threshold)
        # Return the same container type (sparse/dense) as the fit input.
        if self.sparse_input_:
            y_inv = sp.csr_matrix(y_inv)
        elif sp.issparse(y_inv):
            y_inv = y_inv.toarray()
        return y_inv
def label_binarize(y, classes, neg_label=0, pos_label=1,
                   sparse_output=False, multilabel=None):
    """Binarize labels in a one-vs-all fashion
    Several regression and binary classification algorithms are
    available in the scikit. A simple way to extend these algorithms
    to the multi-class classification case is to use the so-called
    one-vs-all scheme.
    This function makes it possible to compute this transformation for a
    fixed set of class labels known ahead of time.
    Parameters
    ----------
    y : array-like
        Sequence of integer labels or multilabel data to encode.
    classes : array-like of shape [n_classes]
        Uniquely holds the label for each class.
    neg_label : int (default: 0)
        Value with which negative labels must be encoded.
    pos_label : int (default: 1)
        Value with which positive labels must be encoded.
    sparse_output : boolean (default: False),
        Set to true if output binary array is desired in CSR sparse format
    Returns
    -------
    Y : numpy array or CSR matrix of shape [n_samples, n_classes]
        Shape will be [n_samples, 1] for binary problems.
    Examples
    --------
    >>> from sklearn.preprocessing import label_binarize
    >>> label_binarize([1, 6], classes=[1, 2, 4, 6])
    array([[1, 0, 0, 0],
           [0, 0, 0, 1]])
    The class ordering is preserved:
    >>> label_binarize([1, 6], classes=[1, 6, 4, 2])
    array([[1, 0, 0, 0],
           [0, 1, 0, 0]])
    Binary targets transform to a column vector
    >>> label_binarize(['yes', 'no', 'no', 'yes'], classes=['no', 'yes'])
    array([[1],
           [0],
           [0],
           [1]])
    See also
    --------
    LabelBinarizer : class used to wrap the functionality of label_binarize and
        allow for fitting to classes independently of the transform operation
    """
    if not isinstance(y, list):
        # XXX Workaround that will be removed when list of list format is
        # dropped
        y = check_array(y, accept_sparse='csr', ensure_2d=False)
    # Same encoding validation as LabelBinarizer.__init__.
    if neg_label >= pos_label:
        raise ValueError("neg_label={0} must be strictly less than "
                         "pos_label={1}.".format(neg_label, pos_label))
    if (sparse_output and (pos_label == 0 or neg_label != 0)):
        raise ValueError("Sparse binarization is only supported with non "
                         "zero pos_label and zero neg_label, got "
                         "pos_label={0} and neg_label={1}"
                         "".format(pos_label, neg_label))
    if multilabel is not None:
        warnings.warn("The multilabel parameter is deprecated as of version "
                      "0.15 and will be removed in 0.17. The parameter is no "
                      "longer necessary because the value is automatically "
                      "inferred.", DeprecationWarning)
    # To account for pos_label == 0 in the dense case
    pos_switch = pos_label == 0
    if pos_switch:
        # Temporarily encode positives as -neg_label; swapped back below.
        pos_label = -neg_label
    y_type = type_of_target(y)
    if 'multioutput' in y_type:
        raise ValueError("Multioutput target data is not supported with label "
                         "binarization")
    n_samples = y.shape[0] if sp.issparse(y) else len(y)
    n_classes = len(classes)
    classes = np.asarray(classes)
    if y_type == "binary":
        if len(classes) == 1:
            # Degenerate single-class problem: everything is negative.
            Y = np.zeros((len(y), 1), dtype=np.int)
            Y += neg_label
            return Y
        elif len(classes) >= 3:
            y_type = "multiclass"
    sorted_class = np.sort(classes)
    if (y_type == "multilabel-indicator" and classes.size != y.shape[1]):
        raise ValueError("classes {0} missmatch with the labels {1}"
                         "found in the data".format(classes, unique_labels(y)))
    if y_type in ("binary", "multiclass"):
        y = column_or_1d(y)
        # pick out the known labels from y
        y_in_classes = in1d(y, classes)
        y_seen = y[y_in_classes]
        indices = np.searchsorted(sorted_class, y_seen)
        # One row per sample; rows whose label is unknown stay empty.
        indptr = np.hstack((0, np.cumsum(y_in_classes)))
        data = np.empty_like(indices)
        data.fill(pos_label)
        Y = sp.csr_matrix((data, indices, indptr),
                          shape=(n_samples, n_classes))
    elif y_type == "multilabel-indicator":
        Y = sp.csr_matrix(y)
        if pos_label != 1:
            # Re-encode the stored ones with the requested positive value.
            data = np.empty_like(Y.data)
            data.fill(pos_label)
            Y.data = data
    elif y_type == "multilabel-sequences":
        Y = MultiLabelBinarizer(classes=classes,
                                sparse_output=sparse_output).fit_transform(y)
        if sp.issparse(Y):
            Y.data[:] = pos_label
        else:
            Y[Y == 1] = pos_label
        return Y
    if not sparse_output:
        Y = Y.toarray()
        Y = astype(Y, int, copy=False)
        if neg_label != 0:
            Y[Y == 0] = neg_label
        if pos_switch:
            # Undo the temporary -neg_label encoding for pos_label == 0.
            Y[Y == pos_label] = 0
    else:
        Y.data = astype(Y.data, int, copy=False)
    # preserve label ordering
    if np.any(classes != sorted_class):
        indices = np.argsort(classes)
        Y = Y[:, indices]
    if y_type == "binary":
        # Binary problems are reported as a single column (the positive
        # class indicator).
        if sparse_output:
            Y = Y.getcol(-1)
        else:
            Y = Y[:, -1].reshape((-1, 1))
    return Y
def _inverse_binarize_multiclass(y, classes):
    """Inverse label binarization transformation for multiclass.
    Multiclass uses the maximal score instead of a threshold.
    """
    classes = np.asarray(classes)
    if sp.issparse(y):
        # Find the argmax for each row in y where y is a CSR matrix
        y = y.tocsr()
        n_samples, n_outputs = y.shape
        outputs = np.arange(n_outputs)
        row_max = sparse_min_max(y, 1)[1]
        row_nnz = np.diff(y.indptr)
        # Broadcast each row's maximum over that row's stored entries.
        y_data_repeated_max = np.repeat(row_max, row_nnz)
        # picks out all indices obtaining the maximum per row
        y_i_all_argmax = np.flatnonzero(y_data_repeated_max == y.data)
        # For corner case where last row has a max of 0
        if row_max[-1] == 0:
            y_i_all_argmax = np.append(y_i_all_argmax, [len(y.data)])
        # Gets the index of the first argmax in each row from y_i_all_argmax
        index_first_argmax = np.searchsorted(y_i_all_argmax, y.indptr[:-1])
        # first argmax of each row
        y_ind_ext = np.append(y.indices, [0])
        y_i_argmax = y_ind_ext[y_i_all_argmax[index_first_argmax]]
        # Handle rows of all 0
        y_i_argmax[np.where(row_nnz == 0)[0]] = 0
        # Handles rows with max of 0 that contain negative numbers
        samples = np.arange(n_samples)[(row_nnz > 0) &
                                       (row_max.ravel() == 0)]
        for i in samples:
            # The argmax is any column NOT stored in the row (an implicit
            # zero beats the stored negative entries); take the first.
            ind = y.indices[y.indptr[i]:y.indptr[i+1]]
            y_i_argmax[i] = classes[np.setdiff1d(outputs, ind)][0]
        return classes[y_i_argmax]
    else:
        # Dense case: plain row-wise argmax, clipped into valid range.
        return classes.take(y.argmax(axis=1), mode="clip")
def _inverse_binarize_thresholding(y, output_type, classes, threshold):
    """Inverse label binarization transformation using thresholding."""
    # Sanity-check that y's width is consistent with the declared
    # output_type and the number of classes.
    if output_type == "binary" and y.ndim == 2 and y.shape[1] > 2:
        raise ValueError("output_type='binary', but y.shape = {0}".
                         format(y.shape))
    if output_type != "binary" and y.shape[1] != len(classes):
        raise ValueError("The number of class is not equal to the number of "
                         "dimension of y.")
    classes = np.asarray(classes)
    # Perform thresholding
    if sp.issparse(y):
        if threshold > 0:
            # A positive threshold keeps implicit zeros negative, so the
            # matrix can be thresholded in place and stay sparse.
            if y.format not in ('csr', 'csc'):
                y = y.tocsr()
            y.data = np.array(y.data > threshold, dtype=np.int)
            y.eliminate_zeros()
        else:
            # A non-positive threshold would turn implicit zeros into
            # ones, so densify first.
            y = np.array(y.toarray() > threshold, dtype=np.int)
    else:
        y = np.array(y > threshold, dtype=np.int)
    # Inverse transform data
    if output_type == "binary":
        if sp.issparse(y):
            y = y.toarray()
        if y.ndim == 2 and y.shape[1] == 2:
            # Two columns: the second column is the positive indicator.
            return classes[y[:, 1]]
        else:
            if len(classes) == 1:
                # Single-class problem: every sample maps to that class.
                y = np.empty(len(y), dtype=classes.dtype)
                y.fill(classes[0])
                return y
            else:
                return classes[y.ravel()]
    elif output_type == "multilabel-indicator":
        return y
    elif output_type == "multilabel-sequences":
        warnings.warn('Direct support for sequence of sequences multilabel '
                      'representation will be unavailable from version 0.17. '
                      'Use sklearn.preprocessing.MultiLabelBinarizer to '
                      'convert to a label indicator representation.',
                      DeprecationWarning)
        mlb = MultiLabelBinarizer(classes=classes).fit([])
        return mlb.inverse_transform(y)
    else:
        raise ValueError("{0} format is not supported".format(output_type))
class MultiLabelBinarizer(BaseEstimator, TransformerMixin):
    """Transform between iterable of iterables and a multilabel format
    Although a list of sets or tuples is a very intuitive format for multilabel
    data, it is unwieldy to process. This transformer converts between this
    intuitive format and the supported multilabel format: a (samples x classes)
    binary matrix indicating the presence of a class label.
    Parameters
    ----------
    classes : array-like of shape [n_classes] (optional)
        Indicates an ordering for the class labels
    sparse_output : boolean (default: False),
        Set to true if output binary array is desired in CSR sparse format
    Attributes
    ----------
    classes_ : array of labels
        A copy of the `classes` parameter where provided,
        or otherwise, the sorted set of classes found when fitting.
    Examples
    --------
    >>> mlb = MultiLabelBinarizer()
    >>> mlb.fit_transform([(1, 2), (3,)])
    array([[1, 1, 0],
           [0, 0, 1]])
    >>> mlb.classes_
    array([1, 2, 3])
    >>> mlb.fit_transform([set(['sci-fi', 'thriller']), set(['comedy'])])
    array([[0, 1, 1],
           [1, 0, 0]])
    >>> list(mlb.classes_)
    ['comedy', 'sci-fi', 'thriller']
    """
    def __init__(self, classes=None, sparse_output=False):
        self.classes = classes
        self.sparse_output = sparse_output
    def fit(self, y):
        """Fit the label sets binarizer, storing `classes_`
        Parameters
        ----------
        y : iterable of iterables
            A set of labels (any orderable and hashable object) for each
            sample. If the `classes` parameter is set, `y` will not be
            iterated.
        Returns
        -------
        self : returns this MultiLabelBinarizer instance
        """
        if self.classes is None:
            # Collect every label seen across all samples, sorted.
            classes = sorted(set(itertools.chain.from_iterable(y)))
        else:
            classes = self.classes
        # Use an int array when possible, otherwise fall back to object
        # dtype so tuple/mixed labels are stored intact.
        dtype = np.int if all(isinstance(c, int) for c in classes) else object
        self.classes_ = np.empty(len(classes), dtype=dtype)
        self.classes_[:] = classes
        return self
    def fit_transform(self, y):
        """Fit the label sets binarizer and transform the given label sets
        Parameters
        ----------
        y : iterable of iterables
            A set of labels (any orderable and hashable object) for each
            sample. If the `classes` parameter is set, `y` will not be
            iterated.
        Returns
        -------
        y_indicator : array or CSR matrix, shape (n_samples, n_classes)
            A matrix such that `y_indicator[i, j] = 1` iff `classes_[j]` is in
            `y[i]`, and 0 otherwise.
        """
        if self.classes is not None:
            return self.fit(y).transform(y)
        # Automatically increment on new class
        class_mapping = defaultdict(int)
        class_mapping.default_factory = class_mapping.__len__
        yt = self._transform(y, class_mapping)
        # sort classes and reorder columns
        tmp = sorted(class_mapping, key=class_mapping.get)
        # (make safe for tuples)
        dtype = np.int if all(isinstance(c, int) for c in tmp) else object
        class_mapping = np.empty(len(tmp), dtype=dtype)
        class_mapping[:] = tmp
        self.classes_, inverse = np.unique(class_mapping, return_inverse=True)
        # Remap the first-seen column indices assigned in _transform to
        # the sorted class order.
        yt.indices = np.take(inverse, yt.indices)
        if not self.sparse_output:
            yt = yt.toarray()
        return yt
    def transform(self, y):
        """Transform the given label sets
        Parameters
        ----------
        y : iterable of iterables
            A set of labels (any orderable and hashable object) for each
            sample. If the `classes` parameter is set, `y` will not be
            iterated.
        Returns
        -------
        y_indicator : array or CSR matrix, shape (n_samples, n_classes)
            A matrix such that `y_indicator[i, j] = 1` iff `classes_[j]` is in
            `y[i]`, and 0 otherwise.
        """
        # Map each known class to its column index.
        class_to_index = dict(zip(self.classes_, range(len(self.classes_))))
        yt = self._transform(y, class_to_index)
        if not self.sparse_output:
            yt = yt.toarray()
        return yt
    def _transform(self, y, class_mapping):
        """Transforms the label sets with a given mapping
        Parameters
        ----------
        y : iterable of iterables
        class_mapping : Mapping
            Maps from label to column index in label indicator matrix
        Returns
        -------
        y_indicator : sparse CSR matrix, shape (n_samples, n_classes)
            Label indicator matrix
        """
        indices = array.array('i')
        indptr = array.array('i', [0])
        for labels in y:
            # ``set`` drops duplicate labels within a single sample.
            indices.extend(set(class_mapping[label] for label in labels))
            indptr.append(len(indices))
        data = np.ones(len(indices), dtype=int)
        return sp.csr_matrix((data, indices, indptr),
                             shape=(len(indptr) - 1, len(class_mapping)))
    def inverse_transform(self, yt):
        """Transform the given indicator matrix into label sets
        Parameters
        ----------
        yt : array or sparse matrix of shape (n_samples, n_classes)
            A matrix containing only 1s ands 0s.
        Returns
        -------
        y : list of tuples
            The set of labels for each sample such that `y[i]` consists of
            `classes_[j]` for each `yt[i, j] == 1`.
        """
        if yt.shape[1] != len(self.classes_):
            raise ValueError('Expected indicator for {0} classes, but got {1}'
                             .format(len(self.classes_), yt.shape[1]))
        if sp.issparse(yt):
            yt = yt.tocsr()
            # Only exact 0/1 indicators are invertible.
            if len(yt.data) != 0 and len(np.setdiff1d(yt.data, [0, 1])) > 0:
                raise ValueError('Expected only 0s and 1s in label indicator.')
            return [tuple(self.classes_.take(yt.indices[start:end]))
                    for start, end in zip(yt.indptr[:-1], yt.indptr[1:])]
        else:
            unexpected = np.setdiff1d(yt, [0, 1])
            if len(unexpected) > 0:
                raise ValueError('Expected only 0s and 1s in label indicator. '
                                 'Also got {0}'.format(unexpected))
            return [tuple(self.classes_.compress(indicators)) for indicators
                    in yt]
|
0x0all/scikit-learn
|
sklearn/preprocessing/label.py
|
Python
|
bsd-3-clause
| 28,286 | 0 |
import httplib
import json
import urlparse
import error
class Response(object):
    """Describes an HTTP response received from a remote end whose body has
    been read and parsed as appropriate.
    """
    def __init__(self, status, body):
        # HTTP status code (int) and the decoded body (a JSON value when
        # the remote sent one, otherwise the raw/empty payload).
        self.status = status
        self.body = body
    def __repr__(self):
        return "wdclient.Response(status=%d, body=%s)" % (self.status, self.body)
    @classmethod
    def from_http_response(cls, http_response):
        """Build a ``Response`` from an httplib-style response object,
        decoding a non-empty body as JSON.

        :param http_response: object exposing a ``status`` attribute and
            a ``read()`` method (e.g. ``httplib.HTTPResponse``).
        :raises error.UnknownErrorException: if the body is not valid JSON.
        """
        status = http_response.status
        body = http_response.read()
        # SpecID: dfn-send-a-response
        #
        # > 3. Set the response's header with name and value with the following
        # >    values:
        # >
        # >    "Content-Type"
        # >       "application/json; charset=utf-8"
        # >    "cache-control"
        # >       "no-cache"
        if body:
            try:
                body = json.loads(body)
            # json.loads signals malformed input with ValueError (the
            # JSONDecodeError of newer Pythons subclasses it).  The
            # previous bare ``except:`` also swallowed KeyboardInterrupt,
            # SystemExit and unrelated bugs.
            except ValueError:
                raise error.UnknownErrorException("Failed to decode body as json:\n%s" % body)
        return cls(status, body)
class ToJsonEncoder(json.JSONEncoder):
    """JSON encoder that serialises otherwise-unsupported objects through
    a ``json`` method defined on their class, when one exists."""
    def default(self, obj):
        # When the object's class provides no ``json`` serialiser, fall
        # back to the stock encoder's default, which raises TypeError.
        fallback = json.JSONEncoder().default
        serializer = getattr(obj.__class__, "json", fallback)
        return serializer(obj)
class HTTPWireProtocol(object):
    """Transports messages (commands and responses) over the WebDriver
    wire protocol.

    NOTE: Python 2 only — relies on ``httplib``, ``urlparse`` and the
    ``unicode`` builtin.
    """
    def __init__(self, host, port, url_prefix="/", timeout=None):
        """Construct interface for communicating with the remote server.
        :param host: Hostname of the remote WebDriver server.
        :param port: TCP port the remote server listens on.
        :param url_prefix: Path prefix joined onto every request URL.
        :param timeout: Optional socket timeout (seconds) for requests.
        """
        self.host = host
        self.port = port
        self.url_prefix = url_prefix
        self._timeout = timeout
    def url(self, suffix):
        # Resolve the endpoint path against the configured prefix.
        # NOTE(review): urljoin semantics drop the prefix's last path
        # segment unless it ends with '/' — presumably url_prefix always
        # does; confirm with callers.
        return urlparse.urljoin(self.url_prefix, suffix)
    def send(self, method, uri, body=None, headers=None):
        """Send a command to the remote.
        :param method: `GET`, `POST`, or `DELETE`.
        :param uri: Relative endpoint of the requests URL path.
        :param body: Body of the request. Defaults to an empty
            dictionary if ``method`` is `POST`.
        :param headers: Additional headers to include in the request.
        :return: Instance of ``wdclient.Response`` describing the
            HTTP response received from the remote end.
        """
        if body is None and method == "POST":
            body = {}
        # Serialise dict bodies via ToJsonEncoder and send UTF-8 bytes.
        if isinstance(body, dict):
            body = json.dumps(body, cls=ToJsonEncoder)
        if isinstance(body, unicode):
            body = body.encode("utf-8")
        if headers is None:
            headers = {}
        url = self.url(uri)
        # Only pass a timeout kwarg when one was configured, so the
        # httplib default applies otherwise.
        kwargs = {}
        if self._timeout is not None:
            kwargs["timeout"] = self._timeout
        conn = httplib.HTTPConnection(
            self.host, self.port, strict=True, **kwargs)
        conn.request(method, url, body, headers)
        try:
            response = conn.getresponse()
            return Response.from_http_response(response)
        finally:
            # Always release the connection, even if reading failed.
            conn.close()
|
mhaessig/servo
|
tests/wpt/web-platform-tests/tools/webdriver/webdriver/transport.py
|
Python
|
mpl-2.0
| 3,140 | 0.001274 |
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for GetTask
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-tasks
# [START cloudtasks_v2beta2_generated_CloudTasks_GetTask_async]
from google.cloud import tasks_v2beta2
async def sample_get_task():
    """Illustrative sample: fetch one Cloud Tasks task and print it.

    NOTE: generated sample code — the hard-coded resource name
    ("name_value") must be replaced with a real task name to run.
    """
    # Create a client
    client = tasks_v2beta2.CloudTasksAsyncClient()
    # Initialize request argument(s)
    request = tasks_v2beta2.GetTaskRequest(
        name="name_value",
    )
    # Make the request
    response = await client.get_task(request=request)
    # Handle the response
    print(response)
# [END cloudtasks_v2beta2_generated_CloudTasks_GetTask_async]
|
googleapis/python-tasks
|
samples/generated_samples/cloudtasks_v2beta2_generated_cloud_tasks_get_task_async.py
|
Python
|
apache-2.0
| 1,437 | 0.000696 |
# Copyright 2020 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from http import HTTPStatus
from unittest import mock
import uuid
import ddt
from webob import exc
from cinder.api import api_utils
from cinder.api import microversions as mv
from cinder.api.v3 import default_types
from cinder import db
from cinder.policies import default_types as default_type_policies
from cinder.tests.unit.api import fakes as fake_api
from cinder.tests.unit import fake_constants
from cinder.tests.unit.policies import base
from cinder.tests.unit.policies import test_base
from cinder.tests.unit import utils as test_utils
class FakeProject(object):
    """Stand-in for a Keystone project record used by the policy tests."""
    def __init__(self, id=None, name=None):
        # Fall back to a random 32-char hex id when none is supplied.
        self.id = id if id else uuid.uuid4().hex
        self.name = name
        self.description = 'fake project description'
        self.domain_id = 'default'
class DefaultVolumeTypesPolicyTests(test_base.CinderPolicyTests):
    """Functional tests exercising the default-types API as different admins."""
    class FakeDefaultType:
        # Minimal stand-in for a db default-volume-type row.
        project_id = fake_constants.PROJECT_ID
        volume_type_id = fake_constants.VOLUME_TYPE_ID
    def setUp(self):
        super(DefaultVolumeTypesPolicyTests, self).setUp()
        self.volume_type = self._create_fake_type(self.admin_context)
        self.project = FakeProject()
        # Need to mock out Keystone so the functional tests don't require other
        # services
        _keystone_client = mock.MagicMock()
        _keystone_client.version = 'v3'
        _keystone_client.projects.get.side_effect = self._get_project
        _keystone_client_get = mock.patch(
            'cinder.api.api_utils._keystone_client',
            lambda *args, **kwargs: _keystone_client)
        _keystone_client_get.start()
        self.addCleanup(_keystone_client_get.stop)
    def _get_project(self, project_id, *args, **kwargs):
        # Keystone stub: every project lookup resolves to the one fake project.
        return self.project
    def test_system_admin_can_set_default(self):
        system_admin_context = self.system_admin_context
        path = '/v3/default-types/%s' % system_admin_context.project_id
        body = {
            'default_type':
                {"volume_type": self.volume_type.id}
        }
        response = self._get_request_response(system_admin_context,
                                              path, 'PUT', body=body,
                                              microversion=
                                              mv.DEFAULT_TYPE_OVERRIDES)
        self.assertEqual(HTTPStatus.OK, response.status_int)
    def test_project_admin_can_set_default(self):
        admin_context = self.admin_context
        path = '/v3/default-types/%s' % admin_context.project_id
        body = {
            'default_type':
                {"volume_type": self.volume_type.id}
        }
        response = self._get_request_response(admin_context,
                                              path, 'PUT', body=body,
                                              microversion=
                                              mv.DEFAULT_TYPE_OVERRIDES)
        self.assertEqual(HTTPStatus.OK, response.status_int)
    @mock.patch.object(db, 'project_default_volume_type_get',
                       return_value=FakeDefaultType())
    def test_system_admin_can_get_default(self, mock_default_get):
        system_admin_context = self.system_admin_context
        path = '/v3/default-types/%s' % system_admin_context.project_id
        response = self._get_request_response(system_admin_context,
                                              path, 'GET',
                                              microversion=
                                              mv.DEFAULT_TYPE_OVERRIDES)
        self.assertEqual(HTTPStatus.OK, response.status_int)
    def test_project_admin_can_get_default(self):
        # First set a default so the subsequent GET has something to return.
        admin_context = self.admin_context
        path = '/v3/default-types/%s' % admin_context.project_id
        body = {
            'default_type':
                {"volume_type": self.volume_type.id}
        }
        self._get_request_response(admin_context,
                                   path, 'PUT', body=body,
                                   microversion=
                                   mv.DEFAULT_TYPE_OVERRIDES)
        path = '/v3/default-types/%s' % admin_context.project_id
        response = self._get_request_response(admin_context,
                                              path, 'GET',
                                              microversion=
                                              mv.DEFAULT_TYPE_OVERRIDES)
        self.assertEqual(HTTPStatus.OK, response.status_int)
    def test_system_admin_can_get_all_default(self):
        system_admin_context = self.system_admin_context
        path = '/v3/default-types'
        response = self._get_request_response(system_admin_context,
                                              path, 'GET',
                                              microversion=
                                              mv.DEFAULT_TYPE_OVERRIDES)
        self.assertEqual(HTTPStatus.OK, response.status_int)
    def test_system_admin_can_unset_default(self):
        system_admin_context = self.system_admin_context
        path = '/v3/default-types/%s' % system_admin_context.project_id
        response = self._get_request_response(system_admin_context,
                                              path, 'DELETE',
                                              microversion=
                                              mv.DEFAULT_TYPE_OVERRIDES)
        self.assertEqual(HTTPStatus.NO_CONTENT, response.status_int)
    def test_project_admin_can_unset_default(self):
        admin_context = self.admin_context
        path = '/v3/default-types/%s' % admin_context.project_id
        response = self._get_request_response(admin_context,
                                              path, 'DELETE',
                                              microversion=
                                              mv.DEFAULT_TYPE_OVERRIDES)
        self.assertEqual(HTTPStatus.NO_CONTENT, response.status_int)
@ddt.ddt
class DefaultVolumeTypesPolicyTest(base.BasePolicyTest):
    """Per-user policy matrix for the default-types API (legacy defaults)."""
    # Personas expected to pass each policy check...
    authorized_admins = [
        'system_admin',
        'legacy_admin',
        'project_admin',
    ]
    # ...and personas expected to be rejected.
    unauthorized_admins = [
        'legacy_owner',
        'system_member',
        'system_reader',
        'system_foo',
        'project_member',
        'project_reader',
        'project_foo',
        'other_project_member',
        'other_project_reader',
    ]
    # Basic policy test is without enforcing scope (which cinder doesn't
    # yet support) and deprecated rules enabled.
    def setUp(self, enforce_scope=False, enforce_new_defaults=False,
              *args, **kwargs):
        super().setUp(enforce_scope, enforce_new_defaults, *args, **kwargs)
        self.controller = default_types.DefaultTypesController()
        self.api_path = '/v3/default-types/%s' % (self.project_id)
        self.api_version = mv.DEFAULT_TYPE_OVERRIDES
    def _create_volume_type(self):
        # Helper: real volume type created as the project admin.
        vol_type = test_utils.create_volume_type(self.project_admin_context,
                                                 name='fake_vol_type',
                                                 testcase_instance=self)
        return vol_type
    @ddt.data(*base.all_users)
    @mock.patch.object(api_utils, 'get_project')
    def test_default_type_set_policy(self, user_id, fake_project):
        vol_type = self._create_volume_type()
        fake_project.return_value = FakeProject(id=self.project_id)
        rule_name = default_type_policies.CREATE_UPDATE_POLICY
        url = self.api_path
        req = fake_api.HTTPRequest.blank(url, version=self.api_version)
        req.method = 'POST'
        body = {"default_type": {"volume_type": vol_type.id}}
        unauthorized_exceptions = [exc.HTTPForbidden]
        self.common_policy_check(user_id, self.authorized_admins,
                                 self.unauthorized_admins,
                                 unauthorized_exceptions,
                                 rule_name, self.controller.create_update, req,
                                 id=vol_type.id, body=body)
    @ddt.data(*base.all_users)
    @mock.patch.object(default_types.db, 'project_default_volume_type_get')
    @mock.patch.object(api_utils, 'get_project')
    def test_default_type_get_policy(self, user_id, fake_project,
                                     mock_default_get):
        fake_project.return_value = FakeProject(id=self.project_id)
        rule_name = default_type_policies.GET_POLICY
        url = self.api_path
        req = fake_api.HTTPRequest.blank(url, version=self.api_version)
        unauthorized_exceptions = [exc.HTTPForbidden]
        self.common_policy_check(user_id, self.authorized_admins,
                                 self.unauthorized_admins,
                                 unauthorized_exceptions,
                                 rule_name, self.controller.detail, req,
                                 id=self.project_id)
    @ddt.data(*base.all_users)
    @mock.patch.object(default_types.db, 'project_default_volume_type_get')
    def test_default_type_get_all_policy(self, user_id, mock_default_get):
        rule_name = default_type_policies.GET_ALL_POLICY
        url = self.api_path
        req = fake_api.HTTPRequest.blank(url, version=self.api_version)
        unauthorized_exceptions = [exc.HTTPForbidden]
        # NOTE: The users 'legacy_admin' and 'project_admin' pass for
        # GET_ALL_POLICY since with enforce_new_defaults=False, we have
        # a logical OR between old policy and new one hence RULE_ADMIN_API
        # allows them to pass
        self.common_policy_check(user_id, self.authorized_admins,
                                 self.unauthorized_admins,
                                 unauthorized_exceptions,
                                 rule_name, self.controller.index, req)
    @ddt.data(*base.all_users)
    @mock.patch.object(api_utils, 'get_project')
    @mock.patch.object(default_types.db, 'project_default_volume_type_get')
    def test_default_type_unset_policy(self, user_id, mock_default_unset,
                                       fake_project):
        fake_project.return_value = FakeProject(id=self.project_id)
        rule_name = default_type_policies.DELETE_POLICY
        url = self.api_path
        req = fake_api.HTTPRequest.blank(url, version=self.api_version)
        req.method = 'DELETE'
        unauthorized_exceptions = [exc.HTTPForbidden]
        self.common_policy_check(user_id, self.authorized_admins,
                                 self.unauthorized_admins,
                                 unauthorized_exceptions,
                                 rule_name, self.controller.delete, req,
                                 id=self.project_id)
class DefaultVolumeTypesPolicySecureRbacTest(DefaultVolumeTypesPolicyTest):
    """Same policy matrix as the parent, but with secure-RBAC defaults on."""
    authorized_admins = [
        'legacy_admin',
        'system_admin',
        'project_admin',
    ]
    unauthorized_admins = [
        'legacy_owner',
        'system_member',
        'system_reader',
        'system_foo',
        'project_member',
        'project_reader',
        'project_foo',
        'other_project_member',
        'other_project_reader',
    ]
    def setUp(self, *args, **kwargs):
        # Test secure RBAC by disabling deprecated policy rules (scope
        # is still not enabled).
        super().setUp(enforce_scope=False, enforce_new_defaults=True,
                      *args, **kwargs)
|
openstack/cinder
|
cinder/tests/unit/policies/test_default_volume_types.py
|
Python
|
apache-2.0
| 12,078 | 0.000662 |
from syncloud_platform.snap.models import App, AppVersions
from syncloud_platform.snap.snap import join_apps
def test_join_apps():
    """join_apps must merge installed and store versions keyed by app id."""
    def app_version(app_id, installed, current):
        # Build an AppVersions record wired to an App with the given id.
        app = App()
        app.id = app_id
        version = AppVersions()
        version.installed_version = installed
        version.current_version = current
        version.app = app
        return version
    installed_apps = [app_version('id1', 'v1', None),
                      app_version('id2', 'v1', None)]
    store_apps = [app_version('id2', None, 'v2'),
                  app_version('id3', None, 'v2')]
    all_apps = sorted(join_apps(installed_apps, store_apps),
                      key=lambda av: av.app.id)
    assert len(all_apps) == 3
    first, second, third = all_apps
    # id1 exists only locally: no store version.
    assert first.app.id == 'id1'
    assert first.installed_version == 'v1'
    assert first.current_version is None
    # id2 exists in both: versions merged.
    assert second.app.id == 'id2'
    assert second.installed_version == 'v1'
    assert second.current_version == 'v2'
    # id3 exists only in the store: not installed.
    assert third.app.id == 'id3'
    assert third.installed_version is None
    assert third.current_version == 'v2'
|
syncloud/platform
|
src/test/snap/test_snap.py
|
Python
|
gpl-3.0
| 1,740 | 0.000575 |
# Copyright 2018 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import tempfile
import click
from . import core
def _find_frontends():
result = {}
prefix = 'frontend_'
suffix = '.py'
dirname = os.path.dirname(__file__)
for fname in os.listdir(dirname):
if fname.startswith(prefix) and fname.endswith(suffix):
result[fname[len(prefix):-len(suffix)]] = os.path.join(dirname, fname)
return result
_FRONTENDS = _find_frontends()
def _get_frontend_mod(name):
    """Execute and return the frontend module dict for *name*.

    Returns None when no such frontend was discovered.
    """
    fname = _FRONTENDS.get(name)
    if fname is None:
        return None
    mod = {'__file__': fname}
    with open(fname) as f:
        code = compile(f.read(), fname, 'exec')
    # Run the module's code with `mod` as its global namespace.
    eval(code, mod)
    return mod
class _PlaidbenchCommand(click.MultiCommand):
    """Click multi-command whose subcommands are the discovered frontends."""
    def list_commands(self, ctx):
        # Subcommand names are the frontend names found on disk.
        return _FRONTENDS.keys()
    def get_command(self, ctx, name):
        # Each frontend module is expected to expose its click command
        # under the top-level name `cli`.
        return _get_frontend_mod(name)['cli']
@click.command(cls=_PlaidbenchCommand)
@click.option('-v', '--verbose', count=True)
@click.option('-n',
              '--examples',
              type=int,
              default=None,
              help='Number of examples to use (over all epochs)')
@click.option(
    '--blanket-run',
    is_flag=True,
    help='Run all networks at a range of batch sizes, ignoring the --batch-size and --examples '
    'options and the choice of network.')
@click.option('--results',
              type=click.Path(exists=False, file_okay=False, dir_okay=True),
              default=os.path.join(tempfile.gettempdir(), 'plaidbench_results'),
              help='Destination directory for results output')
@click.option('--callgrind/--no-callgrind',
              default=False,
              help='Invoke callgrind during timing runs')
@click.option('--epochs', type=int, default=1, help="Number of epochs per test")
@click.option('--batch-size', type=int, default=1)
@click.option('--timeout-secs', type=int, default=None)
@click.option('--warmup/--no-warmup', default=True, help='Do warmup runs before main timing')
@click.option('--kernel-timing/--no-kernel-timing', default=True, help='Emit kernel timing info')
@click.option('--print-stacktraces/--no-print-stacktraces',
              default=False,
              help='Print a stack trace if an exception occurs')
@click.pass_context
def plaidbench(ctx, verbose, examples, blanket_run, results, callgrind, epochs, batch_size,
               timeout_secs, warmup, print_stacktraces, kernel_timing):
    """PlaidML Machine Learning Benchmarks
    plaidbench runs benchmarks for a variety of ML framework, framework backend,
    and neural network combinations.
    For more information, see http://www.github.com/plaidml/plaidbench
    """
    # The Runner is shared with the frontend subcommands via click's context.
    runner = ctx.ensure_object(core.Runner)
    # Blanket mode sweeps batch sizes across all networks; explicit mode
    # benchmarks the requested network/batch-size/examples combination.
    if blanket_run:
        runner.param_builder = core.BlanketParamBuilder(epochs)
        runner.reporter = core.BlanketReporter(os.path.expanduser(results))
        runner.reporter.configuration['train'] = False
    else:
        runner.param_builder = core.ExplicitParamBuilder(batch_size, epochs, examples)
        runner.reporter = core.ExplicitReporter(results)
    runner.verbose = verbose
    runner.callgrind = callgrind
    runner.warmup = warmup
    runner.kernel_timing = kernel_timing
    runner.print_stacktraces = print_stacktraces
    runner.timeout_secs = timeout_secs
|
plaidml/plaidml
|
plaidbench/plaidbench/cli.py
|
Python
|
apache-2.0
| 3,879 | 0.002578 |
import sublime
if int(sublime.version()) < 3000:
import ghci
import ghcmod
import haskell_docs
import hdevtools
import sublime_haskell_common as common
import symbols
else:
import SublimeHaskell.ghci as ghci
import SublimeHaskell.ghcmod as ghcmod
import SublimeHaskell.haskell_docs as haskell_docs
import SublimeHaskell.hdevtools as hdevtools
import SublimeHaskell.sublime_haskell_common as common
import SublimeHaskell.symbols as symbols
def symbol_info(filename, module_name, symbol_name, cabal=None, no_ghci=False):
    """Look up details for a symbol, trying backends in order of preference:
    hdevtools, then ghc-mod, then (for cabal modules only) ghci."""
    result = (hdevtools.hdevtools_info(filename, symbol_name, cabal=cabal)
              if hdevtools.hdevtools_enabled() else None)
    if not result and ghcmod.ghcmod_enabled():
        result = ghcmod.ghcmod_info(filename, module_name, symbol_name,
                                    cabal=cabal)
    # ghci is only consulted when there is no source file and it isn't
    # explicitly disabled.
    if not result and not filename and not no_ghci:
        result = ghci.ghci_info(module_name, symbol_name, cabal=cabal)
    return result
def load_docs(decl):
    """Populate ``decl.docs`` from haskell-docs, unless already present."""
    if decl.docs is not None:
        return
    decl.docs = haskell_docs.haskell_docs(decl.module.name, decl.name)
def refine_type(decl, no_ghci=True):
    """Fill in the type of a source-based function declaration when missing."""
    if not decl.location:
        return
    if decl.what != 'function' or decl.type:
        return
    info = symbol_info(decl.location.filename, decl.module.name, decl.name,
                       None, no_ghci=no_ghci)
    if info:
        decl.type = info.type
def refine_decl(decl):
    """Enrich a declaration with docs and a concrete type."""
    if decl.location:
        # Symbol from sources: just make sure its type is concrete.
        refine_type(decl, False)
        return
    # Symbol from cabal: fetch docs, and for declarations merge in the
    # richer details that ghci can provide.
    load_docs(decl)
    if decl.what != 'declaration':
        return
    detailed = ghci.ghci_info(decl.module.name, decl.name)
    if detailed:
        decl.__dict__.update(detailed.__dict__)
def browse_module(module_name, cabal = None):
    """
    Returns symbols.Module with all declarations

    Delegates to ghc-mod's browse command for the given module.
    """
    return ghcmod.ghcmod_browse_module(module_name, cabal = cabal)
|
hcarvalhoalves/SublimeHaskell
|
util.py
|
Python
|
mit
| 2,159 | 0.012043 |
# Example script: ask a Tanium question via pytan and export the result
# set as CSV (with sensor headers disabled).
# NOTE(review): Python 2 script — uses print statements.
# import the basic python packages we need
import os
import sys
import tempfile
import pprint
import traceback
# disable python from generating a .pyc file
sys.dont_write_bytecode = True
# change me to the path of pytan if this script is not running from EXAMPLES/PYTAN_API
pytan_loc = "~/gh/pytan"
pytan_static_path = os.path.join(os.path.expanduser(pytan_loc), 'lib')
# Determine our script name, script dir
my_file = os.path.abspath(sys.argv[0])
my_dir = os.path.dirname(my_file)
# try to automatically determine the pytan lib directory by assuming it is in '../../lib/'
parent_dir = os.path.dirname(my_dir)
pytan_root_dir = os.path.dirname(parent_dir)
lib_dir = os.path.join(pytan_root_dir, 'lib')
# add pytan_loc and lib_dir to the PYTHONPATH variable
path_adds = [lib_dir, pytan_static_path]
[sys.path.append(aa) for aa in path_adds if aa not in sys.path]
# import pytan
import pytan
# create a dictionary of arguments for the pytan handler
handler_args = {}
# establish our connection info for the Tanium Server
handler_args['username'] = "Administrator"
handler_args['password'] = "Tanium2015!"
handler_args['host'] = "10.0.1.240"
handler_args['port'] = "443"  # optional
# optional, level 0 is no output except warnings/errors
# level 1 through 12 are more and more verbose
handler_args['loglevel'] = 1
# optional, use a debug format for the logging output (uses two lines per log entry)
handler_args['debugformat'] = False
# optional, this saves all response objects to handler.session.ALL_REQUESTS_RESPONSES
# very useful for capturing the full exchange of XML requests and responses
handler_args['record_all_requests'] = True
# instantiate a handler using all of the arguments in the handler_args dictionary
print "...CALLING: pytan.handler() with args: {}".format(handler_args)
handler = pytan.Handler(**handler_args)
# print out the handler string
print "...OUTPUT: handler string: {}".format(handler)
# setup the arguments for the handler() class
kwargs = {}
kwargs["export_format"] = u'csv'
kwargs["header_add_sensor"] = False
# setup the arguments for handler.ask()
ask_kwargs = {
    'qtype': 'manual',
    'sensors': [
        "Computer Name", "IP Route Details", "IP Address",
        'Folder Contents{folderPath=C:\Program Files}',
    ],
}
# ask the question that will provide the resultset that we want to use
print "...CALLING: handler.ask() with args {}".format(ask_kwargs)
response = handler.ask(**ask_kwargs)
# store the resultset object as the obj we want to export into kwargs
kwargs['obj'] = response['question_results']
# export the object to a string
# (we could just as easily export to a file using export_to_report_file)
print "...CALLING: handler.export_obj() with args {}".format(kwargs)
out = handler.export_obj(**kwargs)
# trim the output if it is more than 15 lines long
if len(out.splitlines()) > 15:
    out = out.splitlines()[0:15]
    out.append('..trimmed for brevity..')
    out = '\n'.join(out)
print "...OUTPUT: print the export_str returned from export_obj():"
print out
|
tanium/pytan
|
BUILD/doc/source/examples/export_resultset_csv_sensor_false_code.py
|
Python
|
mit
| 3,040 | 0.002303 |
# vim: fileencoding=utf-8
"""
AppHtml settings

@author Toshiya NISHIO(http://www.toshiya240.com)
"""
# Link templates keyed by their (Japanese) menu labels.  The ${...}
# placeholders are substituted by AppHtml when a link is generated.
defaultTemplate = {
    '1) 小さいボタン': '${badgeS}',
    '2) 大きいボタン': '${badgeL}',
    '3) テキストのみ': '${textonly}',
    "4) アイコン付き(小)": u"""<span class="appIcon"><img class="appIconImg" height="60" src="${icon60url}" style="float:left;margin: 0px 15px 15px 5px;"></span>
<span class="appName"><strong><a href="${url}" target="itunes_store">${name}</a></strong></span><br>
<span class="appCategory">カテゴリ: ${category}</span><br>
<span class="badgeS" style="display:inline-block; margin:6px">${badgeS}</span><br style="clear:both;">
""",
    "5) アイコン付き(大)": u"""<span class="appIcon"><img class="appIconImg" height="100" src="${icon100url}" style="float:left;;margin: 0px 15px 15px 5px;"></span>
<span class="appName"><strong><a href="${url}" target="itunes_store">${name}</a></strong></span><br>
<span class="appCategory">カテゴリ: ${category}</span><br>
<span class="badgeL" style="display:inline-block; margin:4px">${badgeL}</span><br style="clear:both;">
"""
}
# Workflow settings: 'scs' values look like per-platform pixel sizes and
# 'cnt' like a result count — TODO confirm against the workflow script.
settings = {
    'phg': "",
    'cnt': 8,
    'scs': {
        'iphone': 320,
        'ipad': 320,
        'mac': 480
    },
    # Every media kind currently shares the same template table.
    'template': {
        'software': defaultTemplate,
        'iPadSoftware': defaultTemplate,
        'macSoftware': defaultTemplate,
        'song': defaultTemplate,
        'album': defaultTemplate,
        'movie': defaultTemplate,
        'ebook': defaultTemplate
    }
}
|
connect1ngdots/AppHtmlME
|
AppHtmlME.workflow/Scripts/apphtml_settings.py
|
Python
|
mit
| 1,540 | 0.00411 |
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Cblas(Package):
    """The BLAS (Basic Linear Algebra Subprograms) are routines that
    provide standard building blocks for performing basic vector and
    matrix operations."""
    homepage = "http://www.netlib.org/blas/_cblas/"
    # tarball has no version, but on the date below, this MD5 was correct.
    version('2015-06-06', '1e8830f622d2112239a4a8a83b84209a',
            url='http://www.netlib.org/blas/blast-forum/cblas.tgz')
    depends_on('blas')
    # The Makefile-driven build is not safe to run in parallel.
    parallel = False
    def patch(self):
        # Point the stock Makefile.in at Spack's blas library and compilers.
        mf = FileFilter('Makefile.in')
        mf.filter('^BLLIB =.*', 'BLLIB = %s/libblas.a' %
                  self.spec['blas'].prefix.lib)
        mf.filter('^CC =.*', 'CC = cc')
        mf.filter('^FC =.*', 'FC = f90')
    def install(self, spec, prefix):
        # Build, then copy the static library and headers into the prefix.
        make('all')
        mkdirp(prefix.lib)
        mkdirp(prefix.include)
        # Rename the generated lib file to libcblas.a
        install('./lib/cblas_LINUX.a', '%s/libcblas.a' % prefix.lib)
        install('./include/cblas.h', '%s' % prefix.include)
        install('./include/cblas_f77.h', '%s' % prefix.include)
|
EmreAtes/spack
|
var/spack/repos/builtin/packages/cblas/package.py
|
Python
|
lgpl-2.1
| 2,365 | 0 |
# -*- coding: UTF-8 -*-
from django.core.cache import cache
from django.db.models.signals import post_delete, post_save
import functools
import hashlib
WEEK = 7 * 24 * 60 * 60  # 1 week
def cache_me(key=None, ikey=None, signals=(), models=(), timeout=WEEK):
    """Decorator factory: cache a function's result in Django's cache.

    key     -- cache key: None (use function name), a %-format string
               (filled with positional args), or a callable.
    ikey    -- invalidation key(s): None, a string, or a callable that
               returns one key or an iterable of keys.
    signals -- extra signals that trigger invalidation.
    models  -- models whose post_save/post_delete trigger invalidation.
    timeout -- cache TTL in seconds (defaults to one week).

    NOTE(review): Python 2 code (``unicode``/``basestring``).
    """
    def hashme(k):
        # Keys are md5-hashed so arbitrary text is safe for the backend.
        if isinstance(k, unicode):
            k = k.encode('utf-8')
        return hashlib.md5(k).hexdigest()
    def decorator(f):
        def invalidate(sender, **kwargs):
            # Resolve the set of cache keys to drop for this event.
            if ikey is None:
                ks = (f.__name__,)
            elif callable(ikey):
                k = ikey(sender, **kwargs)
                if isinstance(k, basestring):
                    ks = (k,)
                else:
                    ks = k
            else:
                ks = (ikey,)
            if ks:
                cache.delete_many(map(hashme, ks))
        # Only wire up invalidation when an ikey is usable (explicit ikey,
        # or the implicit function-name key when neither is given).
        if ikey or (ikey is None and key is None):
            for s in signals:
                s.connect(invalidate, weak=False)
            for m in models:
                post_save.connect(invalidate, sender=m, weak=False)
                post_delete.connect(invalidate, sender=m, weak=False)
        def _key(*args, **kwargs):
            # Compute the concrete cache key for this call.
            if key is None:
                k = f.__name__
            elif callable(key):
                k = key(*args, **kwargs)
            else:
                k = key % args
            return hashme(k)
        @functools.wraps(f)
        def wrapper(*args, **kwargs):
            k = _key(*args, **kwargs)
            data = cache.get(k)
            if data is None:
                # Cache miss: compute and store.
                data = f(*args, **kwargs)
                cache.set(k, data, timeout)
            return data
        # Expose the key function so callers can invalidate manually.
        wrapper.cachekey = _key
        return wrapper
    return decorator
from collections import defaultdict
from django.conf import settings as dsettings
from django.core.urlresolvers import reverse
from microblog import models
from microblog import settings
from taggit.models import TaggedItem
import django_comments as comments
def _i_post_list(sender, **kw):
    """Invalidation keys for post_list: one key per configured language."""
    return ['m:post_list:%s' % lang[0] for lang in dsettings.LANGUAGES]
@cache_me(models=(models.Post,),
    key='m:post_list:%s',
    ikey=_i_post_list)
def post_list(lang):
    """Return all posts for *lang*, newest first (cached per language)."""
    qs = models.Post.objects\
        .all()\
        .byLanguage(lang)\
        .order_by('-date')\
        .select_related('category', 'author')
    # Materialize the queryset so a plain list is what gets cached.
    return list(qs)
@cache_me(models=(models.Post,))
def tag_map():
    """Return a mapping of post id -> set of tags (cached)."""
    tagged_items = TaggedItem.objects\
        .filter(content_type__app_label='microblog', content_type__model='post')\
        .select_related('tag')
    mapping = defaultdict(set)
    for item in tagged_items:
        mapping[item.object_id].add(item.tag)
    return mapping
@cache_me(models=(models.Post,),
    key = 'm:tagged_posts:%s',
    ikey = 'm:tagged_posts:%s')
def tagged_posts(name):
    """
    Return the ids of the posts tagged with the given tag (cached).
    """
    # Case-insensitive match on the tag name.
    posts = TaggedItem.objects\
        .filter(content_type__app_label='microblog', content_type__model='post')\
        .filter(tag__name__iexact=name)\
        .values_list('object_id', flat=True)
    return set(posts)
def _i_post_data(sender, **kw):
    """Invalidation keys for post_data: per-language keys of the post
    affected by the saved/deleted instance (Post, its comment, or a
    related object carrying a post_id)."""
    if sender is models.Post:
        pid = kw['instance'].id
    elif sender is comments.get_model():
        # Only comments attached to microblog posts are relevant.
        o = kw['instance']
        if o.content_type.app_label == 'microblog' and o.content_type.model == 'post':
            pid = o.object_pk
        else:
            pid = None
    else:
        # Any other registered sender is assumed to reference the post
        # via a post_id attribute.
        pid = kw['instance'].post_id
    ks = []
    if pid:
        for l in dsettings.LANGUAGES:
            ks.append('m:post_data:%s%s' % (pid, l[0]))
    return ks
@cache_me(models=(models.Post, models.PostContent, comments.get_model()),
    key='m:post_data:%s%s',
    ikey=_i_post_data)
def post_data(pid, lang):
    """Return a bundle of everything needed to render post *pid* in *lang*:
    the post, its content, absolute URL, public comments and tags (cached)."""
    post = models.Post.objects\
        .select_related('author', 'category')\
        .get(id=pid)
    # Fall back to another language when no content exists for *lang*.
    try:
        content = post.content(lang=lang, fallback=True)
    except models.PostContent.DoesNotExist:
        content = None
    comment_list = comments.get_model().objects\
        .filter(content_type__app_label='microblog', content_type__model='post')\
        .filter(object_pk=pid, is_public=True)
    burl = models.PostContent.build_absolute_url(post, content)
    return {
        'post': post,
        'content': content,
        'url': dsettings.DEFAULT_URL_PREFIX + reverse(burl[0], args=burl[1], kwargs=burl[2]),
        'comments': list(comment_list),
        'tags': list(post.tags.all()),
    }
def _i_get_reactions(sender, **kw):
    """Invalidation key for the reactions of a single post content."""
    instance = kw['instance']
    # Trackbacks point at the content via content_id; pingbacks via object_id.
    cid = instance.content_id if sender is models.Trackback else instance.object_id
    return 'm:reaction:%s' % cid
# Choose the caching decorator for get_reactions: when the pingback server
# is disabled, Pingback saves/deletes must also invalidate the cache.
# NOTE(review): the condition looks inverted — get_reactions below queries
# Pingback only when MICROBLOG_PINGBACK_SERVER is true, yet that branch does
# NOT register Pingback for invalidation; confirm against upstream.
if settings.MICROBLOG_PINGBACK_SERVER:
    deco = cache_me(models=(models.Trackback,),
        key='m:reactions:%s',
        ikey=_i_get_reactions)
else:
    from pingback.models import Pingback
    deco = cache_me(models=(models.Trackback, Pingback),
        key='m:reactions:%s',
        ikey=_i_get_reactions)
@deco
def get_reactions(cid):
    """Return all reactions (trackbacks + approved pingbacks) for the
    post content *cid*, newest first (cached)."""
    trackbacks = models.Trackback.objects.filter(content=cid)
    if settings.MICROBLOG_PINGBACK_SERVER:
        from pingback.models import Pingback
        # Unfortunately pingbacks_for_object wants an object, not an id
        content = models.PostContent.objects.get(id=cid)
        pingbacks = Pingback.objects.pingbacks_for_object(content).filter(approved=True)
    else:
        pingbacks = []
    reactions = sorted(list(trackbacks) + list(pingbacks), key=lambda r: r.date, reverse=True)
    # Normalize the reactions, making sure every one has an excerpt
    for ix, r in enumerate(reactions):
        if not hasattr(r, 'excerpt'):
            r.excerpt = r.content
    return reactions
|
matrixise/epcon
|
microblog/dataaccess.py
|
Python
|
bsd-2-clause
| 5,734 | 0.007499 |
from static import tools
class DrawAble(object):
    """Base class for objects rendered through the global sprite batch.

    While ``activated`` is True the object is registered with
    ``tools.spritebatch``; deactivating removes it.
    """
    def __init__(self,image,position,zIndex=0,activated=True):
        self.image=image
        self.position=position
        self._zIndex=zIndex
        # Start from None so the first assignment below always triggers
        # the activated setter (None != True/False).
        self.__activated=None
        self.activated=activated
    def __del__(self):
        # Unregister from the sprite batch when garbage collected.
        self.activated=False
    #zindex
    def __getZIndex(self):
        return self._zIndex
    # Read-only draw-order index.
    zIndex=property(__getZIndex)
    #enabled
    def _disable(self):
        tools.spritebatch.remove(self)
    def _enable(self):
        tools.spritebatch.add(self)
    def __setActivated(self,b):
        # Only (de)register on an actual state change.
        if self.__activated!=b:
            self.__activated=b
            if b:
                self._enable()
            else:
                self._disable()
    def __getActivated(self):
        return self.__activated
    activated=property(__getActivated,__setActivated)
|
gitlitz/pygame-with-interpreter
|
drawable.py
|
Python
|
gpl-3.0
| 750 | 0.064 |
#-*- coding:utf-8 -*-
'''
Created on 18/2/2015
@author: PC06
'''
from flaskext.mysql import MySQL
from flask import Flask
class DBcon():
    """Factory for flask-mysql connections to the 'ventas' database.

    A throwaway Flask app object carries the connection settings and the
    MySQL extension is initialised against it.
    """
    def __init__(self):
        """Stateless constructor; all configuration happens in conexion()."""
        pass
    def conexion(self, user='python', password='123456', db='ventas', host='localhost'):
        """Build and return a configured ``MySQL`` extension object.

        The parameters default to the historical hard-coded values, so
        existing callers (``conexion()`` with no arguments) keep working.

        :param user: database user name.
        :param password: database password.
        :param db: schema/database name.
        :param host: database server host.
        :return: an initialised ``flaskext.mysql.MySQL`` instance.
        """
        # SECURITY NOTE: the defaults embed credentials in source code;
        # prefer passing real values (e.g. from the environment).
        mysql = MySQL()
        app = Flask(__name__)
        app.config['MYSQL_DATABASE_USER'] = user
        app.config['MYSQL_DATABASE_PASSWORD'] = password
        app.config['MYSQL_DATABASE_DB'] = db
        app.config['MYSQL_DATABASE_HOST'] = host
        mysql.init_app(app)
        return mysql
|
Elvirita/reposelvira
|
elviraae/ec/edu/itsae/conn/DBcon.py
|
Python
|
gpl-2.0
| 604 | 0.006623 |
from charmhelpers.cli import cmdline
from charmhelpers.core import hookenv
from charmhelpers.core import unitdata
from charms.reactive.bus import FlagWatch
from charms.reactive.trace import tracer
# Public API; the *_state names are retained for backwards compatibility
# with the pre-0.5.0 "state" terminology.
__all__ = [
    'set_flag',
    'clear_flag',
    'toggle_flag',
    'register_trigger',
    'is_flag_set',
    'all_flags_set',
    'any_flags_set',
    'get_flags',
    'get_unset_flags',
    'set_state',  # DEPRECATED
    'remove_state',  # DEPRECATED
    'toggle_state',  # DEPRECATED
    'is_state',  # DEPRECATED
    'all_states',  # DEPRECATED
    'get_states',  # DEPRECATED
    'any_states',  # DEPRECATED
]
# Maps a trigger key (see _get_trigger) to the callbacks to invoke when that
# trigger fires.  Unlike the set_flag/clear_flag lists, callbacks are kept in
# process memory only -- they are not persisted in unitdata.
TRIGGER_CALLBACKS = {}
class State(str):
    """
    .. deprecated:: 0.5.0
       Use flags instead.

    A named reactive state.

    States behave exactly like strings; this subclass exists purely so that
    state names can be discovered and introspected, for documentation,
    composition, or linting.  Intended for use with :class:`StateList`.
    """
    pass
class StateList(object):
    """
    .. deprecated:: 0.5.0
       Use flags instead.

    Marker base class grouping the :class:`State` instances that a relation
    or layer can set, so they can be discovered and introspected for
    documentation, composition, or linting.

    Example usage::

        class MyRelation(RelationBase):
            class states(StateList):
                connected = State('{relation_name}.connected')
                available = State('{relation_name}.available')
    """
    pass
@cmdline.subcommand()
@cmdline.no_output
def set_flag(flag, value=None):
    """set_flag(flag)

    Activate *flag* (persisting the optional *value* with it) and fire any
    triggers registered for this flag being set.

    :param str flag: Name of flag to set.

    .. note:: **Changes to flags are reset when a handler crashes.** Changes
        take effect immediately but are only persisted at the end of a
        complete, successful run of the reactive framework.
    """
    previously_set = flag in get_flags()
    unitdata.kv().update({flag: value}, prefix='reactive.states.')
    if not previously_set:
        tracer().set_flag(flag)
    FlagWatch.change(flag)
    trigger = _get_trigger(flag, None)
    for name in trigger['set_flag']:
        set_flag(name)
    for name in trigger['clear_flag']:
        clear_flag(name)
    for cb in trigger['callbacks']:
        cb()
@cmdline.subcommand()
@cmdline.no_output
def clear_flag(flag):
    """
    Deactivate *flag*, note the removal for the dispatch loop, and fire any
    triggers registered for this flag being cleared.

    :param str flag: Name of flag to clear.

    .. note:: **Changes to flags are reset when a handler crashes.** Changes
        take effect immediately but are only persisted at the end of a
        complete, successful run of the reactive framework.
    """
    was_set = flag in get_flags()
    unitdata.kv().unset('reactive.states.%s' % flag)
    unitdata.kv().set('reactive.dispatch.removed_state', True)
    if was_set:
        tracer().clear_flag(flag)
    FlagWatch.change(flag)
    trigger = _get_trigger(None, flag)
    for name in trigger['set_flag']:
        set_flag(name)
    for name in trigger['clear_flag']:
        clear_flag(name)
    for cb in trigger['callbacks']:
        cb()
@cmdline.subcommand()
@cmdline.no_output
def toggle_flag(flag, should_set):
    """
    Set or clear *flag* according to the truthiness of *should_set*.

    Equivalent to calling :func:`set_flag` when *should_set* is true and
    :func:`clear_flag` otherwise.

    :param str flag: Name of flag to toggle.
    :param bool should_set: Whether to set the flag, or clear it.

    .. note:: **Changes to flags are reset when a handler crashes.** Changes
        take effect immediately but are only persisted at the end of a
        complete, successful run of the reactive framework.
    """
    (set_flag if should_set else clear_flag)(flag)
@cmdline.subcommand()
@cmdline.no_output
def register_trigger(when=None, when_not=None, set_flag=None, clear_flag=None, callback=None):
    """
    Register a trigger that fires when a given flag is set or cleared.

    Triggers run at the moment the watched flag changes.

    :param str when: Fire when this flag is set.
    :param str when_not: Fire when this flag is cleared.
    :param str set_flag: Flag to set when the trigger fires.
    :param str clear_flag: Flag to clear when the trigger fires.
    :param callback: Callable invoked when the trigger fires.

    Exactly one of *when* / *when_not*, and at least one of *set_flag*,
    *clear_flag*, or *callback* must be provided.
    """
    if not (when or when_not):
        raise ValueError('Must provide one of when or when_not')
    if when and when_not:
        raise ValueError('Only one of when or when_not can be provided')
    if not (set_flag or clear_flag or callback):
        raise ValueError('Must provide at least one of set_flag, clear_flag, or callback')
    trigger = _get_trigger(when, when_not)
    # De-duplicate while appending each provided action to its bucket.
    for action, bucket in ((set_flag, 'set_flag'),
                           (clear_flag, 'clear_flag'),
                           (callback, 'callbacks')):
        if action and action not in trigger[bucket]:
            trigger[bucket].append(action)
    _save_trigger(when, when_not, trigger)
def _get_trigger(when, when_not):
    """Load the persisted trigger record for *when* (set) or *when_not*
    (clear), merging in the process-local callbacks."""
    if when is not None:
        bucket, name = 'flag_set_triggers', when
    elif when_not is not None:
        bucket, name = 'flag_clear_triggers', when_not
    key = 'reactive.{}.{}'.format(bucket, name)
    triggers = unitdata.kv().get(key, {
        'set_flag': [],
        'clear_flag': [],
    })
    # Callbacks are never persisted; they live only in this process.
    triggers['callbacks'] = TRIGGER_CALLBACKS.get(key, [])
    return triggers
def _save_trigger(when, when_not, data):
    """Persist *data* under the same key scheme used by _get_trigger,
    stashing the (non-serialisable) callbacks in process memory."""
    if when is not None:
        bucket, name = 'flag_set_triggers', when
    elif when_not is not None:
        bucket, name = 'flag_clear_triggers', when_not
    key = 'reactive.{}.{}'.format(bucket, name)
    TRIGGER_CALLBACKS[key] = data.pop('callbacks')
    return unitdata.kv().set(key, data)
@cmdline.subcommand()
@cmdline.test_command
def is_flag_set(flag):
    """Return True if *flag* is currently set."""
    return flag in get_flags()
@cmdline.subcommand()
@cmdline.test_command
def all_flags_set(*desired_flags):
    """Return True if every one of *desired_flags* is set."""
    active = frozenset(get_flags())
    return all(f in active for f in desired_flags)
@cmdline.subcommand()
@cmdline.test_command
def any_flags_set(*desired_flags):
    """Return True if at least one of *desired_flags* is set."""
    active = frozenset(get_flags())
    return any(f in active for f in desired_flags)
@cmdline.subcommand()
def get_flags():
    """
    Return a sorted list of every flag that is currently set.
    """
    return sorted(unitdata.kv().getrange('reactive.states.', strip=True) or {})
@cmdline.subcommand()
def get_unset_flags(*desired_flags):
    """Return, sorted, the subset of *desired_flags* that is not set.

    :param desired_flags: list of reactive flags
    :type desired_flags: non-keyword args, str
    :returns: list of unset flags filtered from the parameters shared
    :rtype: List[str]
    """
    present = unitdata.kv().getrange('reactive.states.', strip=True) or {}
    return sorted(set(desired_flags) - set(present))
def _get_flag_value(flag, default=None):
    """Return the value stored alongside *flag*, or *default* if unset."""
    return unitdata.kv().get('reactive.states.{}'.format(flag), default)
# DEPRECATED
@cmdline.subcommand()
@cmdline.no_output
def set_state(state, value=None):
    """
    .. deprecated:: 0.5.0
       Use :func:`set_flag` instead; this is a thin alias.
    """
    return set_flag(state, value)
@cmdline.subcommand()
@cmdline.no_output
def remove_state(state):
    """
    .. deprecated:: 0.5.0
       Use :func:`clear_flag` instead; this is a thin alias.
    """
    return clear_flag(state)
def toggle_state(state, should_set):
    """
    .. deprecated:: 0.5.0
       Use :func:`toggle_flag` instead; this is a thin alias.
    """
    return toggle_flag(state, should_set)
@cmdline.subcommand()
@cmdline.test_command
def is_state(state):
    """
    .. deprecated:: 0.5.0
       Use :func:`is_flag_set` instead; this is a thin alias.
    """
    return is_flag_set(state)
@cmdline.subcommand()
@cmdline.test_command
def all_states(*desired_states):
    """
    .. deprecated:: 0.5.0
       Use :func:`all_flags_set` instead; this is a thin alias.
    """
    return all_flags_set(*desired_states)
@cmdline.subcommand()
@cmdline.test_command
def any_states(*desired_states):
    """
    .. deprecated:: 0.5.0
       Use :func:`any_flags_set` instead; this is a thin alias.
    """
    return any_flags_set(*desired_states)
@cmdline.subcommand()
def get_states():
    """
    .. deprecated:: 0.5.0
       Use :func:`get_flags` instead.

    Return a mapping of all active states to their values.
    """
    states = unitdata.kv().getrange('reactive.states.', strip=True)
    return states or {}
def get_state(flag, default=None):
    """
    .. deprecated:: 0.5.0
       For internal use only; forwards to the private flag-value lookup.
    """
    value = _get_flag_value(flag, default)
    return value
# INTERNAL
@hookenv.atstart
def _manage_automatic_flags():
    """Hook-start entry point for the flags the framework manages itself."""
    _manage_upgrade_flags()
def _manage_upgrade_flags():
    """Set/clear ``upgrade.series.in-progress`` around series upgrades."""
    actions = {
        'pre-series-upgrade': lambda: set_flag('upgrade.series.in-progress'),
        'post-series-upgrade': lambda: clear_flag('upgrade.series.in-progress'),
    }
    action = actions.get(hookenv.hook_name())
    if action is not None:
        action()
@hookenv.atexit
def _clear_triggers():
    """Drop every persisted trigger record at the end of the hook."""
    for prefix in ('reactive.flag_triggers.',  # old key, kept for cleanup
                   'reactive.flag_set_triggers.',
                   'reactive.flag_clear_triggers.'):
        unitdata.kv().unsetrange(prefix=prefix)
|
juju-solutions/charms.reactive
|
charms/reactive/flags.py
|
Python
|
apache-2.0
| 9,936 | 0.000805 |
import time
import zipfile
from io import BytesIO
from django.utils.image import Image as D_Image
from django.core.files.base import ContentFile
from celery import task
from .models import Image
@task
def upload_zip(to_upload):
    """Celery task: import every image contained in ``to_upload.zip_file``.

    Each valid member of the archive is saved as an ``Image`` record owned
    by ``to_upload.user`` and added to ``to_upload.albums``.  Titles are
    ``<title-or-filename>_<counter>``.

    :raises Exception: if the archive is corrupt or a member is not a
        parseable image.
    :returns: a status string on success.
    """
    print("In the zip!")
    # 'archive' rather than 'zip' -- avoid shadowing the builtin.
    archive = zipfile.ZipFile(to_upload.zip_file)
    try:
        bad_file = archive.testzip()
        if bad_file:
            raise Exception('"%s" in zip archive is corrupt' % bad_file)
        count = 1
        for file_name in sorted(archive.namelist()):
            # Skip metadata entries such as __MACOSX/ and hidden files.
            if file_name.startswith(('__', '.')):
                continue
            data = archive.read(file_name)
            if not data:
                # Directory entries read back as empty payloads.
                continue
            try:
                opened = D_Image.open(BytesIO(data))
                opened.verify()  # integrity check only; record is built from `data`
            except Exception:
                raise Exception('"%s" is a bad image file' % file_name)
            base_title = to_upload.title if to_upload.title else file_name
            title = '_'.join([base_title, str(count)])
            image = Image(title=title,
                          created=time.time(),
                          public=to_upload.public,
                          user=to_upload.user, )
            content_file = ContentFile(data)
            image.image.save(file_name, content_file)
            image.save()
            image.albums.add(to_upload.albums)
            image.save()
            count += 1
    finally:
        # Close on every path -- the original leaked the archive when an
        # image failed verification or a save raised.
        archive.close()
    return "Zip file uploaded!!"
|
dsimandl/teamsurmandl
|
gallery/tasks.py
|
Python
|
mit
| 1,465 | 0.001365 |
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for l.d.tf.optimizers.python.PeriodicInvCovUpdateKfacOpt class."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Dependency imports
import sonnet as snt
import tensorflow.compat.v1 as tf
from kfac.python.ops import layer_collection
from kfac.python.ops.kfac_utils import periodic_inv_cov_update_kfac_opt
from kfac.python.ops.tensormatch import graph_search
# Number of examples per synthetic training batch used by the test.
_BATCH_SIZE = 128
def _construct_layer_collection(layers, all_logits, var_list):
  """Register each logits tensor and the model's layers with *layers*."""
  for idx, logits in enumerate(all_logits):
    tf.logging.info("Registering logits: %s", logits)
    # Reuse the variable scope for every registration after the first.
    with tf.variable_scope(tf.get_variable_scope(), reuse=(idx > 0)):
      layers.register_categorical_predictive_distribution(
          logits, name="register_logits")
  batch_size = all_logits[0].shape.as_list()[0]
  graph_search.register_layers(
      layers, var_list if var_list else tf.trainable_variables(), batch_size)
class PeriodicInvCovUpdateKfacOptTest(tf.test.TestCase):

  def test_train(self):
    """Smoke test: train a small MLP on random data for 50 steps and check
    that the optimizer's step counter advances by exactly one per
    minimize() run."""
    # Random 784-dim "images" and integer labels in [0, 10).
    image = tf.random_uniform(shape=(_BATCH_SIZE, 784), maxval=1.)
    labels = tf.random_uniform(shape=(_BATCH_SIZE,), maxval=10, dtype=tf.int32)
    labels_one_hot = tf.one_hot(labels, 10)
    model = snt.Sequential([snt.BatchFlatten(), snt.nets.MLP([128, 128, 10])])
    logits = model(image)
    all_losses = tf.nn.softmax_cross_entropy_with_logits_v2(
        logits=logits, labels=labels_one_hot)
    loss = tf.reduce_mean(all_losses)
    layers = layer_collection.LayerCollection()
    # Inverses refreshed every 10 steps, covariances every step.
    optimizer = periodic_inv_cov_update_kfac_opt.PeriodicInvCovUpdateKfacOpt(
        invert_every=10,
        cov_update_every=1,
        learning_rate=0.03,
        cov_ema_decay=0.95,
        damping=100.,
        layer_collection=layers,
        momentum=0.9,
        num_burnin_steps=0,
        placement_strategy="round_robin")
    _construct_layer_collection(layers, [logits], tf.trainable_variables())

    train_step = optimizer.minimize(loss)
    counter = optimizer.counter

    max_iterations = 50

    with self.test_session() as sess:
      sess.run(tf.global_variables_initializer())
      coord = tf.train.Coordinator()
      tf.train.start_queue_runners(sess=sess, coord=coord)
      for iteration in range(max_iterations):
        sess.run([loss, train_step])
        # The counter must track the number of completed train steps.
        counter_ = sess.run(counter)
        self.assertEqual(counter_, iteration + 1.0)
if __name__ == "__main__":
  # KFAC builds TF1-style graphs, so force v1 behavior before running tests.
  tf.disable_v2_behavior()
  tf.test.main()
|
tensorflow/kfac
|
kfac/python/kernel_tests/periodic_inv_cov_update_kfac_opt_test.py
|
Python
|
apache-2.0
| 3,178 | 0.003776 |
from django.contrib import messages
from django.http import Http404, HttpResponse
from django.shortcuts import render
from corehq.apps.app_manager.dbaccessors import get_app
from corehq.apps.app_manager.decorators import require_deploy_apps, \
require_can_edit_apps
from corehq.apps.app_manager.xform import XForm
from corehq.util.view_utils import set_file_download
from dimagi.utils.logging import notify_exception
from dimagi.utils.subprocess_timeout import ProcessTimedOut
def _query_flag(request, name, default=True):
    """Interpret a query-string parameter as a boolean.

    ``request.GET`` values are always strings, so a plain
    ``request.GET.get(name, True)`` treats *any* supplied value --
    including ``"false"`` and ``"0"`` -- as truthy.  Absent parameters
    keep *default*; explicit negative strings disable the flag.
    """
    value = request.GET.get(name)
    if value is None:
        return default
    return str(value).strip().lower() not in ('false', '0', 'no', '')


@require_can_edit_apps
def multimedia_list_download(request, domain, app_id):
    """Return a text/plain download (list.txt) of the multimedia paths
    referenced by the app's forms.

    Query parameters: ``audio`` / ``images`` choose which reference types
    to include (both default on); ``strip_jr`` (default on) removes the
    ``jr://file/`` prefix from each path.
    """
    app = get_app(domain, app_id)
    include_audio = _query_flag(request, "audio")
    include_images = _query_flag(request, "images")
    strip_jr = _query_flag(request, "strip_jr")
    filelist = []
    for m in app.get_modules():
        for f in m.get_forms():
            parsed = XForm(f.source)
            parsed.validate()
            if include_images:
                filelist.extend(parsed.image_references)
            if include_audio:
                filelist.extend(parsed.audio_references)
    if strip_jr:
        filelist = [s.replace("jr://file/", "") for s in filelist if s]
    response = HttpResponse()
    set_file_download(response, 'list.txt')
    # Deduplicate and sort for a stable, diff-friendly listing.
    response.write("\n".join(sorted(set(filelist))))
    return response
@require_deploy_apps
def multimedia_ajax(request, domain, app_id, template='app_manager/v1/partials/multimedia_ajax.html'):
    """Render the multimedia-status partial for an Application doc;
    non-Application doc types get a 404."""
    app = get_app(domain, app_id)
    if app.get_doc_type() != 'Application':
        raise Http404()
    try:
        multimedia_state = app.check_media_state()
    except ProcessTimedOut:
        # The media check timed out; report it and fall back to a state
        # that tells the UI to retry.
        notify_exception(request)
        messages.warning(request, (
            "We were unable to check if your forms had errors. "
            "Refresh the page and we will try again."
        ))
        multimedia_state = {
            'has_media': False,
            'has_form_errors': True,
            'has_missing_refs': False,
        }
    return render(request, template, {
        'multimedia_state': multimedia_state,
        'domain': domain,
        'app': app,
    })
|
qedsoftware/commcare-hq
|
corehq/apps/app_manager/views/multimedia.py
|
Python
|
bsd-3-clause
| 2,218 | 0.000451 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.osv import fields, osv
from openerp.tools.translate import _
class hr_attendance_error(osv.osv_memory):
    """Transient wizard collecting the parameters (date range, maximum
    tolerated delay) for the attendance-error report, then launching it."""
    _name = 'hr.attendance.error'
    _description = 'Print Error Attendance Report'
    # Fields shown on the wizard popup.
    _columns = {
        'init_date': fields.date('Starting Date', required=True),
        'end_date': fields.date('Ending Date', required=True),
        'max_delay': fields.integer('Max. Delay (Min)', required=True)
    }
    # Defaults: today for both bounds, 120 minutes of tolerated delay.
    _defaults = {
        'init_date': lambda *a: time.strftime('%Y-%m-%d'),
        'end_date': lambda *a: time.strftime('%Y-%m-%d'),
        'max_delay': 120,
    }
    def print_report(self, cr, uid, ids, context=None):
        """Find the employees with sign in/out records in the selected
        period and return the ir.actions.report.xml action that renders
        the report for them.

        :raises osv.except_osv: when no attendance rows match.
        """
        emp_ids = []
        data_error = self.read(cr, uid, ids, context=context)[0]
        date_from = data_error['init_date']
        date_to = data_error['end_date']
        # Raw SQL for speed; values are passed as parameters (not
        # interpolated), so this is safe from injection.
        # context['active_ids'] holds the employees selected in the list view.
        cr.execute("SELECT id FROM hr_attendance WHERE employee_id IN %s AND to_char(name,'YYYY-mm-dd')<=%s AND to_char(name,'YYYY-mm-dd')>=%s AND action IN %s ORDER BY name" ,(tuple(context['active_ids']), date_to, date_from, tuple(['sign_in','sign_out'])))
        attendance_ids = [x[0] for x in cr.fetchall()]
        if not attendance_ids:
            raise osv.except_osv(_('No Data Available !'), _('No records are found for your selection!'))
        attendance_records = self.pool.get('hr.attendance').browse(cr, uid, attendance_ids, context=context)
        # Deduplicate employee ids while preserving first-seen order.
        for rec in attendance_records:
            if rec.employee_id.id not in emp_ids:
                emp_ids.append(rec.employee_id.id)
        data_error['emp_ids'] = emp_ids
        datas = {
            'ids': [],
            'model': 'hr.employee',
            'form': data_error
        }
        return {
            'type': 'ir.actions.report.xml',
            'report_name': 'hr.attendance.error',
            'datas': datas,
        }
# Legacy OpenERP convention: instantiate the osv class to register it.
hr_attendance_error()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
mrshelly/openerp71313
|
openerp/addons/hr_attendance/wizard/hr_attendance_error.py
|
Python
|
agpl-3.0
| 2,918 | 0.002742 |
#!/bin/env python3
"""
Copyright (C) 2014 by Yichao Zhou <broken.zhou AT gmail DOT com>
License: http://www.gnu.org/licenses/gpl.html GPL version 3 or higher
Any comments are welcome through email and github!
"""
import codecs
import re
import random
import os
import pexpect
import sys
import string
import time
from PyQt4 import QtGui
from PyQt4 import QtCore
from PyQt4.QtCore import Qt, pyqtSignal
try:
from PyQt4.QtCore import QString
except ImportError:
QString = str
# Back-off schedule (seconds) between reconnection attempts.
# NOTE(review): not referenced anywhere in this module -- confirm before removing.
RETRY_DELAY = [ 5, 5, 10, 20, 60 ]

# Connection state machine values shared by SSHThread and QTunneler.
STATE_NOTCONNECTED = 0
STATE_CONNECTING = 1
STATE_CONNECTED = 2
class SSHThread(QtCore.QThread):
    """Worker thread that keeps an autossh tunnel alive.

    Spawns ``autossh`` through pexpect, classifies its output, and reports
    progress to the GUI through Qt signals.  It reads settings from
    ``self.app`` (the owning QTunneler widget) but never touches widgets
    for writing -- GUI updates go through signals.
    """

    # Qt signals consumed by the QTunneler widget.
    messageLogged = pyqtSignal(QString)   # one log line for the history pane
    stateSwitched = pyqtSignal(int)       # one of the STATE_* constants
    portOccupied = pyqtSignal()
    # NOTE(review): answerCondition appears unused in this module -- confirm.
    answerCondition = QtCore.QWaitCondition()

    # Result codes returned by wait()/sendPassword().
    SSH_UNKNOWN = 0   # no conclusive output yet; keep waiting
    SSH_FAIL = 1      # connection failed; autossh keeps retrying
    SSH_EXIT = 2      # fatal error or user abort; thread should stop
    SSH_OK = 3        # tunnel is up
    SSH_RETRY = 4     # re-spawn ssh (e.g. after picking a new monitor port)

    def __init__(self, app):
        super(SSHThread, self).__init__()
        self.app = app
        self.retryCount = 0

    def log(self, msg):
        """Forward *msg* to the GUI log via the messageLogged signal."""
        self.messageLogged.emit(msg)

    def switchState(self, state):
        """Announce a STATE_* transition to the GUI."""
        self.stateSwitched.emit(state)

    def wait(self, timeout=30):
        """Watch the ssh process output and classify what happened.

        NOTE(review): this overrides QThread.wait() with different
        semantics -- callers must never rely on the QThread meaning.

        Args:
            timeout: pexpect timeout in seconds.
        Returns:
            SSH_OK: the connection is up.
            SSH_FAIL: connection failed; caller should let autossh retry.
            SSH_EXIT: fatal error or user-terminated; thread should exit.
            SSH_RETRY: re-spawn ssh with new parameters.
            SSH_UNKNOWN: ssh did not produce enough information yet.
        """
        # pexpect compiles string patterns as regular expressions, so the
        # literal parentheses in the publickey message must be escaped --
        # the original unescaped pattern could never match real ssh output.
        index = self.ssh.expect([
            pexpect.TIMEOUT,                                          #0
            "ssh: connect to host",                                   #1
            r"Permission denied \(publickey\)",                       #2
            "The authenticity of host",                               #3
            "s password: ",                                           #4
            pexpect.EOF,                                              #5
            "execing",                                                #6
            "connection ok",                                          #7
            "restarting ssh",                                         #8
            "channel_setup_fwd_listener: cannot listen to port",      #9
            "Address already in use",                                 #10
        ], timeout=timeout)
        if index == 0:
            # Nothing happened within the timeout.
            return self.SSH_UNKNOWN
        elif index == 1:
            return self.SSH_FAIL
        elif index == 2:
            self.log("Permission denied; Is your private key correct?")
            return self.SSH_EXIT
        elif index == 3:
            # Auto-accept the server fingerprint, then keep waiting.
            self.log("Please check server's fingerprint")
            self.ssh.sendline("yes")
            return self.wait(timeout=timeout)
        elif index == 4:
            # Server asked for a password.
            return self.sendPassword(timeout=timeout)
        elif index == 5:
            self.log("Process terminated")
            return self.SSH_EXIT
        elif index == 6:
            self.log("Connecting...")
            return self.SSH_UNKNOWN
        elif index == 7:
            return self.SSH_OK
        elif index == 8:
            self.log("Connection is down. Please wait for reconnecting")
            return self.SSH_FAIL
        elif index == 9:
            portNumber = self.app.socksPortEdit.text();
            self.log("Cannot listen SOCKS5 port at {}".format(portNumber))
            return self.SSH_EXIT
        elif index == 10:
            portNumber = self.app.monitorPort
            # Include the port in the message -- the original format call
            # had no placeholder, so the argument was silently dropped.
            self.log("Bad luck: monitor port {} is already in use".format(portNumber))
            self.app.chooseMonitorPort()
            return self.SSH_RETRY

    def sendPassword(self, timeout):
        """Send the password from the GUI and classify the server reply."""
        self.ssh.sendline(self.app.passphraseLineEdit.text())
        self.log("Password is sent")
        index = self.ssh.expect([
            pexpect.TIMEOUT,             #0
            "Permission denied",         #1
            "ssh: connect to host",      #2
            pexpect.EOF,                 #3
            "connection ok",             #4
        ], timeout=timeout)
        if index == 0:
            return self.SSH_UNKNOWN
        elif index == 1:
            self.log("Permission denied; Is your password correct?")
            self.app.disconnect()
            return self.SSH_EXIT
        elif index == 2:
            return self.SSH_FAIL
        elif index == 3:
            self.log("Process terminated")
            return self.SSH_EXIT
        elif index == 4:
            return self.SSH_OK

    def checkState(self, result):
        """Translate a wait() result into a GUI state change.

        Returns:
            True: if this thread should exit
            False: otherwise
        """
        if result == self.SSH_OK:
            self.switchState(STATE_CONNECTED)
            return False
        elif result == self.SSH_EXIT:
            self.switchState(STATE_NOTCONNECTED)
            self.app.disconnect()
            return True
        elif result == self.SSH_UNKNOWN:
            return False;
        elif result == self.SSH_FAIL:
            # Keep the thread alive; autossh retries on its own.
            self.switchState(STATE_CONNECTING)
            return False
        elif result == self.SSH_RETRY:
            self.connect()
            return False

    def connect(self):
        """(Re)spawn the autossh process with the current GUI settings."""
        self.app.disconnect()
        self.switchState(STATE_CONNECTING)
        command = self.app.getSSHCommand()
        env = self.app.getSSHEnv()
        self.log(command)
        self.ssh = pexpect.spawn(command, env=env)

    def disconnect(self):
        """Force-kill the ssh child (if any) and report NOT CONNECTED."""
        while hasattr(self, 'ssh') and not self.ssh.terminate(force=True):
            pass
        self.switchState(STATE_NOTCONNECTED)

    def run(self):
        """Thread main loop: connect, then classify output until exit."""
        self.connect()
        first = True
        while True:
            state = self.wait(timeout=15)
            if self.checkState(state):
                return
            if state == self.SSH_FAIL:
                first = True
            if state == self.SSH_OK and first:
                first = False
                self.log("Connection is established")
class QTunneler(QtGui.QWidget):
    """Main window: collects SSH tunnel settings, persists them with
    QSettings, and drives an SSHThread that runs autossh."""

    def __init__(self):
        super(QTunneler, self).__init__()
        self.initUI()
        self.sshThread = SSHThread(self)
        self.state = STATE_NOTCONNECTED
        self.loadConfig()
        # Wire worker-thread signals to the GUI, and persist settings
        # whenever a checkbox changes.
        self.sshThread.messageLogged.connect(self.log)
        self.sshThread.stateSwitched.connect(self.switchState)
        self.autoStartup.stateChanged.connect(self.saveConfig)
        self.autoConnect.stateChanged.connect(self.saveConfig)
        self.usingPolipo.stateChanged.connect(self.saveConfig)
        self.remeberPassowrd.stateChanged.connect(self.saveConfig)
        self.promptOnExit.stateChanged.connect(self.saveConfig)
    def initUI(self): #{{{
        """Build and lay out all widgets, then show the window."""
        expandingPolicy = QtGui.QSizePolicy.Expanding
        vertical = QtGui.QVBoxLayout()
        editLayout = QtGui.QGridLayout()
        vertical.addLayout(editLayout)
        # editLayout.setSpacing(5)
        self.serverAddressLabel = QtGui.QLabel("Server Address:")
        self.serverAddressLabel.setSizePolicy(expandingPolicy, expandingPolicy)
        self.serverAddressLabel.setAlignment(Qt.AlignVCenter | Qt.AlignRight)
        self.serverAddressLineEdit = QtGui.QLineEdit()
        editLayout.addWidget(self.serverAddressLabel, 0, 0)
        editLayout.addWidget(self.serverAddressLineEdit, 0, 1)
        self.serverPortLabel = QtGui.QLabel("Server Port:")
        self.serverPortLabel.setSizePolicy(expandingPolicy, expandingPolicy)
        self.serverPortLabel.setAlignment(Qt.AlignVCenter | Qt.AlignRight)
        self.serverPortLineEdit = QtGui.QLineEdit()
        editLayout.addWidget(self.serverPortLabel, 0, 2)
        editLayout.addWidget(self.serverPortLineEdit, 0, 3)
        self.usernameLabel = QtGui.QLabel("Username:")
        self.usernameLabel.setSizePolicy(expandingPolicy, expandingPolicy)
        self.usernameLabel.setAlignment(Qt.AlignVCenter | Qt.AlignRight)
        self.usernameLineEdit = QtGui.QLineEdit()
        editLayout.addWidget(self.usernameLabel, 1, 0)
        editLayout.addWidget(self.usernameLineEdit, 1, 1)
        self.passphraseLabel = QtGui.QLabel("Passphrase:")
        self.passphraseLabel.setSizePolicy(expandingPolicy, expandingPolicy)
        self.passphraseLabel.setAlignment(Qt.AlignVCenter | Qt.AlignRight)
        self.passphraseLineEdit = QtGui.QLineEdit()
        self.passphraseLineEdit.setEchoMode(QtGui.QLineEdit.Password)
        editLayout.addWidget(self.passphraseLabel, 1, 2)
        editLayout.addWidget(self.passphraseLineEdit, 1, 3)
        self.socksPortLabel = QtGui.QLabel("SOCKS5 Proxy Port:")
        self.socksPortLabel.setSizePolicy(expandingPolicy, expandingPolicy)
        self.socksPortLabel.setAlignment(Qt.AlignVCenter | Qt.AlignRight)
        self.socksPortEdit = QtGui.QLineEdit("7070")
        editLayout.addWidget(self.socksPortLabel, 2, 0)
        editLayout.addWidget(self.socksPortEdit, 2, 1)
        self.httpPortLabel = QtGui.QLabel("HTTP Proxy Port:")
        self.httpPortLabel.setSizePolicy(expandingPolicy, expandingPolicy)
        self.httpPortLabel.setAlignment(Qt.AlignVCenter | Qt.AlignRight)
        self.httpPortEdit = QtGui.QLineEdit("8080")
        # HTTP port is only meaningful when the polipo checkbox is on.
        self.httpPortEdit.setEnabled(False);
        editLayout.addWidget(self.httpPortLabel, 2, 2)
        editLayout.addWidget(self.httpPortEdit, 2, 3)
        self.identityLabel = QtGui.QLabel("Identity File:")
        self.identityLabel.setSizePolicy(expandingPolicy, expandingPolicy)
        self.identityLabel.setAlignment(Qt.AlignVCenter | Qt.AlignRight)
        self.identityEdit = QtGui.QLineEdit()
        editLayout.addWidget(self.identityLabel, 3, 0)
        editLayout.addWidget(self.identityEdit, 3, 1, 1, 3)
        self.optionalLabel = QtGui.QLabel("Optional Arguments:")
        self.optionalLabel.setSizePolicy(expandingPolicy, expandingPolicy)
        self.optionalLabel.setAlignment(Qt.AlignVCenter | Qt.AlignRight)
        self.optionalEdit = QtGui.QLineEdit()
        editLayout.addWidget(self.optionalLabel, 4, 0)
        editLayout.addWidget(self.optionalEdit, 4, 1, 1, 3)
        configurationLayout = QtGui.QGridLayout()
        vertical.addLayout(configurationLayout)
        self.autoStartup = QtGui.QCheckBox("Auto-startup")
        self.autoConnect = QtGui.QCheckBox("Auto-connect on Startup")
        self.usingPolipo = QtGui.QCheckBox("Using Polipo for HTTP Proxy")
        self.remeberPassowrd = QtGui.QCheckBox("Save Password")
        self.promptOnExit = QtGui.QCheckBox("Prompt on Exit")
        configurationLayout.addWidget(self.autoStartup, 0, 0)
        configurationLayout.addWidget(self.autoConnect, 0, 1)
        configurationLayout.addWidget(self.usingPolipo, 0, 2)
        configurationLayout.addWidget(self.remeberPassowrd, 1, 0)
        configurationLayout.addWidget(self.promptOnExit, 1, 1)
        self.usingPolipo.stateChanged.connect(self.httpPortEdit.setEnabled)
        self.historyList = QtGui.QTextEdit()
        self.historyList.setReadOnly(True)
        self.historyList.setFont(QtGui.QFont("monospace"))
        vertical.addWidget(self.historyList)
        buttonLayout = QtGui.QHBoxLayout()
        vertical.addLayout(buttonLayout)
        self.connectButton = QtGui.QPushButton("&Connect")
        self.connectButton.clicked.connect(self.connect)
        self.hideButton = QtGui.QPushButton("&Hide")
        self.hideButton.clicked.connect(self.minimize)
        self.exitButton = QtGui.QPushButton("&Exit")
        self.exitButton.clicked.connect(self.exit)
        self.aboutButton = QtGui.QPushButton("&About")
        self.aboutButton.clicked.connect(self.about)
        buttonLayout.addWidget(self.connectButton)
        buttonLayout.addWidget(self.hideButton)
        buttonLayout.addWidget(self.exitButton)
        buttonLayout.addWidget(self.aboutButton)
        self.setLayout(vertical)
        self.setWindowTitle("QTunneler - Not Connected")
        # Center the window on the screen.
        self.move(QtGui.QApplication.desktop().screen().rect().center() -
                  self.rect().center())
        self.show()
        self.passphraseLineEdit.setFocus()
    #}}}
    def closeEvent(self, event):
        """Tear down the tunnel before the window closes."""
        self.disconnect()
        event.accept()
    def getSSHCommand(self):
        """Build the autossh command line from the current settings.

        NOTE(review): only address, username, identity file and SOCKS port
        are used -- the Server Port, Optional Arguments and HTTP/polipo
        settings are never applied to the command.  Confirm intent.
        """
        if not hasattr(self, "monitorPort"):
            self.chooseMonitorPort()
        command = "autossh -M {} -TnN".format(self.monitorPort)
        command += " -D {}".format(self.socksPortEdit.text())
        if self.identityEdit.text().strip() != "":
            command += " -i {}".format(self.identityEdit.text())
        command += " {}@{}".format(self.usernameLineEdit.text(),
                                   self.serverAddressLineEdit.text())
        return command
    def getSSHEnv(self):
        """Environment for autossh: verbose, fast first poll, no gatetime."""
        return {
            "AUTOSSH_DEBUG" : "1",
            "AUTOSSH_FIRST_POLL" : "10",
            "AUTOSSH_GATETIME" : "0",
            "AUTOSSH_POLL" : "30",
        }
    def connect(self):
        """Toggle handler for the Connect/Disconnect button."""
        if self.state in [STATE_CONNECTING , STATE_CONNECTED]:
            self.disconnect()
            return
        self.saveConfig()
        self.sshThread.start()
    def disconnect(self):
        self.sshThread.disconnect()
    def minimize(self):
        # TODO: not implemented (tray/minimize behavior).
        pass
    def exit(self):
        self.close()
        pass
    def about(self):
        # TODO: not implemented.
        pass
    def log(self, message):
        """Append a timestamped line to the history pane."""
        logstr = time.strftime("<b>[%B %d, %H:%M:%S]</b> ")
        logstr += message
        self.historyList.append(logstr)
    def switchState(self, state):
        """Reflect a STATE_* value in the title bar and button label."""
        self.state = state
        if state == STATE_NOTCONNECTED:
            stateStr = "Not Connected"
            self.connectButton.setText("Connect")
        elif state == STATE_CONNECTING:
            stateStr = "Connecting"
            self.connectButton.setText("Disconnect")
        else:
            stateStr = "Connected"
            self.connectButton.setText("Disconnect")
        self.setWindowTitle("QTunneler - {}".format(stateStr))
    def chooseMonitorPort(self):
        """Pick a random autossh monitor port and persist the settings."""
        self.monitorPort = random.randint(10000, 32767)
        self.saveConfig()
    def loadConfig(self):
        """Restore all settings from QSettings ("qtunneler")."""
        setting = QtCore.QSettings("qtunneler")
        self.serverAddressLineEdit.setText(setting.value("ServerAddress", "your.sshserver.com", type=QString))
        self.serverPortLineEdit.setText(setting.value("ServerPort", "22", type=QString))
        self.usernameLineEdit.setText(setting.value("Username", "username", type=QString))
        # rot13 is obfuscation, not encryption -- the password is effectively
        # stored in the clear.
        self.passphraseLineEdit.setText(codecs.encode(setting.value("Passphrase", "", type=QString), "rot_13"))
        self.socksPortEdit.setText(setting.value("SocksPort", "7070", type=QString))
        self.httpPortEdit.setText(setting.value("HttpPort", "8080", type=QString))
        self.identityEdit.setText(setting.value("Identity", "", type=QString))
        self.optionalEdit.setText(setting.value("Optional", "", type=QString))
        self.autoStartup.setCheckState(setting.value("AutoStartup", Qt.Unchecked, type=Qt.CheckState))
        self.autoConnect.setCheckState(setting.value("AutoConnect", Qt.Unchecked, type=Qt.CheckState))
        self.usingPolipo.setCheckState(setting.value("UsingPolipo", Qt.Unchecked, type=Qt.CheckState))
        self.remeberPassowrd.setCheckState(setting.value("RememberPassword", Qt.Unchecked, type=Qt.CheckState))
        self.promptOnExit.setCheckState(setting.value("PromptOnExit", Qt.Checked, type=Qt.CheckState))
    def saveConfig(self):
        """Persist all settings to QSettings ("qtunneler")."""
        setting = QtCore.QSettings("qtunneler")
        setting.setValue("ServerAddress", self.serverAddressLineEdit.text())
        setting.setValue("ServerPort", self.serverPortLineEdit.text())
        setting.setValue("Username", self.usernameLineEdit.text())
        password = ""
        # Only store the (rot13-obfuscated) password when the user opted in.
        if self.remeberPassowrd.checkState():
            password = codecs.encode(self.passphraseLineEdit.text(), "rot_13")
        setting.setValue("Passphrase", password)
        setting.setValue("SocksPort", self.socksPortEdit.text())
        setting.setValue("HttpPort", self.httpPortEdit.text())
        setting.setValue("Identity", self.identityEdit.text())
        setting.setValue("Optional", self.optionalEdit.text())
        setting.setValue("AutoStartup", self.autoStartup.checkState())
        setting.setValue("AutoConnect", self.autoConnect.checkState())
        setting.setValue("UsingPolipo", self.usingPolipo.checkState())
        setting.setValue("RememberPassword", self.remeberPassowrd.checkState())
        setting.setValue("PromptOnExit", self.promptOnExit.checkState())
def main():
    """Start the Qt event loop with a single QTunneler window."""
    app = QtGui.QApplication(sys.argv)
    window = QTunneler()  # keep a reference so the widget isn't collected
    sys.exit(app.exec_())
# Run the GUI when executed directly.
if __name__ == '__main__':
    main()
|
zhou13/qtunneler
|
qtunneler.py
|
Python
|
gpl-3.0
| 16,827 | 0.002853 |
# -*- coding: utf-8 -*-
"""
Copyright © Val Neekman ([Neekware Inc.](http://neekware.com))
[ info@neekware.com, [@vneekman](https://twitter.com/vneekman) ]
All rights reserved.
Redistribution and use in source and binary forms,
with or without modification,
are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of this project nor the names of its contributors may be
used to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
source: https://github.com/un33k/python-slugify
"""
__version__ = '0.0.7'
__all__ = ['slugify']
import re
import unicodedata
import types
import sys
from htmlentitydefs import name2codepoint
from unidecode import unidecode
# Named character entity references, e.g. '&amp;'.
CHAR_ENTITY_REXP = re.compile('&(%s);' % '|'.join(name2codepoint))
# Decimal character references, e.g. '&#65;'.
# Raw strings fix the invalid '\d' escapes of the original patterns
# (a DeprecationWarning, and a SyntaxWarning on modern Python).
DECIMAL_REXP = re.compile(r'&#(\d+);')
# Hexadecimal character references, e.g. '&#x41;'.
HEX_REXP = re.compile(r'&#x([\da-fA-F]+);')
# Apostrophes are removed outright (so "don't" slugs to "dont", not "don-t").
REPLACE1_REXP = re.compile(r'[\']+')
# Any run of characters not allowed in a slug, replaced by a single '-'.
REPLACE2_REXP = re.compile(r'[^-a-z0-9]+')
# Runs of two or more hyphens, collapsed to one.
REMOVE_REXP = re.compile('-{2,}')
def smart_truncate(string, max_length=0, word_boundaries=False, separator=' '):
    """Truncate *string* to at most *max_length* characters.

    With ``word_boundaries`` False the string is simply sliced.  With it
    True, whole words (separated by *separator*) are accumulated greedily:
    every word that still fits within ``max_length`` is kept, even if an
    earlier, longer word was skipped.  Leading/trailing separators are
    always stripped.  ``max_length == 0`` disables truncation.
    """
    string = string.strip(separator)
    # No truncation requested, or the string already fits.
    if not max_length or len(string) < max_length:
        return string
    if not word_boundaries:
        return string[:max_length].strip(separator)
    if separator not in string:
        return string[:max_length]
    truncated = ''
    for word in string.split(separator):
        if not word:
            continue
        # Keep any word (plus trailing separator) that still fits.
        if len(truncated) + len(word) + len(separator) <= max_length:
            truncated = '%s%s%s' % (truncated, word, separator)
    # If not even the first word fit, fall back to a plain slice.
    return (truncated or string[:max_length]).strip(separator)
def slugify(text, entities=True, decimal=True, hexadecimal=True, max_length=0,
            word_boundary=False, separator='-'):
    """Make an ASCII slug from the given text.

    Resolves HTML named/decimal/hexadecimal entity references (each step
    can be disabled via its flag), transliterates to ASCII, lowercases,
    replaces runs of disallowed characters with '-', optionally truncates
    to ``max_length`` (respecting word boundaries when ``word_boundary``
    is true), and finally swaps '-' for ``separator`` if it differs.

    NOTE(review): this implementation is Python-2 only (``unicode``,
    ``unichr``, ``types.UnicodeType``); it will NameError on Python 3.
    """
    # Decode byte strings to unicode before any processing.
    if type(text) != types.UnicodeType:
        text = unicode(text, 'utf-8', 'ignore')
    # Transliterate non-ASCII characters to close ASCII equivalents.
    text = unidecode(text)
    # unidecode may return a byte string; normalize back to unicode.
    if type(text) != types.UnicodeType:
        text = unicode(text, 'utf-8', 'ignore')
    # Named character entity references, e.g. '&amp;' -> u'&'.
    if entities:
        text = CHAR_ENTITY_REXP.sub(lambda m:
            unichr(name2codepoint[m.group(1)]), text)
    # Decimal character references, e.g. '&#65;' -> u'A'.
    if decimal:
        try:
            text = DECIMAL_REXP.sub(lambda m: unichr(int(m.group(1))), text)
        except:
            # NOTE(review): bare except silently keeps the unconverted text.
            pass
    # Hexadecimal character references, e.g. '&#x41;' -> u'A'.
    if hexadecimal:
        try:
            text = HEX_REXP.sub(lambda m: unichr(int(m.group(1), 16)), text)
        except:
            pass
    # Decompose accented characters; on Python 2 drop the combining marks.
    text = unicodedata.normalize('NFKD', text)
    if sys.version_info < (3,):
        text = text.encode('ascii', 'ignore')
    # Remove apostrophes entirely so "don't" becomes "dont", not "don-t".
    text = REPLACE1_REXP.sub('', text.lower())
    # Replace every other run of disallowed characters with a single '-'.
    text = REPLACE2_REXP.sub('-', text.lower())
    # Collapse repeated '-' and trim leading/trailing ones.
    text = REMOVE_REXP.sub('-', text).strip('-')
    # Smart truncate if requested.
    if max_length > 0:
        text = smart_truncate(text, max_length, word_boundary, '-')
    # Swap in the caller's separator last, after all '-' processing.
    if separator != '-':
        text = text.replace('-', separator)
    return text
|
TheOstrichIO/tomato-cmd
|
slugify.py
|
Python
|
apache-2.0
| 4,674 | 0.001498 |
# Copyright 2020 Tensorforce Team. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import numpy as np
from tensorforce.environments import Environment
class CartPole(Environment):
    """
    Cart-pole balancing environment with randomizable physics parameters.

    Based on OpenAI Gym version
    (https://github.com/openai/gym/blob/master/gym/envs/classic_control/cartpole.py)

    Any physics parameter given as a (low, high) tuple is re-sampled
    uniformly on every reset(); a scalar fixes it.  The full internal state
    vector is [location, velocity, angle, angular velocity]; the observed
    subset is selected via state_velocities / state_location.
    """
    def __init__(
        self,
        # Physics parameters
        pole_mass=(0.05, 0.5),  # 0.1
        pole_length=(0.1, 1.0),  # 0.5, actually half the pole's length
        cart_mass=(0.5, 1.5),
        relative_force=(0.75, 1.5),  # 1.0, relative to gravity
        gravity=9.8,
        # State space
        state_velocities=True,
        state_location=False,  # true
        state_initial_max_angle=0.05,
        state_initial_max_angle_velocity=0.05,
        state_initial_max_velocity=0.05,
        # Action space
        action_timedelta=0.02,
        action_continuous=False,
        action_noop=True  # false
    ):
        super().__init__()
        # Physics parameters: normalize each to a (low, high) float range,
        # degenerate (x, x) for scalars.
        if isinstance(pole_mass, tuple):
            assert len(pole_mass) == 2 and 0.0 < pole_mass[0] < pole_mass[1]
            self.pole_mass_range = (float(pole_mass[0]), float(pole_mass[1]))
        else:
            assert pole_mass > 0.0
            self.pole_mass_range = (float(pole_mass), float(pole_mass))
        if isinstance(pole_length, tuple):
            assert len(pole_length) == 2 and 0.0 < pole_length[0] < pole_length[1]
            self.pole_length_range = (float(pole_length[0]), float(pole_length[1]))
        else:
            assert pole_length > 0.0
            self.pole_length_range = (float(pole_length), float(pole_length))
        if isinstance(cart_mass, tuple):
            assert len(cart_mass) == 2 and 0.0 < cart_mass[0] < cart_mass[1]
            self.cart_mass_range = (float(cart_mass[0]), float(cart_mass[1]))
        else:
            assert cart_mass > 0.0
            self.cart_mass_range = (float(cart_mass), float(cart_mass))
        if isinstance(relative_force, tuple):
            assert len(relative_force) == 2 and 0.0 < relative_force[0] < relative_force[1]
            self.relative_force_range = (float(relative_force[0]), float(relative_force[1]))
        else:
            assert relative_force > 0.0
            self.relative_force_range = (float(relative_force), float(relative_force))
        assert gravity > 0.0
        self.gravity = float(gravity)
        # State space: indices into [location, velocity, angle, angle_vel]
        # that are exposed to the agent; angle (2) is always observed.
        state_indices = [2]
        self.state_velocities = bool(state_velocities)
        if self.state_velocities:
            state_indices.append(3)
            state_indices.append(1)
        self.state_location = bool(state_location)
        if self.state_location:
            state_indices.append(0)
        self.state_indices = np.array(state_indices, np.int32)
        # Per-component bounds for the uniform initial-state distribution
        # (location always starts at exactly 0).
        self.state_initials = np.array([[
            0.0, float(state_initial_max_velocity),
            float(state_initial_max_angle), float(state_initial_max_angle_velocity)
        ]], dtype=np.float32)
        # Action space
        self.action_timedelta = float(action_timedelta)  # in seconds
        # A continuous action space requires the no-op (zero force) option.
        assert not action_continuous or action_noop
        self.action_continuous = bool(action_continuous)
        self.action_noop = bool(action_noop)
        # State bounds: derive velocity bounds from the extreme
        # accelerations attainable at the upright (angle = 0) position
        # over the configured parameter ranges.
        angle_bound = float(np.pi) / 4.0
        max_angle_acc_in_zero = self.relative_force_range[1] * self.gravity / \
            (self.cart_mass_range[0] + self.pole_mass_range[0]) / \
            self.pole_length_range[0] / \
            (4.0 / 3.0 - self.pole_mass_range[1] / (self.cart_mass_range[0] + self.pole_mass_range[0]))
        min_angle_acc_in_zero = self.relative_force_range[0] * self.gravity / \
            (self.cart_mass_range[1] + self.pole_mass_range[1]) / \
            self.pole_length_range[1] / \
            (4.0 / 3.0 - self.pole_mass_range[0] / (self.cart_mass_range[1] + self.pole_mass_range[1]))
        max_loc_acc_in_zero = (self.relative_force_range[1] * self.gravity - \
            self.pole_mass_range[0] * self.pole_length_range[0] * min_angle_acc_in_zero) / \
            (self.cart_mass_range[0] + self.pole_mass_range[0])
        # Velocity bounds allow 10 timesteps of maximum acceleration.
        angle_vel_bound = max_angle_acc_in_zero * self.action_timedelta * 10.0
        loc_vel_bound = max_loc_acc_in_zero * self.action_timedelta * 10.0
        if self.state_location:
            loc_bound = loc_vel_bound
        else:
            # Location unobserved: no terminal bound on cart position.
            loc_bound = np.inf
        self.state_bounds = np.array(
            [[loc_bound, loc_vel_bound, angle_bound, angle_vel_bound]], dtype=np.float32
        )
        assert (self.state_bounds > 0.0).all()
    def states(self):
        """Observation spec: the selected state components with their bounds."""
        return dict(
            type='float', shape=tuple(self.state_indices.shape),
            min_value=-self.state_bounds[0, self.state_indices],
            max_value=self.state_bounds[0, self.state_indices]
        )
    def actions(self):
        """Action spec: a continuous force, or 2/3 discrete choices (with no-op)."""
        if self.action_continuous:
            return dict(type='float', shape=())
        elif self.action_noop:
            return dict(type='int', shape=(), num_values=3)
        else:
            return dict(type='int', shape=(), num_values=2)
    def is_vectorizable(self):
        # Supports batched execution via reset(num_parallel=...).
        return True
    def reset(self, num_parallel=None):
        """Re-sample physics parameters and draw uniform random initial states.

        Returns the initial observation, or (parallel_indices, observations)
        when ``num_parallel`` is given.
        """
        # Physics parameters
        self.pole_mass = float(np.random.uniform(low=self.pole_mass_range[0], high=self.pole_mass_range[1]))
        self.pole_length = float(np.random.uniform(low=self.pole_length_range[0], high=self.pole_length_range[1]))
        self.cart_mass = float(np.random.uniform(low=self.cart_mass_range[0], high=self.cart_mass_range[1]))
        self.relative_force = float(np.random.uniform(low=self.relative_force_range[0], high=self.relative_force_range[1]))
        if num_parallel is None:
            initials = np.tile(self.state_initials, reps=(1, 1))
            self.state = np.random.uniform(low=-initials, high=initials)
            self.parallel_indices = None
            return self.state[0, self.state_indices]
        else:
            initials = np.tile(self.state_initials, reps=(num_parallel, 1))
            self.state = np.random.uniform(low=-initials, high=initials)
            self.parallel_indices = np.arange(num_parallel)
            return self.parallel_indices, self.state[:, self.state_indices]
    def execute(self, actions):
        """Advance the (possibly vectorized) simulation by one timestep.

        Terminated instances are dropped from the internal state; reward
        is 1 per step for every instance, including on the terminal step.
        """
        assert self.state.shape[0] > 0
        # Split state into components
        loc = self.state[:, 0]  # unused; kept for symmetry/documentation
        loc_vel = self.state[:, 1]
        angle = self.state[:, 2]
        angle_vel = self.state[:, 3]
        # Make action continuous
        actions = np.asarray(actions)
        if self.parallel_indices is None:
            actions = np.expand_dims(actions, axis=0)
        else:
            assert actions.shape[0] == self.parallel_indices.shape[0]
        if self.action_continuous:
            force = actions
        else:
            # Discrete mapping: 0 -> push left, 1 -> push right, 2 -> no-op.
            force = np.where(actions == 2, 0.0, np.where(actions == 1, 1.0, -1.0))
        force *= self.relative_force * self.gravity
        # Compute accelerations (https://coneural.org/florian/papers/05_cart_pole.pdf)
        cos_angle = np.cos(angle)
        sin_angle = np.sin(angle)
        total_mass = self.cart_mass + self.pole_mass
        pole_mass_length = self.pole_mass * self.pole_length
        bracket = (force + pole_mass_length * angle_vel * angle_vel * sin_angle) / total_mass
        denom = self.pole_length * (4.0 / 3.0 - (self.pole_mass * cos_angle * cos_angle) / total_mass)
        angle_acc = (self.gravity * sin_angle - cos_angle * bracket) / denom
        loc_acc = bracket - pole_mass_length * angle_acc * cos_angle / total_mass
        # Integration (explicit Euler step)
        deriv = np.stack([loc_vel, loc_acc, angle_vel, angle_acc], axis=1)
        self.state += self.action_timedelta * deriv
        # Terminal: any state component outside its bound.
        terminal = (np.abs(self.state) > self.state_bounds).any(axis=1)
        # Reward
        reward = np.ones_like(terminal, dtype=np.float32)
        if self.parallel_indices is None:
            state = self.state[0, self.state_indices]
            self.state = self.state[~terminal]
            return state, terminal.item(), reward.item()
        else:
            # Keep only the surviving instances for the next step.
            self.parallel_indices = self.parallel_indices[~terminal]
            self.state = self.state[~terminal]
            return self.parallel_indices, self.state[:, self.state_indices], terminal, reward
|
reinforceio/tensorforce
|
tensorforce/environments/cartpole.py
|
Python
|
apache-2.0
| 8,953 | 0.002457 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add an optional ``image_file`` ImageField to the Photo model."""
    dependencies = [
        ('latest_tweets', '0010_photo_unique'),
    ]
    operations = [
        migrations.AddField(
            model_name='photo',
            name='image_file',
            # blank=True makes the field optional; files go under
            # MEDIA_ROOT/latest_tweets/photo.
            field=models.ImageField(blank=True, upload_to='latest_tweets/photo'),
        ),
    ]
|
blancltd/django-latest-tweets
|
latest_tweets/migrations/0011_photo_image_file.py
|
Python
|
bsd-3-clause
| 434 | 0.002304 |
from edc_constants.constants import YES, NEG
from .base_test_case import BaseTestCase
from .factories import MaternalConsentFactory, MaternalEligibilityFactory, PostnatalEnrollmentFactory
class TestMaternalLocator(BaseTestCase):
    """Maternal locator tests for a postnatally enrolled, HIV-negative mother."""
    def setUp(self):
        """Create eligibility, consent and a postnatal enrollment sharing one registered subject."""
        super(TestMaternalLocator, self).setUp()
        self.maternal_eligibility = MaternalEligibilityFactory()
        # Consent is tied to the same registered subject as the eligibility.
        self.maternal_consent = MaternalConsentFactory(registered_subject=self.maternal_eligibility.registered_subject)
        self.registered_subject = self.maternal_consent.registered_subject
        # Enroll with a documented negative status confirmed by rapid test.
        PostnatalEnrollmentFactory(
            registered_subject=self.registered_subject,
            current_hiv_status=NEG,
            evidence_hiv_status=YES,
            rapid_test_done=YES,
            rapid_test_result=NEG)
    def test_maternal_locator(self):
        # TODO: placeholder -- no locator assertions implemented yet.
        pass
|
botswana-harvard/microbiome
|
microbiome/apps/mb_maternal/tests/test_maternal_locator.py
|
Python
|
gpl-2.0
| 847 | 0.002361 |
# -*- coding: utf-8 -*-
#
# Copyright (C) Pootle contributors.
# Copyright (C) Zing contributors.
#
# This file is a part of the Zing project. It is distributed under the GPL3
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
import json
import operator
from django.core.exceptions import PermissionDenied
from django.db.models import ProtectedError, Q
from django.forms.models import modelform_factory
from django.http import Http404
from django.shortcuts import get_object_or_404
from django.utils.functional import cached_property
from django.views.generic import View
from pootle.core.http import (
JsonResponse, JsonResponseBadRequest, JsonResponseForbidden,
JsonResponseNotFound
)
class JSONDecodeError(ValueError):
    """Raised when a request body cannot be parsed as JSON."""
    pass
class APIView(View):
    """View to implement internal RESTful APIs.

    Based on djangbone https://github.com/af/djangbone
    """

    # Model on which this view operates. Setting this is required.
    model = None

    # Base queryset for accessing data. If `None`, model's default manager
    # will be used.
    base_queryset = None

    # Set this to restrict the view to a subset of the available methods.
    restrict_to_methods = None

    # Field names to be included.
    fields = ()

    # Individual forms to use for each method. By default it'll auto-populate
    # model forms built using `self.model` and `self.fields`.
    add_form_class = None
    edit_form_class = None

    # Permission classes implement logic to determine whether the request
    # should be permitted. Empty list means no permission-checking.
    permission_classes = []

    # Tuple of sensitive field names that will be excluded from any
    # serialized responses.
    sensitive_field_names = ('password', 'pw')

    # Set to an integer to enable GET pagination.
    page_size = None

    # HTTP GET parameter to use for accessing pages.
    page_param_name = 'p'

    # HTTP GET parameter to use for search queries.
    search_param_name = 'q'

    # Field names in which searching will be allowed.
    search_fields = None

    @property
    def allowed_methods(self):
        """Lower-case HTTP method names this view will accept."""
        methods = [m for m in self.http_method_names if hasattr(self, m)]
        if self.restrict_to_methods is not None:
            # Lists (not lazy map/filter objects) so the result can be
            # iterated and membership-tested repeatedly.
            restricted_to = [m.lower() for m in self.restrict_to_methods]
            methods = [m for m in methods if m in restricted_to]
        return methods

    def __init__(self, *args, **kwargs):
        if self.model is None:
            raise ValueError('No model class specified.')
        self.pk_field_name = self.model._meta.pk.name
        if self.base_queryset is None:
            self.base_queryset = self.model._default_manager
        self._init_fields()
        self._init_forms()
        super(APIView, self).__init__(*args, **kwargs)

    def _init_fields(self):
        """Derive `fields` and `serialize_fields` from the configuration.

        BUG FIX: both attributes were previously generator expressions,
        which are exhausted after a single iteration -- every response
        after the first silently serialized no fields at all.  They are
        now materialized as tuples.
        """
        if len(self.fields) < 1:
            form = self.add_form_class or self.edit_form_class
            if form is not None:
                self.fields = form._meta.fields
            else:  # Assume all fields by default
                self.fields = tuple(f.name for f in self.model._meta.fields)
        self.serialize_fields = tuple(
            f for f in self.fields if f not in self.sensitive_field_names
        )

    def _init_forms(self):
        """Auto-build model forms for POST/PUT when none were provided."""
        if 'post' in self.allowed_methods and self.add_form_class is None:
            self.add_form_class = modelform_factory(self.model,
                                                    fields=self.fields)
        if 'put' in self.allowed_methods and self.edit_form_class is None:
            self.edit_form_class = modelform_factory(self.model,
                                                     fields=self.fields)

    @cached_property
    def request_data(self):
        """Request body deserialized from JSON (cached per request)."""
        try:
            return json.loads(self.request.body)
        except ValueError:
            raise JSONDecodeError

    def get_permissions(self):
        """Returns permission handler instances required for a particular view."""
        return [permission() for permission in self.permission_classes]

    def check_permissions(self, request):
        """Checks whether the view is allowed to process the request or not."""
        for permission in self.get_permissions():
            if not permission.has_permission(request, self):
                raise PermissionDenied

    def check_object_permissions(self, request, obj):
        """Checks whether the view may operate on this particular object."""
        for permission in self.get_permissions():
            if not permission.has_object_permission(request, self, obj):
                raise PermissionDenied

    def handle_exception(self, exc):
        """Maps known exceptions to JSON error responses; re-raises the rest."""
        if isinstance(exc, Http404):
            return JsonResponseNotFound({
                'msg': 'Not found',
            })
        if isinstance(exc, PermissionDenied):
            return JsonResponseForbidden({
                'msg': 'Permission denied.',
            })
        if isinstance(exc, JSONDecodeError):
            return JsonResponseBadRequest({
                'msg': 'Invalid JSON data',
            })
        # Called from dispatch()'s except block, so a bare raise re-raises
        # the active exception with its original traceback.
        raise

    def dispatch(self, request, *args, **kwargs):
        try:
            self.check_permissions(request)
            if request.method.lower() in self.allowed_methods:
                handler = getattr(self, request.method.lower(),
                                  self.http_method_not_allowed)
            else:
                handler = self.http_method_not_allowed
            return handler(request, *args, **kwargs)
        except Exception as exc:
            return self.handle_exception(exc)

    def get(self, request, *args, **kwargs):
        """GET handler: a single object when a pk is given, else the collection."""
        if self.kwargs.get(self.pk_field_name, None) is not None:
            # Renamed from `object`, which shadowed the builtin.
            obj = self.get_object()
            return JsonResponse(self.object_to_values(obj))
        return self.get_collection(request, *args, **kwargs)

    def get_object(self):
        """Returns a single model instance."""
        obj = get_object_or_404(
            self.base_queryset, pk=self.kwargs[self.pk_field_name],
        )
        self.check_object_permissions(self.request, obj)
        return obj

    def get_collection(self, request, *args, **kwargs):
        """Retrieve a full collection."""
        return JsonResponse(self.qs_to_values(self.base_queryset))

    def get_form_kwargs(self):
        """Kwargs for the add/edit form; includes `instance` for updates."""
        kwargs = {
            'data': self.request_data,
        }
        if (self.pk_field_name in self.kwargs and
                self.kwargs[self.pk_field_name] is not None):
            kwargs.update({
                'instance': self.get_object(),
            })
        return kwargs

    def post(self, request, *args, **kwargs):
        """Creates a new model instance.

        The form to be used can be customized by setting
        `self.add_form_class`. By default a model form will be used with
        the fields from `self.fields`.
        """
        form = self.add_form_class(**self.get_form_kwargs())
        if form.is_valid():
            new_object = form.save()
            return JsonResponse(self.object_to_values(new_object))
        return self.form_invalid(form)

    def put(self, request, *args, **kwargs):
        """Update the current model."""
        if self.pk_field_name not in self.kwargs:
            return self.status_msg('PUT is not supported for collections',
                                   status=405)
        form = self.edit_form_class(**self.get_form_kwargs())
        if form.is_valid():
            updated_object = form.save()
            return JsonResponse(self.object_to_values(updated_object))
        return self.form_invalid(form)

    def delete(self, request, *args, **kwargs):
        """Delete the model and return an empty JSON response."""
        if self.pk_field_name not in kwargs:
            return self.status_msg('DELETE is not supported for collections',
                                   status=405)
        obj = self.get_object()
        try:
            obj.delete()
            return JsonResponse({})
        except ProtectedError as e:
            # BUG FIX: `e[0]` only worked on Python 2; exceptions are not
            # indexable on Python 3.
            return self.status_msg(e.args[0], status=405)

    def object_to_values(self, object):
        """Convert an object to values for serialization."""
        return {
            field: getattr(object, field) for field in self.serialize_fields
        }

    def qs_to_values(self, queryset):
        """Convert a queryset to values for further serialization.

        An array of objects in `models` and the total object count in
        `count` is returned.
        """
        search_keyword = self.request.GET.get(self.search_param_name, None)
        if search_keyword is not None:
            filter_by = self.get_search_filter(search_keyword)
            queryset = queryset.filter(filter_by)
        values = queryset.values(*self.serialize_fields)
        # Process pagination options if they are enabled
        if isinstance(self.page_size, int):
            try:
                page_param = self.request.GET.get(self.page_param_name, 1)
                page_number = int(page_param)
            except ValueError:
                page_number = 1
            # Clamp to the first page so bogus values (0, negatives) can't
            # produce a negative slice offset.
            offset = max(page_number - 1, 0) * self.page_size
            values = values[offset:offset + self.page_size]
        return_values = {
            'models': list(values),
            'count': queryset.count(),
        }
        return return_values

    def get_search_filter(self, keyword):
        """Build a Q object OR-ing `icontains` lookups over the search fields."""
        # BUG FIX: `reduce` is not a builtin on Python 3.
        from functools import reduce
        search_fields = getattr(self, 'search_fields', None)
        if search_fields is None:
            search_fields = self.fields  # Assume all fields
        lookups = [Q(('%s__icontains' % field_name, keyword))
                   for field_name in search_fields]
        return reduce(operator.or_, lookups)

    def status_msg(self, msg, status=400):
        return JsonResponse({'msg': msg}, status=status)

    def form_invalid(self, form):
        return JsonResponse({'errors': form.errors}, status=400)
iafan/zing
|
pootle/core/views/api.py
|
Python
|
gpl-3.0
| 10,141 | 0.000197 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function
from datetime import timedelta
from django.utils import timezone
from sentry.models import GroupResolution
from sentry.testutils import TestCase
class GroupResolutionTest(TestCase):
    """Exercise ``GroupResolution.has_resolution`` across resolution types
    (``in_release`` vs ``in_next_release``) and release recency."""
    def setUp(self):
        """Create two releases 30 minutes apart and a group to resolve."""
        super(GroupResolutionTest, self).setUp()
        self.old_release = self.create_release(
            version='a',
            project=self.project,
        )
        # Backdate so `old_release` clearly predates `new_release`.
        self.old_release.update(
            date_added=timezone.now() - timedelta(minutes=30),
        )
        self.new_release = self.create_release(
            version='b',
            project=self.project,
        )
        self.group = self.create_group()
    def test_in_next_release_with_new_release(self):
        GroupResolution.objects.create(
            release=self.old_release,
            group=self.group,
            type=GroupResolution.Type.in_next_release,
        )
        assert not GroupResolution.has_resolution(self.group, self.new_release)
    def test_in_next_release_with_same_release(self):
        GroupResolution.objects.create(
            release=self.old_release,
            group=self.group,
            type=GroupResolution.Type.in_next_release,
        )
        assert GroupResolution.has_resolution(self.group, self.old_release)
    def test_in_next_release_with_old_release(self):
        GroupResolution.objects.create(
            release=self.new_release,
            group=self.group,
            type=GroupResolution.Type.in_next_release,
        )
        assert GroupResolution.has_resolution(self.group, self.old_release)
    def test_in_release_with_new_release(self):
        GroupResolution.objects.create(
            release=self.old_release,
            group=self.group,
            type=GroupResolution.Type.in_release,
        )
        assert not GroupResolution.has_resolution(self.group, self.new_release)
    def test_in_release_with_current_release(self):
        # Unlike in_next_release, in_release covers the recorded release too.
        GroupResolution.objects.create(
            release=self.old_release,
            group=self.group,
            type=GroupResolution.Type.in_release,
        )
        assert not GroupResolution.has_resolution(self.group, self.old_release)
    def test_in_release_with_old_release(self):
        GroupResolution.objects.create(
            release=self.new_release,
            group=self.group,
            type=GroupResolution.Type.in_release,
        )
        assert GroupResolution.has_resolution(self.group, self.old_release)
    def test_no_release_with_resolution(self):
        GroupResolution.objects.create(
            release=self.new_release,
            group=self.group,
            type=GroupResolution.Type.in_release,
        )
        assert GroupResolution.has_resolution(self.group, None)
    def test_no_release_with_no_resolution(self):
        assert not GroupResolution.has_resolution(self.group, None)
|
ifduyue/sentry
|
tests/sentry/models/test_groupresolution.py
|
Python
|
bsd-3-clause
| 2,921 | 0 |
# -*- coding: utf-8 -*-
'''A module to represent the text content of a playing card at a high level.'''
# This file is part of CBG.
#
# CBG is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# CBG is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with CBG. If not, see <http://www.gnu.org/licenses/>.
#
# Copyright 2014-2016 Viktor Eikman
import itertools
import logging
from cbg.content import elements
from cbg.content import field
class Card(elements.DerivedFromSpec, field.Layout):
    '''The content of a unique playing card, as a "master" of sorts.

    Content is tracked by field type, and each field type has its own
    class, listed in the "plan" class attribute.

    If there is a field corresponding to a title, it should generally
    be populated first and use the "key_title" attribute of the card
    class as its key, because that way its content will appear in
    exception messages etc., to help debug subsequent problems.

    The number of copies in a deck is tracked at the deck level, not here.
    '''

    _untitled_base = 'untitled card'
    _untitled_iterator = itertools.count(start=1)

    def layout(self):
        '''Put data from incoming raws into empty fields.'''
        if not self.specification:
            s = 'No specification data for the "{}" card.'
            raise self.SpecificationError(s.format(self))
        try:
            super().layout()
        except Exception:
            # Name the failing card before re-raising, for debuggability.
            s = 'An error occurred while processing the "{}" card.'
            logging.error(s.format(self))
            raise
        if self.specification:
            # Leftover keys mean the plan did not consume the whole spec.
            for key, value in self.specification.items():
                s = 'Unrecognized data key "{}" not consumed: "{}".'
                logging.error(s.format(key, value))
            s = 'Specification data for the "{}" card was not consumed.'
            raise self.SpecificationError(s.format(self))

    def not_in_spec(self):
        '''Raise a standard error for specs too poor to lay out the card.'''
        s = 'Specification of "{}" card inadequate for basic layout.'
        raise self.SpecificationError(s.format(self))

    @property
    def title(self):
        '''Quick access to the card's title field's processed value, if any.

        In the absence of a title field, for the moment, use a stable
        generated title.
        '''
        try:
            # Renamed local (was "field"), which shadowed the imported
            # cbg.content.field module.
            title_text = str(self.child_by_key_required(self.key_title))
            if title_text:
                # In spec.
                return title_text
        except Exception:
            # No title field or lookup failure; fall through to fallbacks.
            pass
        try:
            return self._generated_title
        except AttributeError:
            return self._untitled_base

    @property
    def card(self):
        '''An override of a field method.'''
        return self

    @property
    def _sorting_signature(self):
        '''Salient properties of self, for sorting purposes.

        To be overridden for card types with other salient properties.
        '''
        return str(self.deck), str(self)

    def __eq__(self, other):
        '''Used for sorting (as performed by decks).'''
        try:
            return self._sorting_signature == other._sorting_signature
        except AttributeError:
            return False

    def __lt__(self, other):
        '''Used for sorting (as performed by decks).

        Notice that this method and __eq__ cannot be used with
        functools.total_ordering, because that decorator will not override
        inherited comparison methods from our parent classes.
        '''
        try:
            return self._sorting_signature < other._sorting_signature
        except AttributeError:
            s = 'Tried to sort {} relative to incompatible {}.'
            raise TypeError(s.format(type(self), type(other)))

    def __ne__(self, other):
        return not self == other

    def __le__(self, other):
        return self < other or self == other

    def __gt__(self, other):
        # BUG FIX: previously returned `not self < other` (i.e. >=), which
        # was wrongly True for equal cards and made __ge__ trivially True.
        return not (self < other or self == other)

    def __ge__(self, other):
        return not self < other

    def __str__(self):
        return self.title

    def __hash__(self):
        '''Treat as if immutable, because decks are counters (hash tables).'''
        return hash(id(self))
|
veikman/cbg
|
cbg/content/card.py
|
Python
|
gpl-3.0
| 4,586 | 0.000436 |
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017 F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import time
try:
from f5.bigip import ManagementRoot
from icontrol.exceptions import iControlUnexpectedHTTPError
HAS_F5SDK = True
except ImportError:
HAS_F5SDK = False
try:
from library.module_utils.network.f5.common import F5BaseClient
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.icontrol import iControlRestSession
except ImportError:
from ansible.module_utils.network.f5.common import F5BaseClient
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.icontrol import iControlRestSession
class F5Client(F5BaseClient):
    """Client for BIG-IP devices using the f5-sdk ``ManagementRoot`` API."""
    def __init__(self, *args, **kwargs):
        super(F5Client, self).__init__(*args, **kwargs)
        self.provider = self.merge_provider_params()

    @property
    def api(self):
        """Lazily created, cached connection; retried up to 10 times, 1s apart.

        Raises F5ModuleError (including the last underlying error) if all
        attempts fail.
        """
        exc = None
        if self._client:
            return self._client
        for x in range(0, 10):
            try:
                result = ManagementRoot(
                    self.provider['server'],
                    self.provider['user'],
                    self.provider['password'],
                    port=self.provider['server_port'],
                    verify=self.provider['validate_certs'],
                    token='tmos'
                )
                self._client = result
                return self._client
            except Exception as ex:
                # Remember the last error for the final message; back off 1s.
                exc = ex
                time.sleep(1)
        error = 'Unable to connect to {0} on port {1}.'.format(
            self.provider['server'], self.provider['server_port']
        )
        if exc is not None:
            error += ' The reported error was "{0}".'.format(str(exc))
        raise F5ModuleError(error)
class F5RestClient(F5BaseClient):
    """Client for BIG-IP devices using the iControl REST API directly."""
    def __init__(self, *args, **kwargs):
        super(F5RestClient, self).__init__(*args, **kwargs)
        self.provider = self.merge_provider_params()

    @property
    def api(self):
        """Lazily created, cached authenticated REST session.

        Logs in to acquire an X-F5-Auth-Token; retried up to 10 times, 1s
        apart.  Raises F5ModuleError (with the last underlying error) if
        all attempts fail.
        """
        exc = None
        if self._client:
            return self._client
        for x in range(0, 10):
            try:
                url = "https://{0}:{1}/mgmt/shared/authn/login".format(
                    self.provider['server'], self.provider['server_port']
                )
                payload = {
                    'username': self.provider['user'],
                    'password': self.provider['password'],
                    # Fall back to the local 'tmos' auth provider.
                    'loginProviderName': self.provider['auth_provider'] or 'tmos'
                }
                session = iControlRestSession()
                session.verify = self.provider['validate_certs']
                response = session.post(url, json=payload)
                if response.status not in [200]:
                    raise F5ModuleError('Status code: {0}. Unexpected Error: {1} for uri: {2}\nText: {3}'.format(
                        response.status, response.reason, response.url, response._content
                    ))
                # All subsequent requests authenticate via this token header.
                session.headers['X-F5-Auth-Token'] = response.json()['token']['token']
                self._client = session
                return self._client
            except Exception as ex:
                # Remember the last error for the final message; back off 1s.
                exc = ex
                time.sleep(1)
        error = 'Unable to connect to {0} on port {1}.'.format(
            self.provider['server'], self.provider['server_port']
        )
        if exc is not None:
            error += ' The reported error was "{0}".'.format(str(exc))
        raise F5ModuleError(error)
|
mheap/ansible
|
lib/ansible/module_utils/network/f5/bigip.py
|
Python
|
gpl-3.0
| 3,738 | 0.001338 |
from textx.exceptions import TextXSemanticError
def query_processor(query):
    """textX object processor for parsed Query objects.

    Normalizes ``query``'s condition name to its short form (see
    ``adapter_for_query``) and validates that each of the parent's queries
    sorts by a declared property, raising ``TextXSemanticError`` otherwise.
    """
    if query.condition is not None:
        query.condition.conditionName = adapter_for_query(query)
    # NOTE(review): the original loop shadowed the `query` parameter and
    # computed error positions from the `object` builtin (a guaranteed
    # AttributeError whenever the error path triggered); both are fixed
    # here while keeping the intended validation.
    for sub_query in query.parent.queries:
        sort_by = getattr(sub_query, 'sortBy', None)
        if sort_by is not None and sort_by not in sub_query.parent.properties:
            line, col = sub_query.parent._tx_metamodel.parser.pos_to_linecol(
                sub_query._tx_position)
            raise TextXSemanticError("ERROR: (at %d, %d) Object %s has no property named %s." %
                                     (line, col, sub_query.parent.object.name, sort_by))
    return True
def adapter_for_query(queryObject):
    """Return the short form of *queryObject*'s condition name.

    Known long names ('lowerThan', 'greaterThan', ...) map to their
    abbreviations ('lt', 'gt', ...); any other name is returned unchanged.
    Replaces the original bare ``except:`` (which masked real errors) with
    a plain ``dict.get`` fallback.
    """
    name = queryObject.condition.conditionName
    return {
        'lowerThan': 'lt',
        'greaterThan': 'gt',
        'lessEqual': 'le',
        'greaterEqual': 'ge',
        'equal': 'e',
    }.get(name, name)
class Query(object):
    """Value object describing one query: target property, optional
    condition, sort key/order and an optional result range."""

    def __init__(self, parent, name, property=None, condition=None,
                 sortBy=None, order=None, rangeFrom=None, rangeTo=None):
        self.parent = parent
        self.name = name
        self.property = property
        self.condition = condition
        self.sortBy = sortBy
        self.order = order
        self.rangeFrom = rangeFrom
        self.rangeTo = rangeTo
|
theshammy/GenAn
|
src/concepts/query.py
|
Python
|
mit
| 1,811 | 0.009939 |
# -*- encoding:utf8 -*-
import cStringIO
import qrcode
class QRCodeCreator():
    """Render text into a QR code image held in an in-memory buffer.

    NOTE(review): uses the Python-2-only ``cStringIO`` module.
    """
    def __init__(self):
        pass
    def create(self, message):
        """Return a rewound cStringIO buffer containing the QR image for *message*."""
        qr = qrcode.QRCode(
            version=1,  # smallest symbol size; make(fit=True) may grow it
            error_correction=qrcode.constants.ERROR_CORRECT_L,
            box_size=10,  # pixels per module
            border=4,  # quiet-zone width, in modules
        )
        qr.add_data(message)
        qr.make(fit=True)
        img = qr.make_image()
        img_buf = cStringIO.StringIO()
        img.save(img_buf)
        # Rewind so callers can read the image data immediately.
        img_buf.seek(0)
        return img_buf
|
supistar/Botnyan
|
model/qrcreator.py
|
Python
|
mit
| 520 | 0 |
#! /user/bin/env python
# Package version string; bump on release.
__version__ = '0.8.53'
|
jalanb/dotjab
|
src/python/__init__.py
|
Python
|
mit
| 48 | 0 |
############################################################################
#
# Copyright (C) 2016 The Qt Company Ltd.
# Contact: https://www.qt.io/licensing/
#
# This file is part of Qt Creator.
#
# Commercial License Usage
# Licensees holding valid commercial Qt licenses may use this file in
# accordance with the commercial license agreement provided with the
# Software or, alternatively, in accordance with the terms contained in
# a written agreement between you and The Qt Company. For licensing terms
# and conditions see https://www.qt.io/terms-conditions. For further
# information use the contact form at https://www.qt.io/contact-us.
#
# GNU General Public License Usage
# Alternatively, this file may be used under the terms of the GNU
# General Public License version 3 as published by the Free Software
# Foundation with exceptions as appearing in the file LICENSE.GPL3-EXCEPT
# included in the packaging of this file. Please review the following
# information to ensure the GNU General Public License requirements will
# be met: https://www.gnu.org/licenses/gpl-3.0.html.
#
############################################################################
# This is a place to add your own dumpers for testing purposes.
# Any contents here will be picked up by GDB and LLDB based
# debugging in Qt Creator automatically. This code is not used
# when debugging with CDB on Windows.
# NOTE: This file will get overwritten when updating Qt Creator.
#
# To add dumpers that don't get overwritten, copy this file here
# to a safe location outside the Qt Creator installation and
# make this location known to Qt Creator using the Debugger /
# GDB / Dumper customization / Additional file setting.
# Example to display a simple type
# template<typename U, typename V> struct MapNode
# {
# U key;
# V data;
# }
#
# def qdump__MapNode(d, value):
# d.putValue("This is the value column contents")
# d.putNumChild(2)
# if d.isExpanded():
# with Children(d):
# # Compact simple case.
# d.putSubItem("key", value["key"])
# # Same effect, with more customization possibilities.
# with SubItem(d, "data")
# d.putItem("data", value["data"])
# Check http://doc.qt.io/qtcreator/creator-debugging-helpers.html
# for more details or look at qttypes.py, stdtypes.py, boosttypes.py
# for more complex examples.
from dumper import *
#def qdump__Pin(d,value):
# d.putValue('%s %s' % (value['id'].integer(), value['index'].integer()))
# d.putNumChild(2)
# if d.isExpanded():
# with Children(d):
# d.putSubItem("id",value["id"])
# d.putSubItem("index",value["index"])
######################## Your code below #######################
|
stdgregwar/elve
|
personaltypes.py
|
Python
|
lgpl-3.0
| 2,750 | 0.000727 |
from __future__ import unicode_literals
import django
from django.core.exceptions import ValidationError
from django.db.models import Q
from django.contrib.contenttypes.models import ContentType
from django.test import TestCase, Client
from django.test.client import RequestFactory
from django.test.utils import override_settings
from custard.conf import (CUSTOM_TYPE_TEXT, CUSTOM_TYPE_INTEGER,
CUSTOM_TYPE_BOOLEAN, CUSTOM_TYPE_FLOAT,
CUSTOM_TYPE_DATE, CUSTOM_TYPE_DATETIME,
CUSTOM_TYPE_TIME, settings)
from custard.builder import CustomFieldsBuilder
from custard.utils import import_class
from .models import (SimpleModelWithManager, SimpleModelWithoutManager,
CustomFieldsModel, CustomValuesModel, builder)
#==============================================================================
class SimpleModelWithManagerForm(builder.create_modelform()):
    """ModelForm for SimpleModelWithManager whose base class is generated by
    ``builder.create_modelform()`` so custom fields are injected automatically."""
    class Meta:
        model = SimpleModelWithManager
        fields = '__all__'
#class ExampleAdmin(admin.ModelAdmin):
# form = ExampleForm
# search_fields = ('name',)
#
# def get_search_results(self, request, queryset, search_term):
# queryset, use_distinct = super(ExampleAdmin, self).get_search_results(request, queryset, search_term)
# queryset |= self.model.objects.search(search_term)
# return queryset, use_distinct
#
# admin.site.register(Example, ExampleAdmin)
#==============================================================================
class CustomModelsTestCase(TestCase):
    """End-to-end tests for the custard custom-fields machinery: field
    definitions, stored values, search, form integration and admin glue."""

    def setUp(self):
        """Create one custom field per supported data type (all attached to
        SimpleModelWithManager) and a single instance to hang values on."""
        self.factory = RequestFactory()
        self.simple_with_manager_ct = ContentType.objects.get_for_model(SimpleModelWithManager)
        self.simple_without_manager_ct = ContentType.objects.get_for_model(SimpleModelWithoutManager)
        # Plain searchable text field (searchable is the default).
        self.cf = CustomFieldsModel.objects.create(content_type=self.simple_with_manager_ct,
                                                   name='text_field',
                                                   label="Text field",
                                                   data_type=CUSTOM_TYPE_TEXT)
        self.cf.save()
        # Required but explicitly NOT searchable text field.
        self.cf2 = CustomFieldsModel.objects.create(content_type=self.simple_with_manager_ct,
                                                    name='another_text_field',
                                                    label="Text field 2",
                                                    data_type=CUSTOM_TYPE_TEXT,
                                                    required=True,
                                                    searchable=False)
        self.cf2.clean()
        self.cf2.save()
        self.cf3 = CustomFieldsModel.objects.create(content_type=self.simple_with_manager_ct,
                                                    name='int_field', label="Integer field",
                                                    data_type=CUSTOM_TYPE_INTEGER)
        self.cf3.save()
        self.cf4 = CustomFieldsModel.objects.create(content_type=self.simple_with_manager_ct,
                                                    name='boolean_field', label="Boolean field",
                                                    data_type=CUSTOM_TYPE_BOOLEAN)
        self.cf4.save()
        self.cf5 = CustomFieldsModel.objects.create(content_type=self.simple_with_manager_ct,
                                                    name='float_field', label="Float field",
                                                    data_type=CUSTOM_TYPE_FLOAT)
        self.cf5.save()
        self.cf6 = CustomFieldsModel.objects.create(content_type=self.simple_with_manager_ct,
                                                    name='date_field', label="Date field",
                                                    data_type=CUSTOM_TYPE_DATE)
        self.cf6.save()
        self.cf7 = CustomFieldsModel.objects.create(content_type=self.simple_with_manager_ct,
                                                    name='datetime_field', label="Datetime field",
                                                    data_type=CUSTOM_TYPE_DATETIME)
        self.cf7.save()
        self.cf8 = CustomFieldsModel.objects.create(content_type=self.simple_with_manager_ct,
                                                    name='time_field', label="Time field",
                                                    data_type=CUSTOM_TYPE_TIME)
        self.cf8.save()
        self.obj = SimpleModelWithManager.objects.create(name='old test')
        self.obj.save()

    def tearDown(self):
        """Remove every custom field definition created in setUp."""
        CustomFieldsModel.objects.all().delete()

    def test_import_class(self):
        """import_class resolves a dotted path to the actual class object."""
        self.assertEqual(import_class('custard.builder.CustomFieldsBuilder'), CustomFieldsBuilder)

    def test_model_repr(self):
        """repr() of field and value models is human readable."""
        self.assertEqual(repr(self.cf), "<CustomFieldsModel: text_field>")
        val = CustomValuesModel.objects.create(custom_field=self.cf,
                                               object_id=self.obj.pk,
                                               value="abcdefg")
        val.save()
        self.assertEqual(repr(val), "<CustomValuesModel: text_field: abcdefg>")

    @override_settings(CUSTOM_CONTENT_TYPES=['simplemodelwithmanager'])
    def test_field_creation(self):
        """content_types_query honours the CUSTOM_CONTENT_TYPES setting."""
        builder2 = CustomFieldsBuilder('tests.CustomFieldsModel',
                                       'tests.CustomValuesModel',
                                       settings.CUSTOM_CONTENT_TYPES)

        class TestCustomFieldsModel(builder2.create_fields()):
            class Meta:
                app_label = 'tests'

        # NOTE(review): filtering ContentType on ``name`` relies on the old
        # ContentType.name model field (removed in Django 1.9) — confirm the
        # supported Django versions.
        self.assertQuerysetEqual(ContentType.objects.filter(builder2.content_types_query),
                                 ContentType.objects.filter(Q(name__in=['simplemodelwithmanager'])))

    def test_mixin(self):
        """The model mixin exposes get/set helpers for custom fields/values."""
        self.assertIn(self.cf, self.obj.get_custom_fields())
        self.assertIn(self.cf, SimpleModelWithManager.get_model_custom_fields())
        self.assertEqual(self.cf, self.obj.get_custom_field('text_field'))
        val = CustomValuesModel.objects.create(custom_field=self.cf,
                                               object_id=self.obj.pk,
                                               value="123456")
        val.save()
        self.assertEqual("123456", self.obj.get_custom_value('text_field'))
        self.obj.set_custom_value('text_field', "abcdefg")
        self.assertEqual("abcdefg", self.obj.get_custom_value('text_field'))
        val.delete()

    def test_field_model_clean(self):
        """Validation rejects duplicate custom-field names and names that
        shadow a real model field."""
        cf = CustomFieldsModel.objects.create(content_type=self.simple_with_manager_ct,
                                              name='another_text_field',
                                              label="Text field already present",
                                              data_type=CUSTOM_TYPE_INTEGER)
        with self.assertRaises(ValidationError):
            cf.full_clean()
        cf = CustomFieldsModel.objects.create(content_type=self.simple_with_manager_ct,
                                              name='name',
                                              label="Text field already in model",
                                              data_type=CUSTOM_TYPE_TEXT)
        with self.assertRaises(ValidationError):
            cf.full_clean()

    def test_value_model_clean(self):
        """A second value for the same field/object pair fails validation."""
        val = CustomValuesModel.objects.create(custom_field=self.cf2,
                                               object_id=self.obj.pk)
        val.value = "qwertyuiop"
        val.save()
        val = CustomValuesModel.objects.create(custom_field=self.cf2,
                                               object_id=self.obj.pk)
        val.value = "qwertyuiop"
        with self.assertRaises(ValidationError):
            val.full_clean()

    def test_value_creation(self):
        """Stored values inherit the content type and use the typed column."""
        val = CustomValuesModel.objects.create(custom_field=self.cf,
                                               object_id=self.obj.pk,
                                               value="qwertyuiop")
        val.save()
        self.assertEqual(val.content_type, self.simple_with_manager_ct)
        self.assertEqual(val.content_type, val.custom_field.content_type)
        self.assertEqual(val.value_text, "qwertyuiop")
        self.assertEqual(val.value, "qwertyuiop")

    def test_value_search(self):
        """The custom manager's search() matches values of searchable fields."""
        newobj = SimpleModelWithManager.objects.create(name='new simple')
        newobj.save()
        v1 = CustomValuesModel.objects.create(custom_field=self.cf,
                                              object_id=self.obj.pk,
                                              value="qwertyuiop")
        v1.save()
        v2 = CustomValuesModel.objects.create(custom_field=self.cf,
                                              object_id=newobj.pk,
                                              value="qwertyuiop")
        v2.save()
        v3 = CustomValuesModel.objects.create(custom_field=self.cf,
                                              object_id=newobj.pk,
                                              value="000asdf123")
        v3.save()
        qs1 = SimpleModelWithManager.objects.search("asdf")
        self.assertQuerysetEqual(qs1, [repr(newobj)])
        qs2 = SimpleModelWithManager.objects.search("qwerty")
        self.assertQuerysetEqual(qs2, [repr(self.obj), repr(newobj)], ordered=False)

    def test_value_search_not_searchable_field(self):
        """Values of searchable=False fields never appear in search results."""
        v1 = CustomValuesModel.objects.create(custom_field=self.cf,
                                              object_id=self.obj.pk,
                                              value="12345")
        v1.save()
        v2 = CustomValuesModel.objects.create(custom_field=self.cf2,
                                              object_id=self.obj.pk,
                                              value="67890")
        v2.save()
        qs1 = SimpleModelWithManager.objects.search("12345")
        self.assertQuerysetEqual(qs1, [repr(self.obj)])
        qs2 = SimpleModelWithManager.objects.search("67890")
        self.assertQuerysetEqual(qs2, [])

    def test_get_formfield_for_field(self):
        """CUSTOM_FIELD_TYPES overrides the form field class per data type."""
        with self.settings(CUSTOM_FIELD_TYPES={CUSTOM_TYPE_TEXT: 'django.forms.fields.EmailField'}):
            builder2 = CustomFieldsBuilder('tests.CustomFieldsModel', 'tests.CustomValuesModel')

            class SimpleModelWithManagerForm2(builder2.create_modelform(field_types=settings.CUSTOM_FIELD_TYPES)):
                class Meta:
                    model = SimpleModelWithManager
                    fields = '__all__'

            form = SimpleModelWithManagerForm2(data={}, instance=self.obj)
            self.assertIsNotNone(form.get_formfield_for_field(self.cf))
            self.assertEqual(django.forms.fields.EmailField, form.get_formfield_for_field(self.cf).__class__)

    def test_get_widget_for_field(self):
        """CUSTOM_WIDGET_TYPES overrides the widget class per data type."""
        with self.settings(CUSTOM_WIDGET_TYPES={CUSTOM_TYPE_TEXT: 'django.forms.widgets.CheckboxInput'}):
            builder2 = CustomFieldsBuilder('tests.CustomFieldsModel', 'tests.CustomValuesModel')

            class SimpleModelWithManagerForm2(builder2.create_modelform(widget_types=settings.CUSTOM_WIDGET_TYPES)):
                class Meta:
                    fields = '__all__'
                    model = SimpleModelWithManager

            form = SimpleModelWithManagerForm2(data={}, instance=self.obj)
            self.assertIsNotNone(form.get_widget_for_field(self.cf))
            self.assertEqual(django.forms.widgets.CheckboxInput, form.get_widget_for_field(self.cf).__class__)

    def test_form(self):
        """Custom fields are validated and saved through the generated form."""
        class TestForm(builder.create_modelform()):
            custom_name = 'My Custom Fields'
            custom_description = 'Edit the Example custom fields here'
            custom_classes = 'zzzap-class'

            class Meta:
                fields = '__all__'
                model = SimpleModelWithManager

        # Required custom field missing -> form invalid and save() raises.
        request = self.factory.post('/', { 'text_field': '123' })
        form = TestForm(request.POST, instance=self.obj)
        self.assertFalse(form.is_valid())
        self.assertIn('another_text_field', form.errors)
        self.assertRaises(ValueError, lambda: form.save())
        request = self.factory.post('/', { 'id': self.obj.pk,
                                           'name': 'xxx',
                                           'another_text_field': 'wwwzzzyyyxxx' })
        form = TestForm(request.POST, instance=self.obj)
        self.assertTrue(form.is_valid())
        form.save()
        self.assertEqual(self.obj.get_custom_value('another_text_field'), 'wwwzzzyyyxxx')
        self.assertEqual(self.obj.name, 'xxx')
        #self.assertInHTML(TestForm.custom_name, form.as_p())
        #self.assertInHTML(TestForm.custom_description, form.as_p())
        #self.assertInHTML(TestForm.custom_classes, form.as_p())

    def test_admin(self):
        """create_modeladmin() builds an admin class without errors."""
        modeladmin_class = builder.create_modeladmin()
        #c = Client()
        #if c.login(username='fred', password='secret'):
        #    response = c.get('/admin/', follow=True)
        #    print(response)
|
quamilek/django-custard
|
custard/tests/test.py
|
Python
|
mit
| 13,067 | 0.004362 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import budgetdatapackage
import datapackage
import datetime
from nose.tools import raises
from datapackage import compat
class TestBudgetResource(object):
    """Behavioural tests for budgetdatapackage.BudgetResource construction
    and validation (nose-style: ``setup`` runs before each test)."""

    def setup(self):
        # Baseline keyword arguments describing a fully valid budget resource;
        # individual tests mutate or delete single entries.
        self.kwargs = {
            'currency': 'ISK',
            'dateLastUpdated': '2014-04-22',
            'datePublished': '1982-04-22',
            'fiscalYear': '2014',
            'granularity': 'transactional',
            'status': 'approved',
            'type': 'expenditure',
            'location': 'IS',
            'url': 'http://iceland.is/budgets.csv'}

    def _make_resource(self):
        """Instantiate a BudgetResource from the current keyword set."""
        return budgetdatapackage.BudgetResource(**self.kwargs)

    def _expected_date(self, key):
        """Parse the ISO date string stored under *key* into a date object."""
        return datetime.datetime.strptime(self.kwargs[key], '%Y-%m-%d').date()

    def test_create_resource(self):
        """A valid keyword set maps verbatim onto the resource attributes."""
        resource = self._make_resource()
        assert resource.dateLastUpdated == self._expected_date('dateLastUpdated')
        assert resource.datePublished == self._expected_date('datePublished')
        for attr in ('currency', 'fiscalYear', 'granularity', 'status',
                     'type', 'location', 'url'):
            assert getattr(resource, attr) == self.kwargs[attr]
        assert resource.standard == '1.0.0-alpha'

    def test_resource_can_be_used_with_datapackage(self):
        """Checks if it's possible to create a datapackage with a
        budget resource"""
        moneys = self._make_resource()
        finances = datapackage.DataPackage(
            name="finances", license="PDDL", resources=[moneys])
        assert finances.name == "finances"
        assert len(finances.resources) == 1
        assert finances.resources[0].granularity == self.kwargs['granularity']

    @raises(ValueError)
    def test_create_resource_missing_required_field(self):
        del self.kwargs['fiscalYear']
        self._make_resource()

    @raises(ValueError)
    def test_bad_currency(self):
        self.kwargs['currency'] = 'batman'
        self._make_resource()

    @raises(ValueError)
    def test_bad_dateLastPublished(self):
        self.kwargs['dateLastUpdated'] = 'batman'
        self._make_resource()

    @raises(ValueError)
    def test_bad_datePublished(self):
        self.kwargs['datePublished'] = 'batman'
        self._make_resource()

    @raises(ValueError)
    def test_bad_fiscalYear(self):
        self.kwargs['fiscalYear'] = 'batman'
        self._make_resource()

    @raises(ValueError)
    def test_bad_granularity(self):
        self.kwargs['granularity'] = 'batman'
        self._make_resource()

    @raises(ValueError)
    def test_bad_status(self):
        self.kwargs['status'] = 'batman'
        self._make_resource()

    @raises(ValueError)
    def test_bad_type(self):
        self.kwargs['type'] = 'batman'
        self._make_resource()

    @raises(ValueError)
    def test_bad_location(self):
        self.kwargs['location'] = 'batman'
        self._make_resource()
|
trickvi/budgetdatapackage
|
tests/test_resource.py
|
Python
|
gpl-3.0
| 3,567 | 0 |
import asynchat
import socket
import errno
class vischat(asynchat.async_chat):
    """Asynchronous line-oriented client for the ``vis`` protocol.

    Commands are pushed as newline-terminated strings.  A reply consists of
    zero or more data lines followed by a line starting with '.'.  Replies
    are matched to callbacks in FIFO order, relying on vis answering all
    requests in order.
    """

    def __init__(self, host, port):
        self.host = host
        self.port = port
        # Per-command callbacks awaiting a '.'-terminated reply, in send
        # order; None means "fire and forget" (no reply data expected).
        self.outstanding = []
        # Data lines accumulated for the reply currently being received.
        self.lines = []
        self.buffer = ""
        asynchat.async_chat.__init__(self)

    def handle_connect(self):
        """Invoke the connect callback with self on success, None on failure.

        Fix: any nonzero SO_ERROR now counts as failure.  Previously only
        ECONNREFUSED was detected, so errors such as ETIMEDOUT or
        EHOSTUNREACH were wrongly reported as successful connections.
        """
        err = self.getsockopt(socket.SOL_SOCKET, socket.SO_ERROR)
        if err != 0:
            self.connect_cb(None)
        else:
            self.connect_cb(self)

    def start_connect(self, cb):
        """Begin an asynchronous connect; cb(self or None) fires when done."""
        self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
        self.connect((self.host, self.port))
        self.set_terminator("\n")
        self.connect_cb = cb

    def collect_incoming_data(self, data):
        # Accumulate bytes of the current line until the terminator arrives.
        self.buffer += data

    def found_terminator(self):
        # Assumes that vis handles all requests in order.
        # A '.'-prefixed line ends the current reply; anything else is data.
        # Fix: startswith() also guards against an empty line, which
        # previously raised IndexError on self.buffer[0].
        if self.buffer.startswith('.'):
            cb = self.outstanding.pop(0)
            if cb:
                cb(self.lines)
            self.lines = []
        else:
            self.lines.append(self.buffer)
        self.buffer = ""

    # Each command below is a front-end to a real protocol message;
    # commands expecting reply data register a callback, the rest None.
    def list(self, cb):
        self.push("list\n")
        self.outstanding.append(cb)

    def arc(self, a, b):
        self.push("arc %s %s\n" % (a, b))
        self.outstanding.append(None)

    def arrow(self, a, b):
        self.push("arrow %s %s\n" % (a, b))
        self.outstanding.append(None)

    def reset(self):
        self.push("reset\n")
        self.outstanding.append(None)

    def highlight(self, a):
        self.push("highlight %s\n" % a)
        self.outstanding.append(None)

    def select(self, a):
        self.push("select %s\n" % a)
        self.outstanding.append(None)
|
sit/dht
|
tools/vischat.py
|
Python
|
mit
| 1,908 | 0.021488 |
# encoding: utf-8
# FastCGI-to-WSGI bridge for files/pipes transport (not socket)
#
# Copyright (c) 2002, 2003, 2005, 2006 Allan Saddi <allan@saddi.com>
# Copyright (c) 2011 - 2013 Ruslan Keba <ruslan@helicontech.com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
#
__author__ = 'Allan Saddi <allan@saddi.com>, Ruslan Keba <ruslan@helicontech.com>'
import msvcrt
import struct
import os
import logging
import sys
import traceback
import datetime
import urllib
from optparse import OptionParser
# debug flag
__dbg__ = False
# Constants from the spec.
# File descriptor on which the web server passes the listen socket.
FCGI_LISTENSOCK_FILENO = 0
# Every FastCGI record starts with a fixed 8-byte header.
FCGI_HEADER_LEN = 8
FCGI_VERSION_1 = 1
# Record types.
FCGI_BEGIN_REQUEST = 1
FCGI_ABORT_REQUEST = 2
FCGI_END_REQUEST = 3
FCGI_PARAMS = 4
FCGI_STDIN = 5
FCGI_STDOUT = 6
FCGI_STDERR = 7
FCGI_DATA = 8
FCGI_GET_VALUES = 9
FCGI_GET_VALUES_RESULT = 10
FCGI_UNKNOWN_TYPE = 11
FCGI_MAXTYPE = FCGI_UNKNOWN_TYPE
# Request id 0 is reserved for management records.
FCGI_NULL_REQUEST_ID = 0
# Flag in FCGI_BEGIN_REQUEST: keep the connection open after the request.
FCGI_KEEP_CONN = 1
# Application roles.
FCGI_RESPONDER = 1
FCGI_AUTHORIZER = 2
FCGI_FILTER = 3
# Protocol status values for FCGI_END_REQUEST.
FCGI_REQUEST_COMPLETE = 0
FCGI_CANT_MPX_CONN = 1
FCGI_OVERLOADED = 2
FCGI_UNKNOWN_ROLE = 3
# Names used in FCGI_GET_VALUES management queries.
FCGI_MAX_CONNS = 'FCGI_MAX_CONNS'
FCGI_MAX_REQS = 'FCGI_MAX_REQS'
FCGI_MPXS_CONNS = 'FCGI_MPXS_CONNS'
# struct format strings for the fixed-size protocol bodies.
FCGI_Header = '!BBHHBx'
FCGI_BeginRequestBody = '!HB5x'
FCGI_EndRequestBody = '!LB3x'
FCGI_UnknownTypeBody = '!B7x'
FCGI_EndRequestBody_LEN = struct.calcsize(FCGI_EndRequestBody)
FCGI_UnknownTypeBody_LEN = struct.calcsize(FCGI_UnknownTypeBody)
# Human-readable record-type names, indexed by record type (for debug logs).
FCGI_HEADER_NAMES = (
    'ERROR TYPE: 0',
    'BEGIN_REQUEST',
    'ABORT_REQUEST',
    'END_REQUEST',
    'PARAMS',
    'STDIN',
    'STDOUT',
    'STDERR',
    'DATA',
    'GET_VALUES',
    'GET_VALUES_RESULT',
    'UNKNOWN_TYPE',
)
class InputStream(object):
    """
    File-like object representing FastCGI input streams (FCGI_STDIN and
    FCGI_DATA). Supports the minimum methods required by the WSGI spec.
    """

    def __init__(self, conn):
        self._conn = conn
        # See Server: once this many bytes have been consumed the buffer
        # is compacted to reclaim memory.
        self._shrinkThreshold = conn.server.inputStreamShrinkThreshold
        self._buf = ''
        self._bufList = []
        self._pos = 0       # current read position
        self._avail = 0     # number of bytes currently buffered
        self._eof = False   # True once the server signals end-of-stream

    def _merge(self):
        # Collapse the pending chunk list into the main buffer.
        if self._bufList:
            self._buf += ''.join(self._bufList)
            self._bufList = []

    def _shrinkBuffer(self):
        """Discard already-consumed data (the stream cannot rewind)."""
        if self._pos < self._shrinkThreshold:
            return
        self._buf = self._buf[self._pos:]
        self._avail -= self._pos
        self._pos = 0
        assert self._avail >= 0

    def _waitForData(self):
        """Block until the connection delivers more stream data."""
        self._conn.process_input()

    def read(self, n=-1):
        if self._eof and self._pos == self._avail:
            return ''
        # Pull records until n bytes are buffered (or EOF when n < 0).
        while not self._eof and (n < 0 or self._avail - self._pos < n):
            self._waitForData()
        if n < 0 or self._avail - self._pos < n:
            end = self._avail
        else:
            end = self._pos + n
        self._merge()
        chunk = self._buf[self._pos:end]
        self._pos = end
        self._shrinkBuffer()
        return chunk

    def readline(self, length=None):
        if self._eof and self._pos == self._avail:
            return ''
        while True:
            # The chunk list must be merged before searching for a newline.
            self._merge()
            nl = self._buf.find('\n', self._pos)
            if nl >= 0:
                end = nl + 1
                break
            if self._eof:
                # No newline is coming; return what is left.
                end = self._avail
                break
            if length is not None and len(self._buf) >= length + self._pos:
                # Caller-imposed cap reached before any newline arrived.
                end = self._pos + length
                break
            self._waitForData()
        chunk = self._buf[self._pos:end]
        self._pos = end
        self._shrinkBuffer()
        return chunk

    def readlines(self, sizehint=0):
        collected = []
        consumed = 0
        while True:
            line = self.readline()
            if not line:
                break
            collected.append(line)
            consumed += len(line)
            if 0 < sizehint <= consumed:
                break
        return collected

    def __iter__(self):
        return self

    def next(self):
        line = self.readline()
        if not line:
            raise StopIteration
        return line

    def add_data(self, data):
        # An empty chunk is the server's end-of-stream marker.
        if data:
            self._bufList.append(data)
            self._avail += len(data)
        else:
            self._eof = True
class OutputStream(object):
    """
    FastCGI output stream (FCGI_STDOUT/FCGI_STDERR).

    Unless ``buffered`` is set, every write() immediately emits one or more
    records back to the web server; buffering belongs to a higher layer.
    """

    def __init__(self, conn, req, type, buffered=False):
        self._conn = conn
        self._req = req
        self._type = type
        self._buffered = buffered
        self._bufList = []  # pending chunks when buffered is True
        self.dataWritten = False
        self.closed = False

    def _write(self, data):
        # Largest payload that still fits in one record given the header.
        limit = self._req.server.maxwrite - FCGI_HEADER_LEN
        offset = 0
        total = len(data)
        while offset < total:
            chunk = data[offset:offset + limit]
            rec = Record(self._type, self._req.requestId)
            rec.contentLength = len(chunk)
            rec.contentData = chunk
            self._conn.writeRecord(rec)
            offset += len(chunk)

    def write(self, data):
        assert not self.closed
        if data:
            self.dataWritten = True
            (self._bufList.append if self._buffered else self._write)(data)

    def writelines(self, lines):
        assert not self.closed
        for chunk in lines:
            self.write(chunk)

    def flush(self):
        # Only a buffered stream can have anything pending.
        if not self._buffered:
            return
        pending, self._bufList = self._bufList, []
        self._write(''.join(pending))

    # Though available, the following should NOT be called by WSGI apps.
    def close(self):
        """Sends end-of-stream notification, if necessary."""
        if self.closed or not self.dataWritten:
            return
        self.flush()
        self._conn.writeRecord(Record(self._type, self._req.requestId))
        self.closed = True
class TeeOutputStream(object):
    """Replicates every write across two or more underlying file-like
    streams (used to copy FCGI_STDERR to the real stderr as well)."""

    def __init__(self, streamList):
        self._streamList = streamList

    def write(self, data):
        for stream in self._streamList:
            stream.write(data)

    def writelines(self, lines):
        for chunk in lines:
            self.write(chunk)

    def flush(self):
        for stream in self._streamList:
            stream.flush()
class StdoutWrapper(object):
    """Proxy around a stdout-like object that records whether any non-empty
    payload has been written (used to detect a handler that sent nothing)."""

    def __init__(self, stdout):
        self._file = stdout
        self.dataWritten = False

    def write(self, data):
        # Only a non-empty write counts as "data written".
        self.dataWritten = self.dataWritten or bool(data)
        self._file.write(data)

    def writelines(self, lines):
        for chunk in lines:
            self.write(chunk)

    def __getattr__(self, name):
        # Delegate everything else (flush, fileno, ...) to the real stream.
        return getattr(self._file, name)
def decode_pair(s, pos=0):
    """
    Decodes a FastCGI name/value pair starting at *pos* in *s*.

    Returns the position just past the pair together with the
    (name, value) tuple.
    """
    def _read_length(p):
        # Lengths < 128 occupy one byte; otherwise four bytes with the
        # high bit set (FastCGI name-value pair encoding).
        first = ord(s[p])
        if first & 128:
            return struct.unpack('!L', s[p:p + 4])[0] & 0x7fffffff, p + 4
        return first, p + 1

    nameLength, pos = _read_length(pos)
    valueLength, pos = _read_length(pos)
    name = s[pos:pos + nameLength]
    pos += nameLength
    value = s[pos:pos + valueLength]
    pos += valueLength
    return pos, (name, value)
def encode_pair(name, value):
    """
    Encodes a name/value pair.

    The encoded string is returned.
    """
    # Lengths < 128 fit in a single byte; longer ones take four bytes with
    # the high bit set (FastCGI name-value pair encoding).
    nameLength = len(name)
    if nameLength < 128:
        s = chr(nameLength)
    else:
        s = struct.pack('!L', nameLength | 0x80000000L)
    valueLength = len(value)
    if valueLength < 128:
        s += chr(valueLength)
    else:
        s += struct.pack('!L', valueLength | 0x80000000L)
    return s + name + value
class Record(object):
    """
    A FastCGI Record.

    Used for encoding/decoding records.
    """

    def __init__(self, type=FCGI_UNKNOWN_TYPE, requestId=FCGI_NULL_REQUEST_ID):
        self.version = FCGI_VERSION_1
        self.type = type
        self.requestId = requestId
        self.contentLength = 0
        self.paddingLength = 0
        self.contentData = ''

    def _recvall(stream, length):
        """
        Attempts to receive length bytes from a socket, blocking if necessary.
        (Socket may be blocking or non-blocking.)

        Returns (data, bytes_actually_received); a short count means EOF.
        """
        if __dbg__: logging.debug('_recvall (%d)' % length)
        dataList = []
        recvLen = 0
        while length:
            data = stream.read(length)
            if not data:  # EOF
                break
            dataList.append(data)
            dataLen = len(data)
            recvLen += dataLen
            length -= dataLen
        # if __dbg__: logging.debug('recived length = %d' % (recvLen))
        return ''.join(dataList), recvLen
    _recvall = staticmethod(_recvall)

    def read(self, stream):
        """Read and decode a Record from a socket."""
        # NOTE(review): this try/except merely re-raises; it adds nothing.
        try:
            header, length = self._recvall(stream, FCGI_HEADER_LEN)
        except:
            raise
        # A short header read means the peer closed the stream.
        if length < FCGI_HEADER_LEN:
            raise EOFError
        if __dbg__:
            hx = ''
            for s in header:
                hx += '%x|' % (ord(s))
        self.version, self.type, self.requestId, self.contentLength, \
        self.paddingLength = struct.unpack(FCGI_Header, header)
        if __dbg__:
            logging.debug('recv fcgi header: %s %s len: %d' % (
                FCGI_HEADER_NAMES[self.type] if self.type is not None and self.type < FCGI_MAXTYPE else
                FCGI_HEADER_NAMES[
                    FCGI_MAXTYPE], hx, len(header)))
        if self.contentLength:
            try:
                self.contentData, length = self._recvall(stream, self.contentLength)
            except:
                raise EOFError
            if length < self.contentLength:
                raise EOFError
        # Padding bytes are read and discarded.
        if self.paddingLength:
            try:
                self._recvall(stream, self.paddingLength)
            except:
                raise EOFError

    def _sendall(stream, data):
        """
        Writes data to a socket and does not return until all the data is sent.
        """
        if __dbg__: logging.debug('_sendall: len=%d' % len(data))
        stream.write(data)
    _sendall = staticmethod(_sendall)

    def write(self, stream):
        """Encode and write a Record to a socket."""
        # Pad content to an 8-byte boundary; empty records get 8 bytes of
        # padding here — NOTE(review): presumably required by the pipe
        # transport, the FastCGI spec itself allows zero padding; confirm.
        if not self.contentLength:
            self.paddingLength = 8
        else:
            self.paddingLength = -self.contentLength & 7
        header = struct.pack(FCGI_Header, self.version, self.type,
                             self.requestId, self.contentLength,
                             self.paddingLength)
        if __dbg__: logging.debug('send fcgi header: %s' % FCGI_HEADER_NAMES[
            self.type] if self.type is not None and self.type < FCGI_MAXTYPE else FCGI_HEADER_NAMES[FCGI_MAXTYPE])
        self._sendall(stream, header)
        if self.contentLength:
            if __dbg__: logging.debug('send CONTENT')
            self._sendall(stream, self.contentData)
        if self.paddingLength:
            if __dbg__: logging.debug('send PADDING')
            self._sendall(stream, '\x00' * self.paddingLength)
class Request(object):
    """
    Represents a single FastCGI request.

    These objects are passed to your handler and are the main interface
    between your handler and the fcgi module. The methods should not
    be called by your handler. However, server, params, stdin, stdout,
    stderr, and data are free for your handler's use.
    """

    def __init__(self, conn, inputStreamClass):
        self._conn = conn
        self.server = conn.server
        self.params = {}  # CGI-style environment, filled by FCGI_PARAMS records
        self.stdin = inputStreamClass(conn)
        self.stdout = OutputStream(conn, self, FCGI_STDOUT)
        self.stderr = OutputStream(conn, self, FCGI_STDERR)
        self.data = inputStreamClass(conn)  # FCGI_DATA stream (filter role)

    def run(self):
        """Runs the handler, flushes the streams, and ends the request."""
        try:
            protocolStatus, appStatus = self.server.handler(self)
        except Exception, instance:
            # Propagate after optionally logging the traceback.
            if __dbg__:
                logging.error(traceback.format_exc())
            raise
        # TODO: fix it
        # self.stderr.flush()
        # if not self.stdout.dataWritten:
        #     self.server.error(self)
        #     protocolStatus, appStatus = FCGI_REQUEST_COMPLETE, 0
        if __dbg__:
            logging.debug('protocolStatus = %d, appStatus = %d' % (protocolStatus, appStatus))
        self._flush()
        self._end(appStatus, protocolStatus)

    def _end(self, appStatus=0L, protocolStatus=FCGI_REQUEST_COMPLETE):
        # Delegate FCGI_END_REQUEST emission to the owning Connection.
        self._conn.end_request(self, appStatus, protocolStatus)

    def _flush(self):
        self.stdout.flush()
        self.stderr.flush()
class Connection(object):
    """
    A Connection with the web server.

    Each Connection is associated with a single socket (which is
    connected to the web server) and is responsible for handling all
    the FastCGI message processing for that socket.
    """
    # This transport never multiplexes concurrent requests on one connection.
    _multiplexed = False
    _inputStreamClass = InputStream

    def __init__(self, stdin, stdout, server):
        # stdin/stdout are the pipe endpoints shared with the web server.
        self._stdin = stdin
        self._stdout = stdout
        self.server = server
        # Active Requests for this Connection, mapped by request ID.
        self._requests = {}

    def run(self):
        """Begin processing data from the socket."""
        self._keepGoing = True
        while self._keepGoing:
            try:
                self.process_input()
            except KeyboardInterrupt:
                break
            # except EOFError, inst:
            # raise
            # if __dbg__: logging.error(str(inst))
            # break

    def process_input(self):
        """Attempt to read a single Record from the socket and process it."""
        # Currently, any children Request threads notify this Connection
        # that it is no longer needed by closing the Connection's socket.
        # We need to put a timeout on select, otherwise we might get
        # stuck in it indefinitely... (I don't like this solution.)
        if not self._keepGoing:
            return
        rec = Record()
        rec.read(self._stdin)
        # Dispatch on record type; management records use the null id.
        if rec.type == FCGI_GET_VALUES:
            self._do_get_values(rec)
        elif rec.type == FCGI_BEGIN_REQUEST:
            self._do_begin_request(rec)
        elif rec.type == FCGI_ABORT_REQUEST:
            self._do_abort_request(rec)
        elif rec.type == FCGI_PARAMS:
            self._do_params(rec)
        elif rec.type == FCGI_STDIN:
            self._do_stdin(rec)
        elif rec.type == FCGI_DATA:
            self._do_data(rec)
        elif rec.requestId == FCGI_NULL_REQUEST_ID:
            self._do_unknown_type(rec)
        else:
            # Need to complain about this.
            pass

    def writeRecord(self, rec):
        """
        Write a Record to the socket.
        """
        rec.write(self._stdout)

    def end_request(self, req, appStatus=0L, protocolStatus=FCGI_REQUEST_COMPLETE, remove=True):
        """
        End a Request.

        Called by Request objects. An FCGI_END_REQUEST Record is
        sent to the web server. If the web server no longer requires
        the connection, the socket is closed, thereby ending this
        Connection (run() returns).
        """
        # Aborted requests get no further records; they are only removed.
        if not req.aborted:
            # write empty packet to stdin
            rec = Record(FCGI_STDOUT, req.requestId)
            rec.contentData = ''
            rec.contentLength = 0
            self.writeRecord(rec)
            # write end request
            rec = Record(FCGI_END_REQUEST, req.requestId)
            rec.contentData = struct.pack(FCGI_EndRequestBody, appStatus,
                                          protocolStatus)
            rec.contentLength = FCGI_EndRequestBody_LEN
            self.writeRecord(rec)
        if remove:
            if __dbg__: logging.debug('end_request: removing request from list')
            del self._requests[req.requestId]
        if __dbg__: logging.debug('end_request: flags = %d' % req.flags)
        # Without FCGI_KEEP_CONN and no pending requests, shut the loop down.
        if not (req.flags & FCGI_KEEP_CONN) and not self._requests:
            if __dbg__: logging.debug('end_request: set _keepGoing = False')
            self._keepGoing = False

    def _do_get_values(self, inrec):
        """Handle an FCGI_GET_VALUES request from the web server."""
        outrec = Record(FCGI_GET_VALUES_RESULT)
        pos = 0
        while pos < inrec.contentLength:
            pos, (name, value) = decode_pair(inrec.contentData, pos)
            cap = self.server.capability.get(name)
            if cap is not None:
                # Only names we know about are echoed back with their value.
                outrec.contentData += encode_pair(name, str(cap))
        outrec.contentLength = len(outrec.contentData)
        self.writeRecord(outrec)

    def _do_begin_request(self, inrec):
        """Handle an FCGI_BEGIN_REQUEST from the web server."""
        role, flags = struct.unpack(FCGI_BeginRequestBody, inrec.contentData)
        req = self.server.request_class(self, self._inputStreamClass)
        req.requestId, req.role, req.flags = inrec.requestId, role, flags
        req.aborted = False
        if not self._multiplexed and self._requests:
            # Can't multiplex requests.
            self.end_request(req, 0L, FCGI_CANT_MPX_CONN, remove=False)
        else:
            self._requests[inrec.requestId] = req

    def _do_abort_request(self, inrec):
        """
        Handle an FCGI_ABORT_REQUEST from the web server.

        We just mark a flag in the associated Request.
        """
        req = self._requests.get(inrec.requestId)
        if req is not None:
            req.aborted = True
            # NOTE(review): end_request's signature is (req, appStatus,
            # protocolStatus); FCGI_REQUEST_COMPLETE (0) is passed as
            # appStatus here — the two look swapped, confirm intent.
            self.end_request(req, FCGI_REQUEST_COMPLETE, 0)

    def _start_request(self, req):
        """Run the request."""
        # Not multiplexed, so run it inline.
        req.run()

    def _do_params(self, inrec):
        """
        Handle an FCGI_PARAMS Record.

        If the last FCGI_PARAMS Record is received, start the request.
        """
        req = self._requests.get(inrec.requestId)
        if req is not None:
            if inrec.contentLength:
                pos = 0
                while pos < inrec.contentLength:
                    pos, (name, value) = decode_pair(inrec.contentData, pos)
                    req.params[name] = value

    def _do_stdin(self, inrec):
        """Handle the FCGI_STDIN stream."""
        req = self._requests.get(inrec.requestId)
        if inrec.contentLength:
            if req is not None:
                req.stdin.add_data(inrec.contentData)
        else:
            # An empty FCGI_STDIN record marks end of input: run the handler.
            self._start_request(req)

    def _do_data(self, inrec):
        """Handle the FCGI_DATA stream."""
        req = self._requests.get(inrec.requestId)
        if req is not None:
            req.data.add_data(inrec.contentData)

    def _do_unknown_type(self, inrec):
        """Handle an unknown request type. Respond accordingly."""
        outrec = Record(FCGI_UNKNOWN_TYPE)
        outrec.contentData = struct.pack(FCGI_UnknownTypeBody, inrec.type)
        outrec.contentLength = FCGI_UnknownTypeBody_LEN
        self.writeRecord(outrec)
class FCGIServer(object):
    """Single-connection FastCGI-to-WSGI server.

    Speaks the FastCGI protocol over the process's stdin handle (Windows
    style) and bridges each request to a WSGI application via handler().
    """

    # Factory used by Connection to build per-request objects.
    request_class = Request
    # Maximum payload size written per FastCGI record.
    maxwrite = 8192
    # Shrink request input streams once they grow past this many bytes.
    inputStreamShrinkThreshold = 102400 - 8192

    def __init__(self, application, environ=None,
                 multithreaded=False, multiprocess=False,
                 debug=False, roles=(FCGI_RESPONDER,),
                 app_root=None):
        """Wrap *application* (a WSGI callable) for FastCGI serving.

        environ: extra variables merged into every request's environ.
        debug: when True, unhandled exceptions render a cgitb page.
        roles: accepted FastCGI roles (responder only by default).
        app_root: URL prefix stripped from PATH_INFO (see _sanitizeEnv).
        """
        if environ is None:
            environ = {}
        self.application = application
        self.environ = environ
        self.multithreaded = multithreaded
        self.multiprocess = multiprocess
        self.debug = debug
        self.roles = roles
        self._connectionClass = Connection
        self.capability = {
            # If threads aren't available, these are pretty much correct.
            FCGI_MAX_CONNS: 1,
            FCGI_MAX_REQS: 1,
            FCGI_MPXS_CONNS: 0
        }
        self.app_root = app_root

    def run(self):
        """Serve FastCGI over the stdin handle until the connection closes."""
        # Windows: put stdin into binary mode so record payloads are not
        # mangled by CRLF translation.
        msvcrt.setmode(sys.stdin.fileno(), os.O_BINARY)
        stdin = sys.stdin
        # NOTE(review): responses are written to a file object opened on
        # *stdin*'s descriptor (unbuffered) -- presumably the FastCGI pipe
        # handed to the process on Windows is a duplex pipe; confirm this
        # is intentional before changing it to sys.stdout.
        stdout = os.fdopen(sys.stdin.fileno(), 'w', 0)
        conn = Connection(stdin, stdout, self)
        conn.run()

    def handler(self, req):
        """Special handler for WSGI.

        Builds the WSGI environ from the request params, invokes the
        application, and streams the response to req.stdout. Returns an
        (appStatus, protocolStatus) pair.
        """
        if req.role not in self.roles:
            return FCGI_UNKNOWN_ROLE, 0
        # Mostly taken from example CGI gateway.
        environ = req.params
        environ.update(self.environ)
        environ['wsgi.version'] = (1, 0)
        environ['wsgi.input'] = req.stdin
        # Application errors go both to the process stderr and to the
        # request's FCGI_STDERR stream.
        stderr = TeeOutputStream((sys.stderr, req.stderr))
        environ['wsgi.errors'] = stderr
        environ['wsgi.multithread'] = False
        environ['wsgi.multiprocess'] = False
        environ['wsgi.run_once'] = False
        if environ.get('HTTPS', 'off') in ('on', '1'):
            environ['wsgi.url_scheme'] = 'https'
        else:
            environ['wsgi.url_scheme'] = 'http'
        self._sanitizeEnv(environ)
        # PEP 333 bookkeeping shared by the two closures below.
        headers_set = []
        headers_sent = []
        result = None

        def write(data):
            # PEP 333 write() callable: emits status+headers lazily on the
            # first body write, then streams data to the FastCGI stdout.
            assert type(data) is str, 'write() argument must be string'
            assert headers_set, 'write() before start_response()'
            if not headers_sent:
                status, responseHeaders = headers_sent[:] = headers_set
                found = False
                for header, value in responseHeaders:
                    if header.lower() == 'content-length':
                        found = True
                        break
                if not found and result is not None:
                    try:
                        # Single-chunk responses get an automatic
                        # Content-Length (len() may fail for iterators).
                        if len(result) == 1:
                            responseHeaders.append(('Content-Length',
                                                    str(len(data))))
                    except:
                        pass
                s = 'Status: %s\r\n' % status
                for header in responseHeaders:
                    s += '%s: %s\r\n' % header
                s += '\r\n'
                req.stdout.write(s)
            req.stdout.write(data)
            req.stdout.flush()

        def start_response(status, response_headers, exc_info=None):
            # PEP 333 start_response callable.
            if exc_info:
                try:
                    if headers_sent:
                        # Re-raise if too late
                        raise exc_info[0], exc_info[1], exc_info[2]
                finally:
                    exc_info = None  # avoid dangling circular ref
            else:
                assert not headers_set, 'Headers already set!'
            assert type(status) is str, 'Status must be a string'
            assert len(status) >= 4, 'Status must be at least 4 characters'
            assert int(status[:3]), 'Status must begin with 3-digit code'
            assert status[3] == ' ', 'Status must have a space after code'
            assert type(response_headers) is list, 'Headers must be a list'
            if __dbg__:
                # Header validation only runs in debug mode.
                logging.debug('response headers:')
                for name, val in response_headers:
                    assert type(name) is str, 'Header name "%s" must be a string' % name
                    assert type(val) is str, 'Value of header "%s" must be a string' % name
                    logging.debug('%s: %s' % (name, val))
            headers_set[:] = [status, response_headers]
            return write

        try:
            try:
                result = self.application(environ, start_response)
                try:
                    for data in result:
                        if data:
                            write(data)
                    if not headers_sent:
                        write('')  # in case body was empty
                finally:
                    # if hasattr(result, 'close'):
                    #     result.close()
                    pass
            # except socket.error, e:
            #     if e[0] != errno.EPIPE:
            #         raise # Don't let EPIPE propagate beyond server
            except:
                raise
        finally:
            pass
        return FCGI_REQUEST_COMPLETE, 0

    def _sanitizeEnv(self, environ):
        """Ensure certain values are present, if required by WSGI."""
        if __dbg__:
            logging.debug('raw envs: {0}'.format(environ))
        environ['SCRIPT_NAME'] = ''
        reqUri = None
        if environ.has_key('REQUEST_URI'):
            # Split into (path, query string) at the first '?'.
            reqUri = environ['REQUEST_URI'].split('?', 1)
        if not environ.has_key('PATH_INFO') or not environ['PATH_INFO']:
            if reqUri is not None:
                environ['PATH_INFO'] = reqUri[0]
            else:
                environ['PATH_INFO'] = ''
        # convert %XX to python unicode
        environ['PATH_INFO'] = urllib.unquote(environ['PATH_INFO'])
        # process app_root: strip the configured prefix so URL routing sees
        # app-relative paths, and expose it as SCRIPT_NAME.
        if self.app_root and environ['PATH_INFO'].startswith(self.app_root):
            environ['PATH_INFO'] = environ['PATH_INFO'][len(self.app_root):]
            environ['SCRIPT_NAME'] = self.app_root
        if not environ.has_key('QUERY_STRING') or not environ['QUERY_STRING']:
            if reqUri is not None and len(reqUri) > 1:
                environ['QUERY_STRING'] = reqUri[1]
            else:
                environ['QUERY_STRING'] = ''
        # If any of these are missing, it probably signifies a broken
        # server...
        for name, default in [('REQUEST_METHOD', 'GET'),
                              ('SERVER_NAME', 'localhost'),
                              ('SERVER_PORT', '80'),
                              ('SERVER_PROTOCOL', 'HTTP/1.0')]:
            if not environ.has_key(name):
                environ['wsgi.errors'].write('%s: missing FastCGI param %s '
                                             'required by WSGI!\n' %
                                             (self.__class__.__name__, name))
                environ[name] = default

    def error(self, req):
        """
        Called by Request if an exception occurs within the handler. May and
        should be overridden.
        """
        if self.debug:
            # Debug mode: render a full cgitb traceback page.
            import cgitb
            req.stdout.write('Status: 500 Internal Server Error\r\n' +
                             'Content-Type: text/html\r\n\r\n' +
                             cgitb.html(sys.exc_info()))
        else:
            # Production: generic 500 page with no internal details.
            errorpage = """<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML 2.0//EN">
<html><head>
<title>Unhandled Exception</title>
</head><body>
<h1>Unhandled Exception</h1>
<p>An unhandled exception was thrown by the application.</p>
</body></html>
"""
            req.stdout.write('Status: 500 Internal Server Error\r\n' +
                             'Content-Type: text/html\r\n\r\n' +
                             errorpage)
def example_application(environ, start_response):
    """Example WSGI app that dumps the WSGI environment as plain text.

    Yields a single response chunk; Content-Length is computed up front
    so the FastCGI layer does not have to guess it.
    """
    logging.debug('wsgi app started')
    # FIX: the previous env_keys = environ.keys(); env_keys.sort() is
    # Python-2-only (dict.keys() returns a view without .sort() on
    # Python 3); sorted() works on both. join() also avoids the
    # quadratic string "+=" loop.
    lines = ['%s: %s' % (key, environ[key]) for key in sorted(environ)]
    lines.append('sys.version: ' + sys.version)
    data = '\n'.join(lines) + '\n'
    start_response('200 OK', [('Content-Type', 'text/plain'), ('Content-Length', str(len(data)))])
    yield data
def run_example_app():
    """Serve the built-in example WSGI application over FastCGI."""
    if __dbg__:
        logging.info('run_fcgi: STARTED')
    server = FCGIServer(example_application)
    server.run()
    if __dbg__:
        logging.info('run_fcgi: EXITED')
def run_django_app(django_settings_module, django_root):
    """Serve a Django application over FastCGI.

    django_settings_module may be either a dotted python path or the
    physical path of the settings file; django_root is stripped from the
    front of request URLs (see FCGIServer.app_root).
    """
    if os.path.exists(django_settings_module):
        # Physical path: derive the dotted module path from the filesystem.
        app_path, settings_file = os.path.split(django_settings_module)
        # The app package's parent directory must be importable.
        parent_dir = os.path.dirname(app_path)
        if parent_dir not in sys.path:
            sys.path.append(parent_dir)
            if __dbg__:
                logging.debug('%s added to PYTHONPATH' % parent_dir)
        # Drop a trailing '.py' extension from the settings file name.
        if settings_file.endswith('.py'):
            settings_file = settings_file[:-3]
        settings_module = '%s.%s' % (os.path.basename(app_path), settings_file)
    else:
        # Assume the caller passed a valid dotted python path already.
        settings_module = django_settings_module
    os.environ['DJANGO_SETTINGS_MODULE'] = settings_module
    if __dbg__:
        logging.info('DJANGO_SETTINGS_MODULE set to %s' % settings_module)
    try:
        from django.core.handlers.wsgi import WSGIHandler
        FCGIServer(WSGIHandler(), app_root=django_root).run()
    except ImportError:
        if __dbg__:
            logging.error(
                'Could not import django.core.handlers.wsgi module. Check that django is installed and in PYTHONPATH.')
        raise
def run_wsgi_app(wsgi_app_path, django_root):
    """Serve an arbitrary WSGI application named by dotted path."""
    try:
        wsgi_app = import_function(wsgi_app_path)
    except:
        # Log for diagnosis, then let the original exception propagate.
        if __dbg__:
            logging.error('Could not import WSGI APP: {0}'.format(wsgi_app_path))
        raise
    server = FCGIServer(wsgi_app, app_root=django_root)
    server.run()
def import_function(func_path):
    """Import and return the object named by a dotted path.

    Everything up to the last dot must be an importable module path; the
    final component is resolved by attribute access.
    """
    module_name = func_path.rpartition('.')[0]
    obj = __import__(module_name)
    # __import__ returns the top-level package; walk down to the target.
    for attr in func_path.split('.')[1:]:
        obj = getattr(obj, attr)
    return obj
if __name__ == '__main__':
    # parse options
    usage = "usage: %prog [options]"
    parser = OptionParser(usage)
    parser.add_option("", "--django-settings-module", dest="django_settings_module",
                      help="The FQPN or physical path of Django settings module")
    parser.add_option("", "--django-root", dest="django_root",
                      help="strip this string from the front of any URLs before matching them against your URLconf patterns")
    parser.add_option("", "--wsgi-app", dest="wsgi_app",
                      help="The FQPN of a WSGI application object to serve")
    parser.add_option("", "--debug", dest="debug", action="store_true",
                      help="Enables debug logging")
    # Command-line flags fall back to environment variables when omitted.
    parser.set_defaults(
        django_settings_module=os.environ.get('DJANGO_SETTINGS_MODULE', None),
        django_root=os.environ.get('django.root', None),
        wsgi_app=os.environ.get('WSGI_APP', None),
        debug=os.environ.get('ZOOFCGI_DEBUG', False),
    )
    (options, args) = parser.parse_args()
    # Module-level debug flag consulted by the functions above.
    __dbg__ = options.debug
    # compile self
    # Best-effort pre-compilation of this script; failures are ignored.
    compiled = os.path.split(__file__)[-1].replace('.py', '.pyc' if __dbg__ else '.pyo')
    if not os.path.exists(compiled):
        import py_compile
        try:
            py_compile.compile(__file__)
        except:
            pass
    # enable logging
    if __dbg__:
        # One timestamped log file per process, next to this script.
        logging.basicConfig(
            filename=os.path.join(os.path.dirname(__file__), '_zoofcgi_%s_%d.log' % (
                datetime.datetime.now().strftime('%y%m%d_%H%M%S'), os.getpid())),
            filemode='w',
            format='%(asctime)s [%(levelname)-5s] %(message)s',
            level=logging.DEBUG)
    if options.django_settings_module:
        # check django app by DJANGO_SETTINGS_MODULE
        run_django_app(options.django_settings_module, options.django_root)
    elif options.wsgi_app:
        # run general WSGI app by WSGI_APP
        run_wsgi_app(options.wsgi_app, options.django_root)
    else:
        # run example app
        run_example_app()
|
alexsilva/helicon-zoofcgi
|
zoofcgi.py
|
Python
|
mit
| 34,047 | 0.001351 |
# -*- coding: utf-8 -*-
# Pluggable PayPal NVP (Name Value Pair) API implementation for Django.
# This file includes the PayPal driver class that maps NVP API methods to such
# simple functions.
# Feel free to distribute, modify or use any open or closed project without
# any permission.
# Author: Ozgur Vatansever
# Email: ozgurvt@gmail.com
from cgi import parse_qs
import urllib
import urllib2
from django.conf import settings
# Exception messages
# User-facing error strings; wording (typos included) is kept stable since
# callers may compare against or display these verbatim.
TOKEN_NOT_FOUND_ERROR = (
    "PayPal error occured. There is no TOKEN info to finish performing PayPal "
    "payment process. We haven't charged your money yet."
)
NO_PAYERID_ERROR = (
    "PayPal error occured. There is no PAYERID info to finish performing "
    "PayPal payment process. We haven't charged your money yet."
)
# Fallback when PayPal returns a failure ACK during checkout setup.
GENERIC_PAYPAL_ERROR = (
    "There occured an error while performing PayPal checkout process. We "
    "apologize for the inconvenience. We haven't charged your money yet."
)
# Fallback for a failed payment / capture / void call.
GENERIC_PAYMENT_ERROR = (
    "Transaction failed. Check out your order details again."
)
# Fallback for a failed refund call.
GENERIC_REFUND_ERROR = (
    "An error occured, we can not perform your refund request"
)
class PayPal(object):
    """
    Pluggable Python PayPal Driver that implements NVP (Name Value Pair) API
    methods.

    There are 3 main methods to execute in order to finish the PayPal
    payment process:
        1) SetExpressCheckout
        2) GetExpressCheckoutDetails (optional)
        3) DoExpressCheckoutPayment

    :param username: The PayPal username
    :type username: string
    :param password: The PayPal password
    :type password: string
    :param signature: The PayPal signature
    :type signature: string
    """

    # ACK values PayPal uses to signal a successful API call.
    SUCCESS_STATES = ("Success", "SuccessWithWarning")

    def __init__(self, username=None, password=None, signature=None):
        # NOTE: the misspelled attribute name 'credientials' is kept for
        # backward compatibility with existing callers.
        self.credientials = {
            "USER": username or getattr(settings, "PAYPAL_USER", None),
            "PWD": password or getattr(settings, "PAYPAL_PASSWORD", None),
            "SIGNATURE": signature or getattr(
                settings, "PAYPAL_SIGNATURE", None
            ),
            "VERSION": "53.0",
        }
        # Second step is to set the API end point and redirect urls correctly
        # (sandbox vs. production, driven by settings.PAYPAL_DEBUG).
        if getattr(settings, "PAYPAL_DEBUG", False):
            self.NVP_API_ENDPOINT = "https://api-3t.sandbox.paypal.com/nvp"
            self.PAYPAL_REDIRECT_URL = (
                "https://www.sandbox.paypal.com/cgi-bin/webscr?"
                "cmd=_express-checkout&token="
            )
        else:
            self.NVP_API_ENDPOINT = "https://api-3t.paypal.com/nvp"
            self.PAYPAL_REDIRECT_URL = (
                "https://www.paypal.com/cgi-bin/webscr?"
                "cmd=_express-checkout&token="
            )
        # Pre-encoded credential prefix shared by every API call.
        self.signature = urllib.urlencode(self.credientials) + '&'
        self.setexpresscheckouterror = None
        self.getexpresscheckoutdetailserror = None
        self.doexpresscheckoutpaymenterror = None
        self.refundtransactionerror = None
        self.apierror = None
        self.api_response = None
        self.token = None
        self.response = None
        self.refund_response = None

    def _get_value_from_qs(self, qs, value):
        """
        Return the value stored under *value* in a parsed querystring dict.

        Handles both parse_qs-style dicts (values are lists; the first
        element is returned) and plain string-valued dicts. Returns None
        when the key is missing or the list is empty.
        This is a private helper function, so DO NOT call this explicitly.
        """
        raw = qs.get(value)
        if isinstance(raw, list):
            # BUGFIX: raw[0] on an empty list raises IndexError, but the
            # previous code caught KeyError and therefore crashed here.
            return raw[0] if raw else None
        return raw

    def paypal_url(self, token=None):
        """
        Returns a 'redirect url' for PayPal payments.
        If token was null, this function MUST NOT return any URL.
        """
        token = token if token is not None else self.token
        if not token:
            return None
        return self.PAYPAL_REDIRECT_URL + token

    def _call_api(self, parameters):
        """
        POST *parameters* (plus the pre-encoded credentials) to the NVP
        endpoint and return the raw response body.
        Private helper shared by every public API method.
        """
        query_string = self.signature + urllib.urlencode(parameters)
        return urllib2.urlopen(self.NVP_API_ENDPOINT, query_string).read()

    def _call_api_tokens(self, parameters):
        """
        POST *parameters* to the NVP endpoint and parse the 'k=v&k2=v2'
        response body into a dict of unquoted strings.
        Replaces four copy-pasted parsing loops in the original driver;
        partition() also keeps values that themselves contain '=' intact
        (split("=")[1] used to truncate them).
        """
        response = self._call_api(parameters)
        response_tokens = {}
        for token in response.split('&'):
            key, _, value = token.partition('=')
            response_tokens[key] = urllib.unquote(value)
        return response_tokens

    def SetExpressCheckout(self, amount, currency, return_url, cancel_url,
                           **kwargs):
        """
        To set up an Express Checkout transaction, you must invoke the
        SetExpressCheckout API to provide sufficient information to initiate
        the payment flow and redirect to PayPal if the operation was
        successful.

        @currency: see PayPal's NVP currency-code list
        @amount: should be string with the following format '10.00'
        @return_url / @cancel_url: scheme://hostname[:uri (optional)]
        @returns bool

        Extra NVP parameters can be passed via **kwargs, for instance:
            SetExpressCheckout(10.00, 'US', return_url, cancel_url,
                               **{'SHIPTOSTREET': 'T Street'})
        """
        parameters = {
            'METHOD': 'SetExpressCheckout',
            'NOSHIPPING': 1,
            'PAYMENTACTION': 'Sale',
            'RETURNURL': return_url,
            'CANCELURL': cancel_url,
            'AMT': amount,
            'CURRENCYCODE': currency,
        }
        parameters.update(kwargs)
        response_dict = parse_qs(self._call_api(parameters))
        self.api_response = response_dict
        state = self._get_value_from_qs(response_dict, "ACK")
        if state in self.SUCCESS_STATES:
            self.token = self._get_value_from_qs(response_dict, "TOKEN")
            return True
        self.setexpresscheckouterror = GENERIC_PAYPAL_ERROR
        self.apierror = self._get_value_from_qs(
            response_dict, "L_LONGMESSAGE0"
        )
        return False

    # If SetExpressCheckout is successful, redirect the browser to
    # paypal_url(), e.g. https://www.sandbox.paypal.com/cgi-bin/webscr?
    # cmd=_express-checkout&token=TOKEN (development-only URL).

    def GetExpressCheckoutDetails(self, token):
        """
        This method performs the NVP API method that is responsible from
        getting the payment details. This returns True if successfully fetch
        the checkout details, otherwise returns False.
        All of the parameters are REQUIRED.
        @returns bool
        """
        token = self.token if token is None else token
        if token is None:
            # BUGFIX: previously assigned to a misspelled attribute
            # ('getexpresscheckoutdetails'), leaving the real error
            # attribute unset.
            self.getexpresscheckoutdetailserror = TOKEN_NOT_FOUND_ERROR
            return False
        parameters = {
            'METHOD': "GetExpressCheckoutDetails",
            'TOKEN': token,
        }
        response_dict = parse_qs(self._call_api(parameters))
        self.api_response = response_dict
        state = self._get_value_from_qs(response_dict, "ACK")
        if state not in self.SUCCESS_STATES:
            self.getexpresscheckoutdetailserror = self._get_value_from_qs(
                response_dict, "L_SHORTMESSAGE0"
            )
            self.apierror = self.getexpresscheckoutdetailserror
            return False
        return True

    def DoExpressCheckoutPayment(self, currency, amount, token=None,
                                 payerid=None, **kwargs):
        """
        This method performs the NVP API method that is responsible from
        doing the actual payment.
        All of the parameters are REQUIRED.

        @currency: see PayPal's NVP currency-code list
        @amount : should be string with the following format '10.00'
        @token : token from the SetExpressCheckout result
        @payerid : payerid from PayPal's redirect after SetExpressCheckout
        @returns bool
        """
        if token is None:
            self.doexpresscheckoutpaymenterror = TOKEN_NOT_FOUND_ERROR
            return False
        if payerid is None:
            self.doexpresscheckoutpaymenterror = NO_PAYERID_ERROR
            return False
        parameters = {
            'METHOD': "DoExpressCheckoutPayment",
            'PAYMENTACTION': 'Sale',
            'TOKEN': token,
            'AMT': amount,
            'CURRENCYCODE': currency,
            'PAYERID': payerid,
        }
        parameters.update(kwargs)
        response_tokens = self._call_api_tokens(parameters)
        self.response = response_tokens
        self.api_response = response_tokens
        state = self._get_value_from_qs(response_tokens, "ACK")
        if state not in self.SUCCESS_STATES:
            self.doexpresscheckoutpaymenterror = GENERIC_PAYMENT_ERROR
            self.apierror = self._get_value_from_qs(
                response_tokens, "L_LONGMESSAGE0"
            )
            return False
        return True

    def DoCapture(self, currency, amount, authorizationid, complete=False,
                  **kwargs):
        """
        Capture a previously authorized payment.

        @currency / @amount : as in DoExpressCheckoutPayment
        @authorizationid : id of the authorization to capture
        @complete : True for a final ('Complete') capture, False to allow
                    further captures ('NotComplete')
        @returns bool
        """
        if complete:
            complete_type = 'Complete'
        else:
            complete_type = 'NotComplete'
        parameters = {
            'METHOD': 'DoCapture',
            'AUTHORIZATIONID': authorizationid,
            'AMT': amount,
            'CURRENCYCODE': currency,
            'COMPLETETYPE': complete_type
        }
        parameters.update(kwargs)
        response_tokens = self._call_api_tokens(parameters)
        self.response = response_tokens
        self.api_response = response_tokens
        state = self._get_value_from_qs(response_tokens, "ACK")
        if state not in self.SUCCESS_STATES:
            # NOTE: kept for backward compatibility -- capture failures are
            # reported via the express-checkout error attribute.
            self.doexpresscheckoutpaymenterror = GENERIC_PAYMENT_ERROR
            self.apierror = self._get_value_from_qs(
                response_tokens, "L_LONGMESSAGE0"
            )
            return False
        return True

    def DoVoid(self, authorizationid, **kwargs):
        """
        Void an order or an authorization.

        @authorizationid : id of the authorization to void
        @returns bool
        """
        parameters = {
            'METHOD': 'DoVoid',
            'AUTHORIZATIONID': authorizationid,
        }
        parameters.update(kwargs)
        response_tokens = self._call_api_tokens(parameters)
        self.response = response_tokens
        self.api_response = response_tokens
        state = self._get_value_from_qs(response_tokens, "ACK")
        if state not in self.SUCCESS_STATES:
            # NOTE: kept for backward compatibility -- void failures are
            # reported via the express-checkout error attribute.
            self.doexpresscheckoutpaymenterror = GENERIC_PAYMENT_ERROR
            self.apierror = self._get_value_from_qs(
                response_tokens, "L_LONGMESSAGE0"
            )
            return False
        return True

    def RefundTransaction(self, transid, refundtype, currency=None,
                          amount=None, note="Refund"):
        """
        Performs PayPal API method for refund.

        @transid : id of the transaction to refund
        @refundtype: 'Full' or 'Partial'
        @currency / @amount / @note : only sent for 'Partial' refunds

        The parsed PayPal response is stored in self.refund_response /
        self.api_response (keys like ACK, REFUNDTRANSACTIONID,
        GROSSREFUNDAMT, L_LONGMESSAGE0, ...).
        @returns bool
        """
        if refundtype not in ("Full", "Partial"):
            self.refundtransactionerror = (
                "Wrong parameters given, We can not perform your refund "
                "request"
            )
            return False
        parameters = {
            'METHOD': "RefundTransaction",
            'TRANSACTIONID': transid,
            'REFUNDTYPE': refundtype,
        }
        if refundtype == "Partial":
            parameters.update({
                'AMT': amount,
                'CURRENCYCODE': currency,
                'NOTE': note
            })
        response_tokens = self._call_api_tokens(parameters)
        self.refund_response = response_tokens
        self.api_response = response_tokens
        state = self._get_value_from_qs(response_tokens, "ACK")
        if state not in self.SUCCESS_STATES:
            self.refundtransactionerror = GENERIC_REFUND_ERROR
            return False
        return True

    def GetPaymentResponse(self):
        """Return the parsed response of the last payment-type API call."""
        return self.response

    def GetRefundResponse(self):
        """Return the parsed response of the last RefundTransaction call."""
        return self.refund_response
|
leepa/django-paypal-driver
|
paypal/driver.py
|
Python
|
gpl-2.0
| 14,675 | 0.000409 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import logging
import json
from odoo import api, fields, models, exceptions, _
from odoo.addons.iap import jsonrpc
from requests.exceptions import ConnectionError, HTTPError
from odoo.addons.iap.models.iap import InsufficientCreditError
_logger = logging.getLogger(__name__)

# Base URL of Odoo's IAP partner-autocomplete service; overridable via the
# 'iap.partner_autocomplete.endpoint' system parameter (see get_endpoint()).
DEFAULT_ENDPOINT = 'https://partner-autocomplete.odoo.com'
class ResPartner(models.Model):
    """Extend res.partner with IAP partner-autocomplete support."""
    _name = 'res.partner'
    _inherit = 'res.partner'

    # Identifier of this company in Odoo's central partner database.
    partner_gid = fields.Integer('Company database ID')
    # JSON blob of extra data from the service; posted as a note on create
    # and then cleared (see create()).
    additional_info = fields.Char('Additional info')

    @api.model
    def _replace_location_code_by_id(self, record):
        """Replace state/country codes and names in *record* by db data.

        Pops 'state_code'/'state_name'/'country_code'/'country_name' and
        stores matching {'id', 'display_name'} dicts under 'country_id'
        and 'state_id'. Returns the mutated record.
        """
        record['country_id'], record['state_id'] = self._find_country_data(
            state_code=record.pop('state_code', False),
            state_name=record.pop('state_name', False),
            country_code=record.pop('country_code', False),
            country_name=record.pop('country_name', False)
        )
        return record

    @api.model
    def _format_data_company(self, company):
        """Normalize a company payload returned by the remote service."""
        self._replace_location_code_by_id(company)
        if company.get('child_ids'):
            # Children (e.g. contacts/branches) get the same normalization.
            child_ids = []
            for child in company.get('child_ids'):
                child_ids.append(self._replace_location_code_by_id(child))
            company['child_ids'] = child_ids
        if company.get('additional_info'):
            # Serialize so it fits the Char field.
            company['additional_info'] = json.dumps(company['additional_info'])
        return company

    @api.model
    def _find_country_data(self, state_code, state_name, country_code, country_name):
        """Resolve a (country_id, state_id) pair from codes/names.

        Returns two {'id', 'display_name'} dicts; either may be empty when
        no match is found.
        """
        # Prefer the code; fall back to a case-insensitive name match.
        country = self.env['res.country'].search([['code', '=ilike', country_code]])
        if not country:
            country = self.env['res.country'].search([['name', '=ilike', country_name]])
        state_id = {}
        country_id = {}
        if country:
            country_id = {
                'id': country.id,
                'display_name': country.display_name
            }
            if state_name or state_code:
                # Match either the state name or its code within the country.
                state = self.env['res.country.state'].search([
                    ('country_id', '=', country_id.get('id')),
                    '|',
                    ('name', '=ilike', state_name),
                    ('code', '=ilike', state_code)
                ], limit=1)
                if state:
                    state_id = {
                        'id': state.id,
                        'display_name': state.display_name
                    }
        else:
            _logger.info('Country code not found: %s', country_code)
        return country_id, state_id

    @api.model
    def get_endpoint(self):
        """Return the autocomplete endpoint URL (configurable via ICP)."""
        url = self.env['ir.config_parameter'].sudo().get_param('iap.partner_autocomplete.endpoint', DEFAULT_ENDPOINT)
        url += '/iap/partner_autocomplete'
        return url

    @api.model
    def _rpc_remote_api(self, action, params, timeout=15):
        """Call the remote IAP service.

        Returns a (result, error) tuple: (payload, False) on success,
        (False, error-message-string) on failure.
        """
        if self.env.registry.in_test_mode():
            # Never hit the network from the test runner.
            return False, 'Insufficient Credit'
        url = '%s/%s' % (self.get_endpoint(), action)
        account = self.env['iap.account'].get('partner_autocomplete')
        # Identify the database/account and localize results.
        params.update({
            'db_uuid': self.env['ir.config_parameter'].sudo().get_param('database.uuid'),
            'account_token': account.account_token,
            'country_code': self.env.user.company_id.country_id.code,
            'zip': self.env.user.company_id.zip,
        })
        try:
            return jsonrpc(url=url, params=params, timeout=timeout), False
        except (ConnectionError, HTTPError, exceptions.AccessError) as exception:
            _logger.error('Autocomplete API error: %s' % str(exception))
            return False, str(exception)
        except InsufficientCreditError as exception:
            _logger.warning('Insufficient Credits for Autocomplete Service: %s' % str(exception))
            return False, 'Insufficient Credit'

    @api.model
    def autocomplete(self, query):
        """Return a list of company suggestions matching *query*."""
        suggestions, error = self._rpc_remote_api('search', {
            'query': query,
        })
        if suggestions:
            results = []
            for suggestion in suggestions:
                results.append(suggestion)
            return results
        else:
            # Errors (including missing credit) degrade to "no suggestions".
            return []

    @api.model
    def enrich_company(self, company_domain, partner_gid, vat):
        """Fetch extended company data for a domain/gid/vat triple.

        Returns the formatted company dict; on failure the dict carries
        'error'/'error_message' keys in addition to any partial data.
        """
        response, error = self._rpc_remote_api('enrich', {
            'domain': company_domain,
            'partner_gid': partner_gid,
            'vat': vat,
        })
        if response and response.get('company_data'):
            result = self._format_data_company(response.get('company_data'))
        else:
            result = {}
        if error:
            result.update({
                'error': True,
                'error_message': error
            })
        return result

    @api.model
    def read_by_vat(self, vat):
        """Look up company data by VAT number; returns a 0-or-1-item list."""
        vies_vat_data, error = self._rpc_remote_api('search_vat', {
            'vat': vat,
        })
        if vies_vat_data:
            return [self._format_data_company(vies_vat_data)]
        else:
            return []

    @api.model
    def _is_company_in_europe(self, country_code):
        """Return False only when the country is known and not in Europe.

        NOTE(review): an unknown country_code returns True (the 'if
        country' branch is skipped) -- confirm that is intended.
        """
        country = self.env['res.country'].search([('code', '=ilike', country_code)])
        if country:
            country_id = country.id
            europe = self.env.ref('base.europe')
            if not europe:
                # Fallback if the XML id is missing in this database.
                europe = self.env["res.country.group"].search([('name', '=', 'Europe')], limit=1)
            if not europe or country_id not in europe.country_ids.ids:
                return False
        return True

    def _is_vat_syncable(self, vat):
        """A VAT is syncable when European and consistent with the
        partner's country (or when the partner has no country set)."""
        # The first two characters of a European VAT are the country code.
        vat_country_code = vat[:2]
        partner_country_code = self.country_id and self.country_id.code
        return self._is_company_in_europe(vat_country_code) and (partner_country_code == vat_country_code or not partner_country_code)

    def _is_synchable(self):
        """Company partners with a gid that were never synched qualify."""
        already_synched = self.env['res.partner.autocomplete.sync'].search([('partner_id', '=', self.id), ('synched', '=', True)])
        return self.is_company and self.partner_gid and not already_synched

    def _update_autocomplete_data(self, vat):
        """Queue this (single) partner for synchronization when eligible."""
        self.ensure_one()
        if vat and self._is_synchable() and self._is_vat_syncable(vat):
            self.env['res.partner.autocomplete.sync'].sudo().add_to_queue(self.id)

    @api.model_create_multi
    def create(self, vals_list):
        partners = super(ResPartner, self).create(vals_list)
        if len(vals_list) == 1:
            # Only single-record creates can be matched to one VAT value.
            partners._update_autocomplete_data(vals_list[0].get('vat', False))
            if partners.additional_info:
                # Surface the extra service data as a chatter note, then
                # clear the transient field.
                partners.message_post_with_view(
                    'partner_autocomplete.additional_info_template',
                    values=json.loads(partners.additional_info),
                    subtype_id=self.env.ref('mail.mt_note').id,
                )
                partners.write({'additional_info': False})
        return partners

    @api.multi
    def write(self, values):
        res = super(ResPartner, self).write(values)
        if len(self) == 1:
            # Mirror create(): only single-record writes are considered.
            self._update_autocomplete_data(values.get('vat', False))
        return res
|
t3dev/odoo
|
addons/partner_autocomplete/models/res_partner.py
|
Python
|
gpl-3.0
| 7,237 | 0.001935 |
# -*- coding: utf-8 -*-
# This technical data was produced for the U. S. Government under Contract No. W15P7T-13-C-F600, and
# is subject to the Rights in Technical Data-Noncommercial Items clause at DFARS 252.227-7013 (FEB 2012)
from django.contrib.gis.db import models
class AOIManager(models.GeoManager):
    """Manager adding status-based convenience filters for AOI querysets."""

    def add_filters(self, **kwargs):
        """Return the base queryset narrowed by the given field lookups."""
        base_queryset = super(AOIManager, self).get_query_set()
        return base_queryset.filter(**kwargs)

    def unassigned(self):
        """Return AOIs whose status is 'Unassigned'."""
        return self.add_filters(status='Unassigned')

    def assigned(self):
        """Return AOIs whose status is 'Assigned'."""
        return self.add_filters(status='Assigned')

    def in_work(self):
        """Return AOIs whose status is 'In Work'."""
        return self.add_filters(status='In Work')

    def submitted(self):
        """Return AOIs whose status is 'Submitted'."""
        return self.add_filters(status='Submitted')

    def completed(self):
        """Return AOIs whose status is 'Completed'."""
        return self.add_filters(status='Completed')
|
stephenrjones/geoq
|
geoq/core/managers.py
|
Python
|
mit
| 1,155 | 0.001732 |
"""Learner that implements a greedy learning algorithm"""
import time
from pebl import network, result, evaluator
from pebl.util import *
from pebl.learner.base import *
class GreedyLearnerStatistics:
    """Mutable counters describing one greedy-search run."""

    def __init__(self):
        self.restarts = -1              # bumped to 0 on the first pass
        self.iterations = 0             # total alteration attempts
        self.unimproved_iterations = 0  # consecutive non-improving steps
        self.best_score = 0             # best network score seen so far
        self.start_time = time.time()   # wall-clock start of the run

    @property
    def runtime(self):
        """Seconds elapsed since this statistics object was created."""
        elapsed = time.time() - self.start_time
        return elapsed
class GreedyLearner(Learner):
    """Learner that repeatedly hill-climbs from (random) starting networks."""

    #
    # Parameters
    #
    _params = (
        config.IntParameter(
            'greedy.max_iterations',
            """Maximum number of iterations to run.""",
            default=1000
        ),
        config.IntParameter(
            'greedy.max_time',
            """Maximum learner runtime in seconds.""",
            default=0
        ),
        config.IntParameter(
            'greedy.max_unimproved_iterations',
            """Maximum number of iterations without score improvement before
            a restart.""",
            default=500
        ),
        config.StringParameter(
            'greedy.seed',
            'Starting network for a greedy search.',
            default=''
        )
    )

    def __init__(self, data_=None, prior_=None, **options):
        """
        Create a learner that uses a greedy learning algorithm.

        The algorithm works as follows:
            1. start with a random network
            2. Make a small, local change and rescore network
            3. If new network scores better, accept it, otherwise reject.
            4. Steps 2-3 are repeated till the restarting_criteria is met, at
               which point we begin again with a new random network (step 1)

        Any config param for 'greedy' can be passed in via options.
        Use just the option part of the parameter name.

        For more information about greedy learning algorithms, consult:
            1. http://en.wikipedia.org/wiki/Greedy_algorithm
            2. D. Heckerman. A Tutorial on Learning with Bayesian Networks.
               Microsoft Technical Report MSR-TR-95-06, 1995. p.35.
        """
        super(GreedyLearner, self).__init__(data_, prior_)
        self.options = options
        # Exposes each 'greedy.*' parameter as an attribute on self
        # (max_iterations, max_time, max_unimproved_iterations, seed).
        config.setparams(self, options)
        if not isinstance(self.seed, network.Network):
            # A string seed is parsed into a Network over the data variables.
            self.seed = network.Network(self.data.variables, self.seed)

    def run(self):
        """Run the learner.

        Returns a LearnerResult instance. Also sets self.result to that
        instance.
        """
        # max_time and max_iterations are mutually exclusive stopping criteria
        if 'max_time' not in self.options:
            _stop = self._stop_after_iterations
        else:
            _stop = self._stop_after_time
        self.stats = GreedyLearnerStatistics()
        self.result = result.LearnerResult(self)
        self.evaluator = evaluator.fromconfig(self.data, self.seed, self.prior)
        self.evaluator.score_network(self.seed.copy())
        first = True
        self.result.start_run()
        while not _stop():
            # Keep the seed network on the first pass; randomize on
            # subsequent restarts.
            self._run_without_restarts(_stop, self._restart,
                                       randomize_net=(not first))
            first = False
        self.result.stop_run()
        return self.result

    def _run_without_restarts(self, _stop, _restart, randomize_net=True):
        """One hill-climbing pass: accept alterations that improve the score."""
        self.stats.restarts += 1
        self.stats.unimproved_iterations = 0
        if randomize_net:
            self.evaluator.randomize_network()
        # set the default best score
        self.stats.best_score = self.evaluator.score_network()
        # continue learning until time to stop or restart
        while not (_restart() or _stop()):
            self.stats.iterations += 1
            try:
                curscore = self._alter_network_randomly_and_score()
            except CannotAlterNetworkException:
                # No legal local move available; end this pass early.
                return
            self.result.add_network(self.evaluator.network, curscore)
            if curscore <= self.stats.best_score:
                # score did not improve, undo network alteration
                self.stats.unimproved_iterations += 1
                self.evaluator.restore_network()
            else:
                self.stats.best_score = curscore
                self.stats.unimproved_iterations = 0

    #
    # Stopping and restarting criteria
    #
    def _stop_after_time(self):
        # Used when the caller configured 'max_time'.
        return self.stats.runtime >= self.max_time

    def _stop_after_iterations(self):
        # Default stopping criterion (see run()).
        return self.stats.iterations >= self.max_iterations

    def _restart(self):
        # Restart after too many consecutive non-improving iterations.
        return self.stats.unimproved_iterations >= self.max_unimproved_iterations
|
arnaudsj/pebl
|
src/pebl/learner/greedy.py
|
Python
|
mit
| 4,765 | 0.003568 |
# Copyright 2009 Douglas Mayle
# This file is part of YAMLTrak.
# YAMLTrak is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
# YAMLTrak is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public License
# along with YAMLTrak. If not, see <http://www.gnu.org/licenses/>.
import os
import textwrap
from termcolor import colored
from yamltrak.argparse import ArgumentParser
from yamltrak import IssueDB, NoRepository, NoIssueDB
def guess_issue_id(issuedb):
    """Determine the issue id linked to the working copy's changes.

    With more than one related issue the candidates are printed and the
    process exits with status 1. With one candidate the user is asked to
    confirm it interactively; refusal also exits with status 1.
    """
    related = issuedb.related(detail=True)
    if len(related) > 1:
        # Ambiguous: list every linked issue and bail out.
        print colored('Too many linked issues found, please specify one.', None, attrs=['reverse'])
        for issueid in related:
            print colored(textwrap.fill('Issue: %s' % issueid,
                                        initial_indent=' ', subsequent_indent=' '), None, attrs=[])
            print colored(textwrap.fill(related[issueid].get('title', '').upper(),
                                        initial_indent=' ', subsequent_indent=' '), None, attrs=[])
        import sys
        sys.exit(1)
    # NOTE(review): assumes at least one related issue exists; an empty
    # 'related' dict would raise IndexError on the next line.
    issueid = related.keys()[0]
    # Prompt user?
    print "Found only one issue."
    print colored(textwrap.fill('Issue: %s' % issueid,
                                initial_indent=' ', subsequent_indent=' '), None, attrs=[])
    print colored(textwrap.fill(related[issueid].get('title', '').upper(),
                                initial_indent=' ', subsequent_indent=' '), None, attrs=[])
    verification = raw_input("Do you want to use this issue? (Y/[N]) ")
    if verification.lower() in ['y', 'yes', 'yeah', 'oui', 'uh-huh', 'sure', 'why not?', 'meh']:
        return issueid
    print 'Aborting'
    import sys
    sys.exit(1)
def unpack_new(issuedb, args):
# We should be able to avoid this somehow by using an object dictionary.
skeleton_new = issuedb.skeleton_new
issue = {}
for field in skeleton_new:
issue[field] = getattr(args, field, None)
if issue[field] is None:
issue[field] = skeleton_new[field]
newid = issuedb.new(issue=issue)
print 'Added new issue: %s' % newid
def unpack_list(issuedb, args):
issues = issuedb.issues(status=args.status)
for id, issue in issues.iteritems():
# Try to use color for clearer output
color = None
if 'high' in issue.get('priority',''):
color = 'red'
elif 'normal' in issue.get('priority',''):
pass
elif 'low' in issue.get('priority',''):
color = 'blue'
else:
color = 'red'
# We'll use status indicators on indent for estimate
if 'long' in issue.get('estimate', {}).get('scale').lower():
indent = '>>>>'
elif 'medium' in issue.get('estimate', {}).get('scale').lower():
indent = '> > '
elif 'short' in issue.get('estimate', {}).get('scale').lower():
indent = '> '
else:
indent = '===='
print colored('Issue: %s' % id, color, attrs=['reverse'])
print colored(textwrap.fill(issue.get('title', '').upper(),
initial_indent=indent, subsequent_indent=indent), color, attrs=[])
# print colored(textwrap.fill(issue.get('description',''),
# initial_indent=indent, subsequent_indent=indent), color)
print colored(textwrap.fill(issue.get('estimate',{}).get('text',''),
initial_indent=indent, subsequent_indent=indent), color)
def unpack_edit(issuedb, args):
    """Update an existing issue, overriding fields given on the command
    line and keeping (or defaulting) everything else."""
    if not args.id:
        args.id = guess_issue_id(issuedb)
    skeleton = issuedb.skeleton
    issue = issuedb.issue(id=args.id, detail=False)[0]['data']
    newissue = {}
    for field in skeleton:
        override = getattr(args, field, None)
        newissue[field] = override or issue.get(field, skeleton[field])
    issuedb.edit(id=args.id, issue=newissue)
def unpack_show(issuedb, args):
if not args.id:
args.id = guess_issue_id(issuedb)
issuedata = issuedb.issue(id=args.id, detail=args.detail)
if not issuedata or not issuedata[0].get('data'):
print 'No such issue found'
return
issue = issuedata[0]['data']
print '\nIssue: %s' % args.id
if 'title' in issue:
print textwrap.fill(issue.get('title', '').upper(), initial_indent='', subsequent_indent='')
if 'description' in issue:
print textwrap.fill(issue['description'], initial_indent='', subsequent_indent='')
print ''
for field in sorted(issue.keys()):
if field in ['title', 'description']:
continue
print textwrap.fill('%s: %s' % (field.upper(), issue[field]), initial_indent='', subsequent_indent=' ')
if issue.get('diff'):
for changeset in issue['diff'][0].iteritems():
print 'Added: %s - %s' % (changeset[0].upper(), changeset[1])
for changeset in issue['diff'][1].iteritems():
print 'Removed: %s' % changeset[0].upper()
for changeset in issue['diff'][2].iteritems():
print 'Changed: %s - %s' % (changeset[0].upper(), changeset[1][1])
else:
# No uncommitted changes
pass
for version in issuedata[1:]:
print '\nChangeset: %s' % version['node']
print 'Committed by: %s on %s' % (version['user'], version['date'])
print 'Linked files:'
for filename in version['files']:
print ' %s' % filename
if version.get('diff'):
for changeset in version['diff'][0].iteritems():
print 'Added: %s - %s' % (changeset[0].upper(), changeset[1])
for changeset in version['diff'][1].iteritems():
print 'Removed: %s' % changeset[0].upper()
for changeset in version['diff'][2].iteritems():
print 'Changed: %s - %s' % (changeset[0].upper(), changeset[1][1])
def unpack_related(issuedb, args):
relatedissues = issuedb.related(filenames=args.files, detail=True)
for issueid, issue in relatedissues.iteritems():
print colored(textwrap.fill('Issue: %s' % issueid,
initial_indent=' ', subsequent_indent=' '), None, attrs=[])
print colored(textwrap.fill(issue.get('title', '').upper(),
initial_indent=' ', subsequent_indent=' '), None, attrs=[])
def unpack_dbinit(issuedb, args):
try:
issuedb = IssueDB(args.repository, dbinit=True)
except NoRepository:
# This means that there was no repository here.
print 'Unable to find a repository.'
import sys
sys.exit(1)
except NoIssueDB:
# Whoops
print 'Error initializing issued database'
import sys
sys.exit(1)
print 'Initialized issue database'
def unpack_close(issuedb, args):
    # Close the issue given on the command line, optionally with a
    # closing comment; with no id, try to guess it from the issues linked
    # to the working copy's modified files.
    if not args.id:
        args.id = guess_issue_id(issuedb)
    issuedb.close(args.id, args.comment)
def unpack_purge(issuedb, args):
    # Not implemented yet: would permanently remove an issue.  The
    # corresponding CLI sub-command is commented out in main().
    pass
def unpack_burndown(issuedb, args):
    # Not implemented yet: would render an ASCII burndown chart.  The
    # corresponding CLI sub-command is commented out in main().
    pass
def main():
    """Parse the command line options and react to them.

    Opens the issue database in the current directory, builds the 'yt'
    argparse command tree (new/edit/list/show/related/dbinit/close) and
    dispatches to the unpack_* handler selected via set_defaults(func=...).
    """
    try:
        issuedb = IssueDB(os.getcwd())
    except NoRepository:
        # This means that there was no repository here.
        print 'Unable to find a repository.'
        import sys
        sys.exit(1)
    except NoIssueDB:
        # This means no issue database was found. We give the option to
        # initialize one.
        parser = ArgumentParser(prog='yt', description='YAMLTrak is a distributed version controlled issue tracker.')
        subparsers = parser.add_subparsers(help=None, dest='command')
        parser_dbinit = subparsers.add_parser('dbinit',
            help="Initialize the issue database.")
        parser_dbinit.set_defaults(func=unpack_dbinit)
        args = parser.parse_args()
        # We don't have a valid database, so we call with none.
        args.repository = os.getcwd()
        args.func(None, args)
        return
    # Field definitions drive the CLI: every skeleton field becomes an
    # option on the 'new' and 'edit' sub-commands.
    skeleton = issuedb.skeleton
    skeleton_new = issuedb.skeleton_new
    parser = ArgumentParser(prog='yt', description='YAMLTrak is a distributed version controlled issue tracker.')
    # parser.add_argument('-r', '--repository',
    #     help='Use this directory as the repository instead of the current '
    #     'one.')
    # parser.add_argument('-f', '--folder',
    #     help='Look for issues in this folder, instead of the "issues" folder.')
    subparsers = parser.add_subparsers(help=None, dest='command')
    # Adding a new issue
    # NOTE(review): the loop variable 'help' shadows the builtin, and the
    # short flag '-' + field[0] will collide if two skeleton fields share
    # an initial -- presumably the skeleton guarantees uniqueness; verify.
    parser_new = subparsers.add_parser('new', help="Add a new issue.")
    parser_new.set_defaults(func=unpack_new)
    for field, help in skeleton.iteritems():
        if field not in skeleton_new:
            parser_new.add_argument('-' + field[0], '--' + field, help=help)
    for field, help in skeleton_new.iteritems():
        # NOTE(review): uses skeleton[field] rather than the skeleton_new
        # value as help text -- looks intentional (descriptions live in
        # the full skeleton), but confirm.
        parser_new.add_argument('-' + field[0], '--' + field, required=True, help=skeleton[field])
    # Editing an issue
    parser_edit = subparsers.add_parser('edit', help="Edit an issue.")
    parser_edit.set_defaults(func=unpack_edit)
    for field, help in skeleton.iteritems():
        parser_edit.add_argument('-' + field[0], '--' + field, help=help)
    parser_edit.add_argument('id', nargs='?', help='The issue id to edit.')
    # List all issues
    parser_list = subparsers.add_parser('list', help="List all issues.")
    parser_list.set_defaults(func=unpack_list)
    parser_list.add_argument('-s', '--status', default='open',
        help='List all issues with this stats. Defaults to open issues.')
    # Show an issue
    parser_show = subparsers.add_parser('show', help="Show the details for an "
        "issue.")
    parser_show.set_defaults(func=unpack_show)
    parser_show.add_argument('-d', '--detail', default=False, action='store_true',
        help='Show a detailed view of the issue')
    parser_show.add_argument('id', nargs='?',
        help='The issue id to show the details for.')
    # Get issues related to a file
    parser_related = subparsers.add_parser('related', help="List the issues "
        "related to given files.")
    parser_related.set_defaults(func=unpack_related)
    parser_related.add_argument( 'files', metavar='file', type=str, nargs='*',
        default=[],
        help='List the open issues related to these files. If no files are '
        'supplied, and the list of currently uncommitted files (excluding '
        'issues) will be checked.')
    # Initialize DB
    parser_dbinit = subparsers.add_parser('dbinit',
        help="Initialize the issue database.")
    parser_dbinit.set_defaults(func=unpack_dbinit)
    # Close an issue
    parser_close = subparsers.add_parser('close', help="Close an issue.")
    parser_close.add_argument('-c', '--comment', default=None,
        help='An optional closing comment to set on the ticket.')
    parser_close.set_defaults(func=unpack_close)
    parser_close.add_argument('id', nargs='?',
        help='The issue id to close.')
    # Purge an issue
    # parser_purge = subparsers.add_parser('purge', help="Purge an issue.")
    # parser_purge.set_defaults(func=unpack_purge)
    # ASCII Burndown chart.
    # parser_burn = subparsers.add_parser('burn', help="Show a burndown chart "
    #     "for a group of issues.")
    # parser_burn.set_defaults(func=unpack_burndown)
    args = parser.parse_args()
    # Dispatch to the handler registered for the chosen sub-command.
    args.func(issuedb, args)
if __name__ == '__main__':
    main()
|
dmayle/YAMLTrak
|
yamltrak/commands.py
|
Python
|
gpl-3.0
| 11,746 | 0.004001 |
from openflow.optin_manager.sfa.util.method import Method
from openflow.optin_manager.sfa.trust.credential import Credential
from openflow.optin_manager.sfa.util.parameter import Parameter
class Update(Method):
    """
    Registry call that updates an existing record.  Only the PLC data
    attached to the record is modified; the SFA fields (name, type, GID)
    remain fixed.
    @param cred credential string specifying rights of the caller
    @param record a record dictionary to be updated
    @return 1 if successful, faults otherwise
    """
    interfaces = ['registry']
    accepts = [
        Parameter(dict, "Record dictionary to be updated"),
        Parameter(str, "Credential string"),
        ]
    returns = Parameter(int, "1 if successful")
    def call(self, record_dict, creds):
        # The credential must grant "update" rights; invalid credentials
        # abort the call here.
        valid_creds = self.api.auth.checkCredentials(creds, "update")
        # The caller additionally needs permission on the target object.
        hrn = record_dict.get('hrn', '')
        self.api.auth.verify_object_permission(hrn)
        # Audit log: record who asked to update what.
        caller_gid = Credential(string=valid_creds[0]).get_gid_caller()
        origin_hrn = caller_gid.get_hrn()
        self.api.logger.info("interface: %s\tcaller-hrn: %s\ttarget-hrn: %s\tmethod-name: %s"%(self.api.interface, origin_hrn, hrn, self.name))
        return self.api.manager.Update(self.api, record_dict)
|
dana-i2cat/felix
|
optin_manager/src/python/openflow/optin_manager/sfa/methods/Update.py
|
Python
|
apache-2.0
| 1,376 | 0.009448 |
import click
from images_of import command, settings, Reddit
from images_of.discord_announcer import DiscordBot, DiscordBotSettings
@command
@click.option('-G', '--no-github', is_flag=True, help='Do not process github events')
@click.option('-M', '--no-modlog', is_flag=True, help='Do not process network modlog events')
@click.option('-O', '--no-oc', is_flag=True, help='Do not process network for OC submissions')
@click.option('-I', '--no-inbox', is_flag=True, help='Do not process inbox for messages/replies')
@click.option('-F', '--no-falsepositives', is_flag=True, help='Do not announce false-positive reports')
@click.option('-r', '--run-interval', help='Number of minutes to process items', default=1)
@click.option('-s', '--stats-interval', help='Number of minutes to send stats info', default=15)
def main(no_github, no_modlog, no_oc, no_inbox, no_falsepositives, run_interval, stats_interval):
    """Discord Announcer Bot to relay specified information to designated Discord channels."""
    # Authenticate a Reddit session identified by the network's user agent.
    user_agent = '{} Discord Announcer v1.1 - /u/{}'.format(
        settings.NETWORK_NAME, settings.USERNAME)
    reddit = Reddit(user_agent)
    reddit.oauth()
    announcer = DiscordBot(reddit)
    # Each --no-* flag disables one event stream; the intervals are in
    # minutes.
    config = DiscordBotSettings()
    config.DO_GITHUB = not no_github
    config.DO_MODLOG = not no_modlog
    config.DO_OC = not no_oc
    config.DO_INBOX = not no_inbox
    config.DO_FALSEPOS = not no_falsepositives
    config.RUN_INTERVAL = run_interval
    config.STATS_INTERVAL = stats_interval
    announcer.run(config)
if __name__ == '__main__':
    main()
|
scowcron/ImagesOfNetwork
|
images_of/entrypoints/discord_announce_bot.py
|
Python
|
mit
| 1,599 | 0.005629 |
# -*- coding: utf-8 -*-
# Package metadata: author credit and the current release version
# (the "-dev" suffix marks an unreleased development snapshot).
__author__ = 'Juho Vepsäläinen'
__version__ = '0.7.5-dev'
|
bebraw/speccer
|
speccer/__init__.py
|
Python
|
mit
| 86 | 0.011905 |
import numpy as np
import scipy.optimize as opt
import unittest
def allocate(a, c, initial_guess):
    """Compute an integer allocation balancing producers and consumers.

    Solves, via SLSQP, for the vector x minimising ||x + c||^2 subject to
    sum(x) == 0, per-element bounds derived from the desires in ``a``,
    and a "pareto" equality constraint pinning the total traded volume to
    the feasible maximum.  The rounded integer solution is written back
    into ``initial_guess`` in place.

    a -- desired amounts per agent (positive = offer, negative = demand)
    c -- reputation/credibility vector; the objective pushes x toward -c
    initial_guess -- list overwritten in place with the result
    """
    def sum_positives(x):
        return sum(i for i in x if i > 0)
    def sum_negatives(x):
        # Sign flipped: returns the magnitude of the negative part.
        return -sum(i for i in x if i < 0)
    # The tradable volume is limited by the smaller side of the market.
    pareto_ref = min(sum_positives(a), sum_negatives(a))
    def sqr_sum(o,c):
        # Objective: squared Euclidean norm of (o + c).
        aux = o + c
        return aux.dot(aux)
    # Bounds keep each allocation between 0 and the agent's desire; which
    # of the two shapes applies depends on whether offers or demands are
    # in surplus.
    def bound_n(x):
        if x < 0:
            return x, 0
        else:
            return x, x
    def bound_p(x):
        if x < 0:
            return x, x
        else:
            return 0, x
    bound = bound_p if sum_positives(a) > sum_negatives(a) else bound_n
    def pareto(x):
        # Zero exactly when the solution trades the full feasible volume.
        return sum_positives(x) + sum_negatives(x) - 2 * pareto_ref
    bounds = [bound(i) for i in a]
    initial_guess[:] = a # ignore initial_guess using a as initial_guess
    solution = opt.minimize(
        lambda x: sqr_sum(x, c),
        initial_guess,
        method='SLSQP',
        bounds=bounds,
        constraints=({'type': 'eq', 'fun': lambda x: sum(x)},
                     {'type': 'eq', 'fun': pareto}),
        options={'maxiter': 1000, 'disp': False}
    )
    # Round the continuous solution to integers; NaNs (solver failure on
    # a component) become 0.
    solution = map(lambda x: 0 if np.isnan(x) else int(round(x)), solution.x)
    initial_guess[:] = solution
class TestAllocate(unittest.TestCase):
    """Scenario tests for allocate(): surplus supply, surplus demand and
    a perfectly balanced market."""
    def _solve(self, desire, reputation):
        # allocate() writes its answer into the third argument in place.
        result = desire[:]
        allocate(desire, reputation, result)
        return result
    def test_lack_resource_int_reputation(self): # sum a > 0
        desire = [1, 3, 2, -1, -2, 1]
        reputation = [-10, -3, -5, 2, 4, 12]
        output = self._solve(desire, reputation)
        self.assertEqual(output, [1, 0, 2, -1, -2, 0])
        # The input vectors themselves must not be mutated.
        self.assertEqual(desire, [1, 3, 2, -1, -2, 1])
        self.assertEqual(reputation, [-10, -3, -5, 2, 4, 12])
        self.assertEqual(
            self._solve([-1, 3, -2, 1, 2, -1], [10, 3, 5, -2, -4, -12]),
            [-1, 1, -2, 1, 2, -1])
        self.assertEqual(
            self._solve([-1, 3, -2, 3, 3, -1], [10, 3, 7, -4, -4, -12]),
            [-1, 0, -2, 2, 2, -1])
    def test_lack_resources_float_reputation(self):
        output = self._solve(
            [1, 3, 2, 2, -3, 1, -5, 3, 0],
            [-6.2, -3.1, -3.1, -2.2, 8.6, 12.2, -4.3, 6.0, -7.9])
        self.assertEqual(output, [1, 3, 2, 2, -3, 0, -5, 0, 0])
    def test_lack_consumer(self): # sum a < 0
        output = self._solve([1, -3, 2, -1, -2, 1], [-10, -3, -5, 2, 4, 12])
        self.assertEqual(output, [1, -1, 2, -1, -2, 1])
    def test_efficient(self): # sum a = 0
        output = self._solve([1, -3, 2, -1, -2, 3], [-10, -3, -5, 2, 4, 12])
        self.assertEqual(output, [1, -3, 2, -1, -2, 3])
if __name__ == "__main__":
    # Run the allocation unit tests when executed as a script.
    unittest.main()
|
hugombarreto/credibility_allocation
|
allocators/optimization_allocation.py
|
Python
|
mit
| 3,025 | 0.000661 |
import falcon
import msgpack
import json
from btree import BinaryTree
import ZODB, ZODB.FileStorage
import transaction
from persistent import Persistent
import uuid
import urllib
import btree
from pprint import pprint
class Collection (object):
    """Falcon resource implementing an interactive list-ordering service.

    POST creates a session: the unordered data is seeded into a binary
    tree persisted in a per-session ZODB file (``trees/<token>.fs``).
    The client then answers pairwise "which is smaller?" questions via
    PUT until every element has been inserted; GET returns the current
    in-order traversal together with the pending comparison.  Every
    response carries hypermedia 'links' for the next possible actions.
    """
    def on_post(self, req, resp):
        # Create a new ordering session from a JSON body {"data": [...]}
        # and return the first comparison the client should perform.
        # req.stream corresponds to the WSGI wsgi.input environ variable,
        # and allows you to read bytes from the request body.
        #
        # See also: PEP 3333
        if req.content_length in (None, 0):
            # Nothing to do
            print "nothin"
            return
        body = req.stream.read()
        if not body:
            raise falcon.HTTPBadRequest('Empty request body',
                'A valid JSON document is required.')
        try:
            pprint(body)
            req.context['doc'] = json.loads(body.decode('utf-8'))
            # Each session gets a fresh UUID token naming its ZODB file.
            token = str(uuid.uuid4())
            storage = ZODB.FileStorage.FileStorage('trees/'+token+'.fs')
            db = ZODB.DB(storage)
            connection = db.open()
            root = connection.root
            unordered_list = req.context['doc']['data']
            # The first element popped from the input seeds the tree root.
            root.tree = BinaryTree(unordered_list.pop())
            tree = root.tree
            tree.unordered_list = unordered_list
            #tree.setList()
            if len(unordered_list) <2:
                raise falcon.HTTPBadRequest('Empty request body', 'We need more than 2 data elements')
        except (ValueError, UnicodeDecodeError):
            raise falcon.HTTPError(falcon.HTTP_753,
                'Malformed JSON',
                'Could not decode the request body. The '
                'JSON was incorrect or not encoded as '
                'UTF-8.')
        # Cursor state: 'current' is the node the pending element 'next'
        # is being compared against; 'treeroot' lets PUT restart descents.
        tree.current = tree
        tree.treeroot = tree.current
        tree.next = tree.unordered_list.pop()
        tree.ordered = False
        tree.jresp = {'remain':tree.unordered_list, 'item':tree.current.getNodeValue(), 'compare':tree.next, 'token':token, 'ordered':tree.ordered,
                'links':[{"self":"/order/"},
                    {'order':'/order/%s'%(urllib.quote(token))},
                    {'lt':'/order/%s/%s/%s'%(urllib.quote(token), tree.current.getNodeValue(), tree.next)},
                    {'gt':'/order/%s/%s/%s'%(urllib.quote(token), tree.next, tree.current.getNodeValue())}]}
        transaction.commit()
        # NOTE(review): connection/db/storage are only closed on the
        # happy path; an exception above leaks them (consider try/finally).
        connection.close()
        db.close()
        storage.close()
        resp.body = json.dumps(tree.jresp)
    def on_get(self, req, resp, token):
        # Return the in-order (sorted-so-far) view plus the pending
        # comparison for the session identified by *token*.
        storage = ZODB.FileStorage.FileStorage('trees/'+token+'.fs')
        db = ZODB.DB(storage)
        connection = db.open()
        root = connection.root
        if hasattr(root, 'tree'):
            tree = root.tree
        else:
            # No tree was ever stored under this token.
            resp.body = "Initialize first"
            connection.close()
            db.close()
            storage.close()
            return
        # In-order traversal yields the elements ordered so far.
        lst = list(btree.inorder(tree))
        tree.jresp = {'data':lst, 'item':tree.current.getNodeValue(), 'compare':tree.next, 'token':token, 'ordered':tree.ordered,
                'links':[{"new":"/order/"},
                    {"self":"/order/%s"%(urllib.quote(token))},
                    {"lt":"/order/%s/%s/%s"%(urllib.quote(token), tree.current.getNodeValue(), tree.next)},
                    {"gt":"/order/%s/%s/%s"%(urllib.quote(token), tree.next, tree.current.getNodeValue())}]}
        transaction.commit()
        connection.close()
        db.close()
        storage.close()
        resp.body = json.dumps(tree.jresp)
    def on_put(self, req, resp, token):
        # Record one comparison answer {"left": ..., "right": ...} --
        # 'left' is the smaller element -- and advance the insertion
        # cursor accordingly.
        if req.content_length in (None, 0):
            # Nothing to do
            return
        body = req.stream.read()
        if not body:
            raise falcon.HTTPBadRequest('Empty request body',
                'A valid JSON document is required.')
        try:
            req.context['doc'] = json.loads(body.decode('utf-8'))
            left = req.context['doc']['left']
            right = req.context['doc']['right']
        except (ValueError, UnicodeDecodeError):
            raise falcon.HTTPError(falcon.HTTP_753,
                'Malformed JSON',
                'Could not decode the request body. The '
                'JSON was incorrect or not encoded as '
                'UTF-8.')
        storage = ZODB.FileStorage.FileStorage('trees/'+token+'.fs')
        db = ZODB.DB(storage)
        connection = db.open()
        root = connection.root
        if hasattr(root, 'tree'):
            tree = root.tree
        else:
            resp.body = "Initialize first"
            connection.close()
            db.close()
            storage.close()
            return
        # Ignore answers that do not involve the pending element: re-send
        # the last response unchanged.
        if tree.next not in [left, right]:
            resp.body = json.dumps(tree.jresp)
            connection.close()
            db.close()
            storage.close()
            return
        # Descend or attach: when the pending element is greater/smaller
        # than the current node and the corresponding child slot is free,
        # insert it there, reset the cursor to the root and draw the next
        # element; otherwise move the cursor down one level.
        if left == tree.current.getNodeValue():
            if tree.current.getRightChild() == None:
                tree.current.insertRight(right)
                tree.current = tree.treeroot
                if len(tree.unordered_list)>0:
                    tree.next = tree.unordered_list.pop()
                else:
                    # Every element inserted: the list is fully ordered.
                    tree.ordered = True
                    tree.next = "None"
            else:
                tree.current = tree.current.getRightChild()
        elif right == tree.current.getNodeValue():
            if tree.current.getLeftChild()== None:
                tree.current.insertLeft(left)
                tree.current = tree.treeroot
                if len(tree.unordered_list)>0:
                    tree.next = tree.unordered_list.pop()
                else:
                    tree.ordered = True
                    tree.next = "None"
            else:
                tree.current = tree.current.getLeftChild()
        tree.jresp = {'remain':tree.unordered_list, 'item':tree.current.getNodeValue(), 'compare':tree.next, 'token':token, 'ordered':tree.ordered,
                'links':[{"new":"/order/"},
                    {"order":"/order/%s"%(urllib.quote(token))},
                    {"lt":"/order/%s/%s/%s"%(urllib.quote(token), tree.current.getNodeValue(), tree.next)},
                    {"gt":"/order/%s/%s/%s"%(urllib.quote(token), tree.next, tree.current.getNodeValue())}]}
        transaction.commit()
        connection.close()
        db.close()
        storage.close()
        resp.body = json.dumps(tree.jresp)
|
AgusRumayor/pypriorapi
|
order.py
|
Python
|
gpl-3.0
| 6,154 | 0.039812 |
import csv
import sys
import codecs
from pattern.en import sentiment
# Line-by-line sentiment tagger: read one text per line from the input
# file given on the command line and write "<line index>\t<label>" rows
# (label in {-1, 0, 1}) to the output file.
input_, output_ = str(sys.argv[1]), str(sys.argv[2])
# Use the paths supplied on the command line.  Previously input_/output_
# were parsed but the hard-coded '/input.txt' and '/output.txt' were
# opened instead, silently ignoring the arguments.
with codecs.open(output_, 'w') as fout:
    writer = csv.writer(fout, delimiter='\t')
    with codecs.open(input_, 'r') as fin:
        for l_i, line in enumerate(fin):
            line = line.strip()
            # pattern.en.sentiment returns (polarity, subjectivity);
            # polarity is a float in [-1.0, 1.0].
            result = sentiment(line)[0]
            # Collapse the continuous polarity into a hard class label.
            if result > 0:
                prediction = 1
            elif result < 0:
                prediction = -1
            else:
                prediction = 0
            writer.writerow([l_i, prediction])
|
oaraque/sentiment-analysis
|
pattern/run.py
|
Python
|
mit
| 639 | 0.001565 |
# expression.py
# Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010 Michael Bayer mike_mp@zzzcomputing.com
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Defines the base components of SQL expression trees.
All components are derived from a common base class
:class:`ClauseElement`. Common behaviors are organized
based on class hierarchies, in some cases via mixins.
All object construction from this package occurs via functions which
in some cases will construct composite :class:`ClauseElement` structures
together, and in other cases simply return a single :class:`ClauseElement`
constructed directly. The function interface affords a more "DSL-ish"
feel to constructing SQL expressions and also allows future class
reorganizations.
Even though classes are not constructed directly from the outside,
most classes which have additional public methods are considered to be
public (i.e. have no leading underscore). Other classes which are
"semi-public" are marked with a single leading underscore; these
classes usually have few or no public methods and are less guaranteed
to stay the same in future releases.
"""
import itertools, re
from operator import attrgetter
from sqlalchemy import util, exc #, types as sqltypes
from sqlalchemy.sql import operators
from sqlalchemy.sql.visitors import Visitable, cloned_traverse
import operator
functions, sql_util, sqltypes = None, None, None
DefaultDialect = None
# Public API of this module: ``from ... import *`` exposes exactly these
# names (constructor functions plus the core expression classes).
__all__ = [
    'Alias', 'ClauseElement', 'ColumnCollection', 'ColumnElement',
    'CompoundSelect', 'Delete', 'FromClause', 'Insert', 'Join', 'Select',
    'Selectable', 'TableClause', 'Update', 'alias', 'and_', 'asc', 'between',
    'bindparam', 'case', 'cast', 'column', 'delete', 'desc', 'distinct',
    'except_', 'except_all', 'exists', 'extract', 'func', 'modifier',
    'collate', 'insert', 'intersect', 'intersect_all', 'join', 'label',
    'literal', 'literal_column', 'not_', 'null', 'or_', 'outparam',
    'outerjoin', 'select', 'subquery', 'table', 'text', 'tuple_', 'union',
    'union_all', 'update', ]
# Sentinel marking statements whose autocommit behaviour is decided by
# parsing the statement text at compile time.
PARSE_AUTOCOMMIT = util._symbol('PARSE_AUTOCOMMIT')
def desc(column):
    """Produce a descending ``ORDER BY`` clause element for *column*.

    Typical usage::

        order_by = [desc(table1.mycol)]
    """
    clause = _UnaryExpression(column, modifier=operators.desc_op)
    return clause
def asc(column):
    """Produce an ascending ``ORDER BY`` clause element for *column*.

    Typical usage::

        order_by = [asc(table1.mycol)]
    """
    clause = _UnaryExpression(column, modifier=operators.asc_op)
    return clause
def outerjoin(left, right, onclause=None):
    """Return an ``OUTER JOIN`` clause element joining *left* and *right*.

    The result is a :class:`Join` with its outer flag set.  When
    *onclause* is omitted, the ``ON`` criterion is derived from the
    foreign key relationships established between the two sides.

    The same construct is available via the :func:`outerjoin()` method on
    any :class:`FromClause`; further joins may be chained through the
    :func:`join()` and :func:`outerjoin()` methods of the returned
    :class:`Join` object.
    """
    return Join(left, right, onclause, isouter=True)
def join(left, right, onclause=None, isouter=False):
    """Return a ``JOIN`` clause element (an inner join unless *isouter*).

    The result is a :class:`Join`.  When *onclause* is omitted, the
    ``ON`` criterion is derived from the foreign key relationships
    established between the two sides.

    The same construct is available via the :func:`join()` method on any
    :class:`FromClause`; further joins may be chained through the
    :func:`join()` and :func:`outerjoin()` methods of the returned
    :class:`Join` object.
    """
    return Join(left, right, onclause, isouter=isouter)
def select(columns=None, whereclause=None, from_obj=None, **kwargs):
    """Returns a ``SELECT`` clause element.

    Similar functionality is also available via the :func:`select()`
    method on any :class:`FromClause`.

    The returned object is an instance of :class:`Select`.

    All arguments which accept :class:`ClauseElement` arguments also accept
    string arguments, which will be converted as appropriate into
    either :func:`text()` or :func:`literal_column()` constructs.

    :param columns:
      A list of :class:`ClauseElement` objects, typically
      :class:`ColumnElement` objects or subclasses, which will form the
      columns clause of the resulting statement. For all members which are
      instances of :class:`Selectable`, the individual :class:`ColumnElement`
      members of the :class:`Selectable` will be added individually to the
      columns clause. For example, specifying a
      :class:`~sqlalchemy.schema.Table` instance will result in all the
      contained :class:`~sqlalchemy.schema.Column` objects within to be added
      to the columns clause.

      This argument is not present on the form of :func:`select()`
      available on :class:`~sqlalchemy.schema.Table`.

    :param whereclause:
      A :class:`ClauseElement` expression which will be used to form the
      ``WHERE`` clause.

    :param from_obj:
      A list of :class:`ClauseElement` objects which will be added to the
      ``FROM`` clause of the resulting statement. Note that "from" objects are
      automatically located within the columns and whereclause ClauseElements.
      Use this parameter to explicitly specify "from" objects which are not
      automatically locatable. This could include
      :class:`~sqlalchemy.schema.Table` objects that aren't otherwise present,
      or :class:`Join` objects whose presence will supercede that of the
      :class:`~sqlalchemy.schema.Table` objects already located in the other
      clauses.

    :param autocommit:
      Deprecated.  Use .execution_options(autocommit=<True|False>)
      to set the autocommit option.

    :param prefixes:
      a list of strings or :class:`ClauseElement` objects to include
      directly after the SELECT keyword in the generated statement,
      for dialect-specific query features.

    :param distinct=False:
      when ``True``, applies a ``DISTINCT`` qualifier to the columns
      clause of the resulting statement.

    :param use_labels=False:
      when ``True``, the statement will be generated using labels
      for each column in the columns clause, which qualify each
      column with its parent table's (or aliases) name so that name
      conflicts between columns in different tables don't occur.
      The format of the label is <tablename>_<column>.  The "c"
      collection of the resulting :class:`Select` object will use these
      names as well for targeting column members.

    :param for_update=False:
      when ``True``, applies ``FOR UPDATE`` to the end of the
      resulting statement.  Certain database dialects also support
      alternate values for this parameter, for example mysql
      supports "read" which translates to ``LOCK IN SHARE MODE``,
      and oracle supports "nowait" which translates to ``FOR UPDATE
      NOWAIT``.

    :param correlate=True:
      indicates that this :class:`Select` object should have its
      contained :class:`FromClause` elements "correlated" to an enclosing
      :class:`Select` object.  This means that any :class:`ClauseElement`
      instance within the "froms" collection of this :class:`Select`
      which is also present in the "froms" collection of an
      enclosing select will not be rendered in the ``FROM`` clause
      of this select statement.

    :param group_by:
      a list of :class:`ClauseElement` objects which will comprise the
      ``GROUP BY`` clause of the resulting select.

    :param having:
      a :class:`ClauseElement` that will comprise the ``HAVING`` clause
      of the resulting select when ``GROUP BY`` is used.

    :param order_by:
      a scalar or list of :class:`ClauseElement` objects which will
      comprise the ``ORDER BY`` clause of the resulting select.

    :param limit=None:
      a numerical value which usually compiles to a ``LIMIT``
      expression in the resulting select.  Databases that don't
      support ``LIMIT`` will attempt to provide similar
      functionality.

    :param offset=None:
      a numeric value which usually compiles to an ``OFFSET``
      expression in the resulting select.  Databases that don't
      support ``OFFSET`` will attempt to provide similar
      functionality.

    :param bind=None:
      an ``Engine`` or ``Connection`` instance to which the
      resulting ``Select `` object will be bound.  The ``Select``
      object will otherwise automatically bind to whatever
      ``Connectable`` instances can be located within its contained
      :class:`ClauseElement` members.

    """
    # The default for from_obj used to be a shared mutable list (``[]``);
    # use None and create a fresh list per call so that no state can leak
    # between invocations if the list is ever mutated downstream.
    if from_obj is None:
        from_obj = []
    return Select(columns, whereclause=whereclause, from_obj=from_obj,
                  **kwargs)
def subquery(alias, *args, **kwargs):
    """Build a :class:`Select` and wrap it in an :class:`Alias`.

    :param alias: the name to give the alias.

    All remaining positional and keyword arguments are forwarded to the
    :func:`select` construction function.
    """
    stmt = Select(*args, **kwargs)
    return stmt.alias(alias)
def insert(table, values=None, inline=False, **kwargs):
    """Construct an :class:`Insert` statement against *table*.

    Similar functionality is available via the :func:`insert()` method on
    :class:`~sqlalchemy.schema.Table`.

    :param table: the table to be inserted into.

    :param values: optional dictionary of column specifications.  When
      omitted, the column specifications are determined from the bind
      parameters used at compile time, or failing that from the full list
      of table columns.  Keys may be :class:`~sqlalchemy.schema.Column`
      objects or their string identifiers; each value may be a literal, a
      Column object, or a ``SELECT`` statement (which is correlated
      against this ``INSERT`` when it references the same table).
      Compile-time bind parameters override entries given here on a
      per-key basis.  The :meth:`~Insert.values()` generative method is
      an alternative spelling.

    :param inline: if True, SQL defaults will be compiled 'inline' into
      the statement and not pre-executed.

    :param prefixes: a list of modifier keywords to be inserted between
      INSERT and INTO; the :meth:`~Insert.prefix_with` generative method
      may be used instead.
    """
    stmt = Insert(table, values, inline=inline, **kwargs)
    return stmt
def update(table, whereclause=None, values=None, inline=False, **kwargs):
    """Construct an :class:`Update` statement against *table*.

    Similar functionality is available via the :func:`update()` method on
    :class:`~sqlalchemy.schema.Table`.

    :param table: the table to be updated.

    :param whereclause: a :class:`ClauseElement` describing the ``WHERE``
      condition; the :meth:`~Update.where()` generative method may be
      used instead.

    :param values: optional dictionary of ``SET`` conditions.  When
      omitted, the ``SET`` conditions are determined from the bind
      parameters used at compile time, or failing that from the full list
      of table columns.  Keys may be :class:`~sqlalchemy.schema.Column`
      objects or their string identifiers; each value may be a literal, a
      Column object, or a ``SELECT`` statement (which is correlated
      against this ``UPDATE`` when it references the same table).
      Compile-time bind parameters override entries given here on a
      per-key basis.  The :meth:`~Update.values()` generative method is
      an alternative spelling.

    :param inline: if True, SQL defaults will be compiled 'inline' into
      the statement and not pre-executed.
    """
    stmt = Update(
        table,
        whereclause=whereclause,
        values=values,
        inline=inline,
        **kwargs)
    return stmt
def delete(table, whereclause=None, **kwargs):
    """Construct a :class:`Delete` statement against *table*.

    Similar functionality is available via the :func:`delete()` method on
    :class:`~sqlalchemy.schema.Table`.

    :param table: the table to delete rows from.

    :param whereclause: a :class:`ClauseElement` describing the ``WHERE``
      condition; the :meth:`~Delete.where()` generative method may be
      used instead.
    """
    stmt = Delete(table, whereclause, **kwargs)
    return stmt
def and_(*clauses):
    """Combine the given clauses with the SQL ``AND`` operator.

    A single clause is returned unchanged.  The overloaded ``&`` operator
    on :class:`_CompareMixin` subclasses produces the same construct.
    """
    if len(clauses) != 1:
        return BooleanClauseList(operator=operators.and_, *clauses)
    return clauses[0]
def or_(*clauses):
    """Combine the given clauses using the SQL ``OR`` operator.

    The overloaded ``|`` operator on :class:`_CompareMixin` subclasses
    produces the same result.  A single clause is returned unchanged.
    """
    if len(clauses) != 1:
        return BooleanClauseList(operator=operators.or_, *clauses)
    return clauses[0]
def not_(clause):
    """Return a negation of the given clause, i.e. ``NOT(clause)``.

    The ``~`` operator is also overloaded on all
    :class:`_CompareMixin` subclasses to produce the
    same result.  Plain values are first coerced to bound
    parameters via :func:`_literal_as_binds`.
    """
    return operators.inv(_literal_as_binds(clause))
def distinct(expr):
    """Return a ``DISTINCT`` unary clause for the given expression."""
    bound = _literal_as_binds(expr)
    return _UnaryExpression(bound, operator=operators.distinct_op,
                            type_=bound.type)
def between(ctest, cleft, cright):
    """Return a ``BETWEEN`` predicate clause.

    Produces SQL of the form ``clausetest BETWEEN clauseleft AND
    clauseright``.  The :func:`between()` method on
    :class:`_CompareMixin` subclasses provides the same capability.
    """
    return _literal_as_binds(ctest).between(cleft, cright)
def case(whens, value=None, else_=None):
    """Produce a ``CASE`` statement.

    whens
      A sequence of pairs, or alternatively a dict,
      to be translated into "WHEN / THEN" clauses.

    value
      Optional for simple case statements, produces
      a column expression as in "CASE <expr> WHEN ..."

    else\_
      Optional as well, for case defaults produces
      the "ELSE" portion of the "CASE" statement.

    The expressions used for THEN and ELSE,
    when specified as strings, will be interpreted
    as bound values. To specify textual SQL expressions
    for these, use the :func:`literal_column`
    construct.

    The expressions used for the WHEN criterion
    may only be literal strings when "value" is
    present, i.e. CASE table.somecol WHEN "x" THEN "y".
    Otherwise, literal strings are not accepted
    in this position, and either the text(<string>)
    or literal(<string>) constructs must be used to
    interpret raw string values.

    Usage examples::

      case([(orderline.c.qty > 100, item.c.specialprice),
            (orderline.c.qty > 10, item.c.bulkprice)
          ], else_=item.c.regularprice)

      case(value=emp.c.type, whens={
              'engineer': emp.c.salary * 1.1,
              'manager': emp.c.salary * 3,
          })

    Using :func:`literal_column()`, to allow for databases that
    do not support bind parameters in the ``then`` clause.  The type
    can be specified which determines the type of the :func:`case()`
    construct overall::

        case([(orderline.c.qty > 100,
                literal_column("'greaterthan100'", String)),
              (orderline.c.qty > 10, literal_column("'greaterthan10'",
                String))
            ], else_=literal_column("'lessthan10'", String))

    """
    return _Case(whens, value=value, else_=else_)
def cast(clause, totype, **kwargs):
    """Return a ``CAST`` function.

    Equivalent of SQL ``CAST(clause AS totype)``.

    Use with a :class:`~sqlalchemy.types.TypeEngine` subclass, i.e::

      cast(table.c.unit_price * table.c.qty, Numeric(10,4))

    or::

      cast(table.c.timestamp, DATE)

    """
    return _Cast(clause, totype, **kwargs)
def extract(field, expr):
    """Return the clause ``extract(field FROM expr)``."""
    return _Extract(field, expr)
def collate(expression, collation):
    """Return the clause ``expression COLLATE collation``."""
    left = _literal_as_binds(expression)
    right = _literal_as_text(collation)
    return _BinaryExpression(left, right, operators.collate,
                             type_=left.type)
def exists(*args, **kwargs):
    """Return an ``EXISTS`` clause as applied to a :class:`Select` object.

    Calling styles are of the following forms::

        # use on an existing select()
        s = select([table.c.col1]).where(table.c.col2==5)
        s = exists(s)

        # construct a select() at once
        exists(['*'], **select_arguments).where(criterion)

        # columns argument is optional, generates "EXISTS (SELECT *)"
        # by default.
        exists().where(table.c.col2==5)

    """
    return _Exists(*args, **kwargs)
def union(*selects, **kwargs):
    """Return a ``UNION`` of multiple selectables.

    The returned object is an instance of
    :class:`CompoundSelect`.

    A similar :func:`union()` method is available on all
    :class:`FromClause` subclasses.

    \*selects
      a list of :class:`Select` instances.

    \**kwargs
      available keyword arguments are the same as those of
      :func:`select`.

    """
    return CompoundSelect(CompoundSelect.UNION, *selects, **kwargs)
def union_all(*selects, **kwargs):
    """Return a ``UNION ALL`` of multiple selectables.

    The returned object is an instance of
    :class:`CompoundSelect`.

    A similar :func:`union_all()` method is available on all
    :class:`FromClause` subclasses.

    \*selects
      a list of :class:`Select` instances.

    \**kwargs
      available keyword arguments are the same as those of
      :func:`select`.

    """
    return CompoundSelect(CompoundSelect.UNION_ALL, *selects, **kwargs)
def except_(*selects, **kwargs):
    """Return an ``EXCEPT`` of multiple selectables.

    The returned object is an instance of
    :class:`CompoundSelect`.

    \*selects
      a list of :class:`Select` instances.

    \**kwargs
      available keyword arguments are the same as those of
      :func:`select`.

    """
    return CompoundSelect(CompoundSelect.EXCEPT, *selects, **kwargs)
def except_all(*selects, **kwargs):
    """Return an ``EXCEPT ALL`` of multiple selectables.

    The returned object is an instance of
    :class:`CompoundSelect`.

    \*selects
      a list of :class:`Select` instances.

    \**kwargs
      available keyword arguments are the same as those of
      :func:`select`.

    """
    return CompoundSelect(CompoundSelect.EXCEPT_ALL, *selects, **kwargs)
def intersect(*selects, **kwargs):
    """Return an ``INTERSECT`` of multiple selectables.

    The returned object is an instance of
    :class:`CompoundSelect`.

    \*selects
      a list of :class:`Select` instances.

    \**kwargs
      available keyword arguments are the same as those of
      :func:`select`.

    """
    return CompoundSelect(CompoundSelect.INTERSECT, *selects, **kwargs)
def intersect_all(*selects, **kwargs):
    """Return an ``INTERSECT ALL`` of multiple selectables.

    The returned object is an instance of
    :class:`CompoundSelect`.

    \*selects
      a list of :class:`Select` instances.

    \**kwargs
      available keyword arguments are the same as those of
      :func:`select`.

    """
    return CompoundSelect(CompoundSelect.INTERSECT_ALL, *selects, **kwargs)
def alias(selectable, alias=None):
    """Return an :class:`Alias` object.

    An :class:`Alias` represents any :class:`FromClause`
    with an alternate name assigned within SQL, typically using the ``AS``
    clause when generated, e.g. ``SELECT * FROM table AS aliasname``.

    Similar functionality is available via the :func:`alias()` method
    available on all :class:`FromClause` subclasses.

    selectable
      any :class:`FromClause` subclass, such as a table, select
      statement, etc..

    alias
      string name to be assigned as the alias.  If ``None``, a
      random name will be generated.

    """
    return Alias(selectable, alias=alias)
def literal(value, type_=None):
    """Return a literal clause, bound to a bind parameter.

    Literal clauses are created automatically when non-
    :class:`ClauseElement` objects (such as strings, ints, dates, etc.)
    are used in a comparison operation with a :class:`_CompareMixin`
    subclass, such as a :class:`~sqlalchemy.schema.Column` object.
    Use this function to force the generation of a literal clause,
    which will be created as a :class:`_BindParamClause` with a bound
    value.

    :param value: the value to be bound. Can be any Python object supported
      by the underlying DB-API, or is translatable via the given type
      argument.

    :param type\_: an optional :class:`~sqlalchemy.types.TypeEngine` which
      will provide bind-parameter translation for this literal.

    """
    return _BindParamClause(None, value, type_=type_, unique=True)
def tuple_(*expr):
    """Return a SQL tuple.

    Main usage is to produce a composite IN construct::

        tuple_(table.c.col1, table.c.col2).in_(
            [(1, 2), (5, 12), (10, 19)]
        )

    """
    return _Tuple(*expr)
def label(name, obj):
    """Return a :class:`_Label` object for the
    given :class:`ColumnElement`.

    A label changes the name of an element in the columns clause of a
    ``SELECT`` statement, typically via the ``AS`` SQL keyword.

    This functionality is more conveniently available via the
    :func:`label()` method on :class:`ColumnElement`.

    name
      label name

    obj
      a :class:`ColumnElement`.

    """
    return _Label(name, obj)
def column(text, type_=None):
    """Return a textual column clause, as would be in the columns clause of
    a ``SELECT`` statement.

    The object returned is an instance of
    :class:`ColumnClause`, which represents the
    "syntactical" portion of the schema-level
    :class:`~sqlalchemy.schema.Column` object.

    text
      the name of the column.  Quoting rules will be applied to the
      clause like any other column name.  For textual column
      constructs that are not to be quoted, use the
      :func:`literal_column` function.

    type\_
      an optional :class:`~sqlalchemy.types.TypeEngine` object which will
      provide result-set translation for this column.

    """
    return ColumnClause(text, type_=type_)
def literal_column(text, type_=None):
    """Return a textual column expression, as would be in the columns
    clause of a ``SELECT`` statement.

    The object returned supports further expressions in the same way as any
    other column object, including comparison, math and string operations.
    The type\_ parameter is important to determine proper expression
    behavior (such as, '+' means string concatenation or numerical addition
    based on the type).

    text
      the text of the expression; can be any SQL expression.  Quoting rules
      will not be applied.  To specify a column-name expression which should
      be subject to quoting rules, use the
      :func:`column` function.

    type\_
      an optional :class:`~sqlalchemy.types.TypeEngine` object which will
      provide result-set translation and additional expression semantics for
      this column. If left as None the type will be NullType.

    """
    return ColumnClause(text, type_=type_, is_literal=True)
def table(name, *columns):
    """Return a :class:`TableClause` object.

    This is a primitive version of the :class:`~sqlalchemy.schema.Table`
    object, which is a subclass of this object.
    """
    return TableClause(name, *columns)
def bindparam(key, value=None, type_=None, unique=False, required=False):
    """Create a bind parameter clause with the given key.

    value
      a default value for this bind parameter.  a bindparam with a
      value is called a ``value-based bindparam``.

    type\_
      a sqlalchemy.types.TypeEngine object indicating the type of this
      bind param, will invoke type-specific bind parameter processing

    unique
      if True, bind params sharing the same name will have their
      underlying ``key`` modified to a uniquely generated name.
      mostly useful with value-based bind params.

    required
      A value is required at execution time.

    """
    # a ColumnClause given as the key contributes both its name and its
    # type; the explicit type_ argument is superseded in that case.
    if isinstance(key, ColumnClause):
        key, type_ = key.name, key.type
    return _BindParamClause(key, value, type_=type_,
                            unique=unique, required=required)
def outparam(key, type_=None):
    """Create an 'OUT' parameter for usage in functions (stored procedures),
    for databases which support them.

    The ``outparam`` can be used like a regular function parameter.
    The "output" value will be available from the
    :class:`~sqlalchemy.engine.ResultProxy` object via its
    ``out_parameters`` attribute, which returns a dictionary containing
    the values.
    """
    return _BindParamClause(
        key, None, type_=type_, unique=False, isoutparam=True)
def text(text, bind=None, *args, **kwargs):
    """Create a SQL construct that is represented by a literal string.

    E.g.::

        t = text("SELECT * FROM users")
        result = connection.execute(t)

    The advantages :func:`text` provides over a plain string are
    backend-neutral support for bind parameters, per-statement
    execution options, as well as
    bind parameter and result-column typing behavior, allowing
    SQLAlchemy type constructs to play a role when executing
    a statement that is specified literally.

    Bind parameters are specified by name, using the format ``:name``.
    E.g.::

        t = text("SELECT * FROM users WHERE id=:user_id")
        result = connection.execute(t, user_id=12)

    To invoke SQLAlchemy typing logic for bind parameters, the
    ``bindparams`` list allows specification of :func:`bindparam`
    constructs which specify the type for a given name::

        t = text("SELECT id FROM users WHERE updated_at>:updated",
                    bindparams=[bindparam('updated', DateTime())]
                )

    Typing during result row processing is also an important concern.
    Result column types
    are specified using the ``typemap`` dictionary, where the keys
    match the names of columns.  These names are taken from what
    the DBAPI returns as ``cursor.description``::

        t = text("SELECT id, name FROM users",
                typemap={
                    'id':Integer,
                    'name':Unicode
                }
        )

    The :func:`text` construct is used internally for most cases when
    a literal string is specified for part of a larger query, such as
    within :func:`select()`, :func:`update()`,
    :func:`insert()` or :func:`delete()`.   In those cases, the same
    bind parameter syntax is applied::

        s = select([users.c.id, users.c.name]).where("id=:user_id")
        result = connection.execute(s, user_id=12)

    Using :func:`text` explicitly usually implies the construction
    of a full, standalone statement.   As such, SQLAlchemy refers
    to it as an :class:`Executable` object, and it supports
    the :meth:`Executable.execution_options` method.  For example,
    a :func:`text` construct that should be subject to "autocommit"
    can be set explicitly so using the ``autocommit`` option::

        t = text("EXEC my_procedural_thing()").\\
                execution_options(autocommit=True)

    Note that SQLAlchemy's usual "autocommit" behavior applies to
    :func:`text` constructs - that is, statements which begin
    with a phrase such as ``INSERT``, ``UPDATE``, ``DELETE``,
    or a variety of other phrases specific to certain backends, will
    be eligible for autocommit if no transaction is in progress.

    :param text:
      the text of the SQL statement to be created.  use ``:<param>``
      to specify bind parameters; they will be compiled to their
      engine-specific format.

    :param autocommit:
      Deprecated.  Use .execution_options(autocommit=<True|False>)
      to set the autocommit option.

    :param bind:
      an optional connection or engine to be used for this text query.

    :param bindparams:
      a list of :func:`bindparam()` instances which can be used to define
      the types and/or initial values for the bind parameters within
      the textual statement; the keynames of the bindparams must match
      those within the text of the statement.  The types will be used
      for pre-processing on bind values.

    :param typemap:
      a dictionary mapping the names of columns represented in the
      columns clause of a ``SELECT`` statement  to type objects,
      which will be used to perform post-processing on columns within
      the result set.   This argument applies to any expression
      that returns result sets.

    """
    return _TextClause(text, bind=bind, *args, **kwargs)
def null():
    """Return a :class:`_Null` object, which compiles to ``NULL`` in a sql
    statement.
    """
    return _Null()
class _FunctionGenerator(object):
    """Generate :class:`Function` objects based on getattr calls.

    Attribute access accumulates a dotted name path (e.g.
    ``func.my_schema.my_func``); calling the generator produces the
    corresponding :class:`Function`.
    """
    def __init__(self, **opts):
        # accumulated attribute-access path; private name is mangled to
        # _FunctionGenerator__names
        self.__names = []
        self.opts = opts
    def __getattr__(self, name):
        # passthru __ attributes; fixes pydoc
        if name.startswith('__'):
            try:
                return self.__dict__[name]
            except KeyError:
                raise AttributeError(name)
        elif name.endswith('_'):
            # trailing underscore allows names that shadow Python keywords
            name = name[0:-1]
        # return a new generator extended with this path element
        f = _FunctionGenerator(**self.opts)
        f.__names = list(self.__names) + [name]
        return f
    def __call__(self, *c, **kwargs):
        o = self.opts.copy()
        o.update(kwargs)
        if len(self.__names) == 1:
            # single-name path: check for a "generic" function class of
            # the same name in sqlalchemy.sql.functions (lazily imported)
            global functions
            if functions is None:
                from sqlalchemy.sql import functions
            func = getattr(functions, self.__names[-1].lower(), None)
            if func is not None and \
                isinstance(func, type) and \
                issubclass(func, Function):
                return func(*c, **o)
        # fall back to a generic Function; leading path elements become
        # package names (e.g. schema qualifiers)
        return Function(self.__names[-1],
                        packagenames=self.__names[0:-1], *c, **o)
# "func" global - i.e. func.count(); attribute access generates SQL
# function constructs on the fly.
func = _FunctionGenerator()
# "modifier" global - i.e. modifier.distinct; like func but does not
# render grouping parenthesis.
# TODO: use UnaryExpression for this instead ?
modifier = _FunctionGenerator(group=False)
class _generated_label(unicode):
    """A unicode subclass used to identify dynamically generated names.

    Isinstance checks against this type allow helpers (see
    _escape_for_generated) to distinguish internally generated labels
    from user-supplied names.  (Py2K: subclasses ``unicode``.)
    """
def _escape_for_generated(x):
    """Double any '%' characters in a plain name; names marked as
    dynamically generated (:class:`_generated_label`) pass through
    unchanged."""
    if isinstance(x, _generated_label):
        return x
    return x.replace('%', '%%')
def _clone(element):
    """Default clone callable: delegate to the element's _clone() method."""
    return element._clone()
def _expand_cloned(elements):
"""expand the given set of ClauseElements to be the set of all 'cloned'
predecessors.
"""
return itertools.chain(*[x._cloned_set for x in elements])
def _select_iterables(elements):
"""expand tables into individual columns in the
given list of column expressions.
"""
return itertools.chain(*[c._select_iterable for c in elements])
def _cloned_intersection(a, b):
    """Return the intersection of sets a and b, counting any overlap
    between 'cloned' predecessors.

    The returned set is expressed in terms of the entities present
    within 'a'.
    """
    overlap = set(_expand_cloned(a)).intersection(_expand_cloned(b))
    result = set()
    for elem in a:
        if overlap.intersection(elem._cloned_set):
            result.add(elem)
    return result
def _is_literal(element):
    """True if *element* is a plain value: neither a ClauseElement
    (Visitable) nor an object adaptable via ``__clause_element__``."""
    return not (isinstance(element, Visitable) or
                hasattr(element, '__clause_element__'))
def _from_objects(*elements):
return itertools.chain(*[element._from_objects for element in elements])
def _labeled(element):
if not hasattr(element, 'name'):
return element.label(None)
else:
return element
def _column_as_key(element):
    """Resolve *element* to a string key: strings pass through; column-like
    objects (after unwrapping ``__clause_element__``) yield their .key."""
    if isinstance(element, basestring):
        return element
    if hasattr(element, '__clause_element__'):
        element = element.__clause_element__()
    return element.key
def _literal_as_text(element):
    """Coerce *element* to a textual clause: ``__clause_element__``
    adapters are unwrapped, non-ClauseElement values become a
    _TextClause, and ClauseElements pass through unchanged."""
    if hasattr(element, '__clause_element__'):
        return element.__clause_element__()
    elif not isinstance(element, Visitable):
        return _TextClause(unicode(element))
    else:
        return element
def _clause_element_as_expr(element):
if hasattr(element, '__clause_element__'):
return element.__clause_element__()
else:
return element
def _literal_as_column(element):
    """Coerce *element* to a column expression: unwrap
    ``__clause_element__`` adapters, wrap plain values via
    :func:`literal_column`, and pass ClauseElements through."""
    if hasattr(element, '__clause_element__'):
        return element.__clause_element__()
    if not isinstance(element, Visitable):
        return literal_column(str(element))
    return element
def _literal_as_binds(element, name=None, type_=None):
    """Coerce *element* to a bound parameter: unwrap
    ``__clause_element__`` adapters, map ``None`` to a NULL clause,
    wrap other plain values in a unique :class:`_BindParamClause`,
    and pass ClauseElements through unchanged."""
    if hasattr(element, '__clause_element__'):
        return element.__clause_element__()
    if isinstance(element, Visitable):
        return element
    if element is None:
        return null()
    return _BindParamClause(name, element, type_=type_, unique=True)
def _type_from_args(args):
    """Return the type of the first argument whose type is not NullType;
    falls back to ``sqltypes.NullType`` when all arguments are untyped."""
    for a in args:
        if not isinstance(a.type, sqltypes.NullType):
            return a.type
    else:
        # NOTE(review): this returns the NullType class itself rather than
        # an instance (e.g. sqltypes.NULLTYPE) - confirm callers accept a
        # type class here.
        return sqltypes.NullType
def _no_literals(element):
    """Coerce *element* to a ClauseElement, rejecting plain values.

    ``__clause_element__`` adapters are unwrapped; a raw literal raises
    ArgumentError directing the caller to text() or literal().
    """
    if hasattr(element, '__clause_element__'):
        return element.__clause_element__()
    elif not isinstance(element, Visitable):
        raise exc.ArgumentError("Ambiguous literal: %r.  Use the 'text()' "
                                "function to indicate a SQL expression "
                                "literal, or 'literal()' to indicate a "
                                "bound value." % element)
    else:
        return element
def _only_column_elements(element, name):
    """Coerce *element* to a :class:`ColumnElement`, unwrapping
    ``__clause_element__`` adapters; raise ArgumentError (mentioning the
    argument *name*) for anything else."""
    if hasattr(element, '__clause_element__'):
        element = element.__clause_element__()
    if not isinstance(element, ColumnElement):
        raise exc.ArgumentError("Column-based expression object expected "
                                "for argument '%s'; "
                                "got: '%s', type %s" % (name, element, type(element)))
    return element
def _corresponding_column_or_error(fromclause, column,
                                   require_embedded=False):
    """Locate the column in *fromclause* corresponding to *column*,
    raising InvalidRequestError instead of returning None when no
    correspondence exists."""
    c = fromclause.corresponding_column(column,
                                        require_embedded=require_embedded)
    if c is None:
        raise exc.InvalidRequestError(
            "Given column '%s', attached to table '%s', "
            "failed to locate a corresponding column from table '%s'"
            %
            (column,
             getattr(column, 'table', None), fromclause.description)
        )
    return c
@util.decorator
def _generative(fn, *args, **kw):
    """Mark a method as generative."""
    # operate on a copy of self (args[0]) and return the copy, leaving
    # the original object unmodified
    self = args[0]._generate()
    fn(self, *args[1:], **kw)
    return self
def is_column(col):
    """True if ``col`` is an instance of :class:`ColumnElement`."""
    return isinstance(col, ColumnElement)
class ClauseElement(Visitable):
    """Base class for elements of a programmatically constructed SQL
    expression.
    """
    __visit_name__ = 'clause'
    # default annotations; _annotate() produces an Annotated copy
    _annotations = {}
    # True only for Executable subclasses
    supports_execution = False
    # FROM clauses this element derives from; overridden by subclasses
    _from_objects = []
    # explicitly assigned Engine/Connection, if any
    _bind = None
    def _clone(self):
        """Create a shallow copy of this ClauseElement.

        This method may be used by a generative API.  It's also used as
        part of the "deep" copy afforded by a traversal that combines
        the _copy_internals() method.
        """
        c = self.__class__.__new__(self.__class__)
        c.__dict__ = self.__dict__.copy()
        c.__dict__.pop('_cloned_set', None)
        # this is a marker that helps to "equate" clauses to each other
        # when a Select returns its list of FROM clauses.  the cloning
        # process leaves around a lot of remnants of the previous clause
        # typically in the form of column expressions still attached to the
        # old table.
        c._is_clone_of = self
        return c
    @property
    def _constructor(self):
        """return the 'constructor' for this ClauseElement.

        This is for the purposes of creating a new object of
        this type.   Usually, it's just the element's __class__.
        However, the "Annotated" version of the object overrides
        to return the class of its proxied element.
        """
        return self.__class__
    @util.memoized_property
    def _cloned_set(self):
        """Return the set consisting of all cloned ancestors of this
        ClauseElement.

        Includes this ClauseElement.  This accessor tends to be used for
        FromClause objects to identify 'equivalent' FROM clauses, regardless
        of transformative operations.
        """
        s = util.column_set()
        f = self
        # walk the _is_clone_of chain set up by _clone()
        while f is not None:
            s.add(f)
            f = getattr(f, '_is_clone_of', None)
        return s
    def __getstate__(self):
        # clone lineage is not preserved across pickling
        d = self.__dict__.copy()
        d.pop('_is_clone_of', None)
        return d
    if util.jython:
        def __hash__(self):
            """Return a distinct hash code.

            ClauseElements may have special equality comparisons which
            makes us rely on them having unique hash codes for use in
            hash-based collections. Stock __hash__ doesn't guarantee
            unique values on platforms with moving GCs.
            """
            return id(self)
    def _annotate(self, values):
        """return a copy of this ClauseElement with the given annotations
        dictionary.
        """
        # sql_util is imported lazily to avoid a circular import
        global sql_util
        if sql_util is None:
            from sqlalchemy.sql import util as sql_util
        return sql_util.Annotated(self, values)
    def _deannotate(self):
        """return a copy of this ClauseElement with an empty annotations
        dictionary.
        """
        return self._clone()
    def unique_params(self, *optionaldict, **kwargs):
        """Return a copy with :func:`bindparam()` elements replaced.

        Same functionality as ``params()``, except adds `unique=True`
        to affected bind parameters so that multiple statements can be
        used.
        """
        return self._params(True, optionaldict, kwargs)
    def params(self, *optionaldict, **kwargs):
        """Return a copy with :func:`bindparam()` elements replaced.

        Returns a copy of this ClauseElement with :func:`bindparam()`
        elements replaced with values taken from the given dictionary::

          >>> clause = column('x') + bindparam('foo')
          >>> print clause.compile().params
          {'foo':None}
          >>> print clause.params({'foo':7}).compile().params
          {'foo':7}

        """
        return self._params(False, optionaldict, kwargs)
    def _params(self, unique, optionaldict, kwargs):
        # shared implementation of params()/unique_params(); values may be
        # given either as a single positional dict or as keyword args
        if len(optionaldict) == 1:
            kwargs.update(optionaldict[0])
        elif len(optionaldict) > 1:
            raise exc.ArgumentError(
                "params() takes zero or one positional dictionary argument")
        def visit_bindparam(bind):
            if bind.key in kwargs:
                bind.value = kwargs[bind.key]
            if unique:
                bind._convert_to_unique()
        return cloned_traverse(self, {}, {'bindparam':visit_bindparam})
    def compare(self, other, **kw):
        """Compare this ClauseElement to the given ClauseElement.

        Subclasses should override the default behavior, which is a
        straight identity comparison.

        \**kw are arguments consumed by subclass compare() methods and
        may be used to modify the criteria for comparison.
        (see :class:`ColumnElement`)

        """
        return self is other
    def _copy_internals(self, clone=_clone):
        """Reassign internal elements to be clones of themselves.

        Called during a copy-and-traverse operation on newly
        shallow-copied elements to create a deep copy.
        """
        pass
    def get_children(self, **kwargs):
        """Return immediate child elements of this :class:`ClauseElement`.

        This is used for visit traversal.

        \**kwargs may contain flags that change the collection that is
        returned, for example to return a subset of items in order to
        cut down on larger traversals, or to return child items from a
        different context (such as schema-level collections instead of
        clause-level).

        """
        return []
    def self_group(self, against=None):
        # no grouping (parenthesization) by default
        return self
    # TODO: remove .bind as a method from the root ClauseElement.
    # we should only be deriving binds from FromClause elements
    # and certain SchemaItem subclasses.
    # the "search_for_bind" functionality can still be used by
    # execute(), however.
    @property
    def bind(self):
        """Returns the Engine or Connection to which this ClauseElement is
        bound, or None if none found.
        """
        if self._bind is not None:
            return self._bind
        # otherwise search the derived FROM objects for a bind
        for f in _from_objects(self):
            if f is self:
                continue
            engine = f.bind
            if engine is not None:
                return engine
        else:
            return None
    @util.pending_deprecation('0.7',
                              'Only SQL expressions which subclass '
                              ':class:`.Executable` may provide the '
                              ':func:`.execute` method.')
    def execute(self, *multiparams, **params):
        """Compile and execute this :class:`ClauseElement`.

        """
        e = self.bind
        if e is None:
            label = getattr(self, 'description', self.__class__.__name__)
            msg = ('This %s is not bound and does not support direct '
                   'execution. Supply this statement to a Connection or '
                   'Engine for execution. Or, assign a bind to the statement '
                   'or the Metadata of its underlying tables to enable '
                   'implicit execution via this method.' % label)
            raise exc.UnboundExecutionError(msg)
        return e._execute_clauseelement(self, multiparams, params)
    @util.pending_deprecation('0.7',
                              'Only SQL expressions which subclass '
                              ':class:`.Executable` may provide the '
                              ':func:`.scalar` method.')
    def scalar(self, *multiparams, **params):
        """Compile and execute this :class:`ClauseElement`, returning
        the result's scalar representation.

        """
        return self.execute(*multiparams, **params).scalar()
    def compile(self, bind=None, dialect=None, **kw):
        """Compile this SQL expression.

        The return value is a :class:`~sqlalchemy.engine.Compiled` object.
        Calling ``str()`` or ``unicode()`` on the returned value will yield
        a string representation of the result. The
        :class:`~sqlalchemy.engine.Compiled` object also can return a
        dictionary of bind parameter names and values
        using the ``params`` accessor.

        :param bind: An ``Engine`` or ``Connection`` from which a
          ``Compiled`` will be acquired. This argument takes precedence
          over this :class:`ClauseElement`'s bound engine, if any.

        :param column_keys: Used for INSERT and UPDATE statements, a list of
          column names which should be present in the VALUES clause of the
          compiled statement. If ``None``, all columns from the target table
          object are rendered.

        :param dialect: A ``Dialect`` instance from which a ``Compiled``
          will be acquired. This argument takes precedence over the `bind`
          argument as well as this :class:`ClauseElement`'s bound engine,
          if any.

        :param inline: Used for INSERT statements, for a dialect which does
          not support inline retrieval of newly generated primary key
          columns, will force the expression used to create the new primary
          key value to be rendered inline within the INSERT statement's
          VALUES clause. This typically refers to Sequence execution but
          may also refer to any server-side default generation function
          associated with a primary key `Column`.

        """
        if not dialect:
            if bind:
                dialect = bind.dialect
            elif self.bind:
                dialect = self.bind.dialect
                bind = self.bind
            else:
                # no bind available: fall back to the default dialect,
                # imported lazily to avoid a circular import
                global DefaultDialect
                if DefaultDialect is None:
                    from sqlalchemy.engine.default import DefaultDialect
                dialect = DefaultDialect()
        compiler = self._compiler(dialect, bind=bind, **kw)
        compiler.compile()
        return compiler
    def _compiler(self, dialect, **kw):
        """Return a compiler appropriate for this ClauseElement, given a
        Dialect."""
        return dialect.statement_compiler(dialect, self, **kw)
    def __str__(self):
        # Py3K
        #return unicode(self.compile())
        # Py2K
        return unicode(self.compile()).encode('ascii', 'backslashreplace')
        # end Py2K
    def __and__(self, other):
        return and_(self, other)
    def __or__(self, other):
        return or_(self, other)
    def __invert__(self):
        return self._negate()
    def __nonzero__(self):
        raise TypeError("Boolean value of this clause is not defined")
    def _negate(self):
        # subclasses may provide a pre-built negation_clause; otherwise
        # wrap in a NOT unary expression
        if hasattr(self, 'negation_clause'):
            return self.negation_clause
        else:
            return _UnaryExpression(
                        self.self_group(against=operators.inv),
                        operator=operators.inv,
                        negate=None)
    def __repr__(self):
        friendly = getattr(self, 'description', None)
        if friendly is None:
            return object.__repr__(self)
        else:
            return '<%s.%s at 0x%x; %s>' % (
                self.__module__, self.__class__.__name__, id(self), friendly)
class _Immutable(object):
"""mark a ClauseElement as 'immutable' when expressions are cloned."""
def unique_params(self, *optionaldict, **kwargs):
raise NotImplementedError("Immutable objects do not support copying")
def params(self, *optionaldict, **kwargs):
raise NotImplementedError("Immutable objects do not support copying")
def _clone(self):
return self
class Operators(object):
    """Base of operator overloading for clause constructs.

    The dunder methods dispatch to :meth:`operate` /
    :meth:`reverse_operate`, which subclasses must implement.
    """
    def __and__(self, other):
        """Implement the ``&`` operator."""
        return self.operate(operators.and_, other)
    def __or__(self, other):
        """Implement the ``|`` operator."""
        return self.operate(operators.or_, other)
    def __invert__(self):
        """Implement the ``~`` operator."""
        return self.operate(operators.inv)
    def op(self, opstring):
        """Produce a callable applying a custom operator given as a
        string."""
        def op(b):
            return self.operate(operators.op, opstring, b)
        return op
    def operate(self, op, *other, **kwargs):
        """Operate on an argument; must be overridden by a subclass."""
        raise NotImplementedError(str(op))
    def reverse_operate(self, op, other, **kwargs):
        """Reverse-operate on an argument; must be overridden by a
        subclass."""
        raise NotImplementedError(str(op))
class ColumnOperators(Operators):
    """Defines comparison and math operations.

    Each method delegates to operate()/reverse_operate() with the
    corresponding function from the ``operators`` module; subclasses
    (e.g. _CompareMixin) supply the actual SQL construction.
    """
    timetuple = None
    """Hack, allows datetime objects to be compared on the LHS."""
    # comparison operators
    def __lt__(self, other):
        return self.operate(operators.lt, other)
    def __le__(self, other):
        return self.operate(operators.le, other)
    # __eq__ is overridden below, so __hash__ must be restated explicitly
    __hash__ = Operators.__hash__
    def __eq__(self, other):
        return self.operate(operators.eq, other)
    def __ne__(self, other):
        return self.operate(operators.ne, other)
    def __gt__(self, other):
        return self.operate(operators.gt, other)
    def __ge__(self, other):
        return self.operate(operators.ge, other)
    def __neg__(self):
        return self.operate(operators.neg)
    # string/collection operators
    def concat(self, other):
        return self.operate(operators.concat_op, other)
    def like(self, other, escape=None):
        return self.operate(operators.like_op, other, escape=escape)
    def ilike(self, other, escape=None):
        return self.operate(operators.ilike_op, other, escape=escape)
    def in_(self, other):
        return self.operate(operators.in_op, other)
    def startswith(self, other, **kwargs):
        return self.operate(operators.startswith_op, other, **kwargs)
    def endswith(self, other, **kwargs):
        return self.operate(operators.endswith_op, other, **kwargs)
    def contains(self, other, **kwargs):
        return self.operate(operators.contains_op, other, **kwargs)
    def match(self, other, **kwargs):
        return self.operate(operators.match_op, other, **kwargs)
    # ordering / modifier operators
    def desc(self):
        return self.operate(operators.desc_op)
    def asc(self):
        return self.operate(operators.asc_op)
    def collate(self, collation):
        return self.operate(operators.collate, collation)
    # reflected (right-hand side) math operators
    def __radd__(self, other):
        return self.reverse_operate(operators.add, other)
    def __rsub__(self, other):
        return self.reverse_operate(operators.sub, other)
    def __rmul__(self, other):
        return self.reverse_operate(operators.mul, other)
    def __rdiv__(self, other):
        return self.reverse_operate(operators.div, other)
    def between(self, cleft, cright):
        return self.operate(operators.between_op, cleft, cright)
    def distinct(self):
        return self.operate(operators.distinct_op)
    # math operators
    def __add__(self, other):
        return self.operate(operators.add, other)
    def __sub__(self, other):
        return self.operate(operators.sub, other)
    def __mul__(self, other):
        return self.operate(operators.mul, other)
    def __div__(self, other):
        return self.operate(operators.div, other)
    def __mod__(self, other):
        return self.operate(operators.mod, other)
    def __truediv__(self, other):
        return self.operate(operators.truediv, other)
    def __rtruediv__(self, other):
        return self.reverse_operate(operators.truediv, other)
class _CompareMixin(ColumnOperators):
    """Defines comparison and math operations for :class:`ClauseElement`
    instances."""
    # The name-mangled __compare/__operate methods below are referenced
    # by the class-level ``operators`` dispatch table and invoked
    # indirectly through operate()/reverse_operate().
    def __compare(self, op, obj, negate=None, reverse=False,
                            **kwargs
                            ):
        # Build a boolean _BinaryExpression for a comparison operator.
        # Comparisons against None/NULL must render IS / IS NOT.
        if obj is None or isinstance(obj, _Null):
            if op == operators.eq:
                return _BinaryExpression(self, null(), operators.is_,
                        negate=operators.isnot)
            elif op == operators.ne:
                return _BinaryExpression(self, null(), operators.isnot,
                        negate=operators.is_)
            else:
                raise exc.ArgumentError("Only '='/'!=' operators can "
                        "be used with NULL")
        else:
            obj = self._check_literal(op, obj)
        # 'reverse' places 'obj' on the left-hand side; used by the
        # reflected ("r"-prefixed) operator protocol.
        if reverse:
            return _BinaryExpression(obj,
                            self,
                            op,
                            type_=sqltypes.BOOLEANTYPE,
                            negate=negate, modifiers=kwargs)
        else:
            return _BinaryExpression(self,
                            obj,
                            op,
                            type_=sqltypes.BOOLEANTYPE,
                            negate=negate, modifiers=kwargs)
    def __operate(self, op, obj, reverse=False):
        # Build an arithmetic/string _BinaryExpression; the operand
        # types negotiate the final operator and result type.
        obj = self._check_literal(op, obj)
        if reverse:
            left, right = obj, self
        else:
            left, right = self, obj
        if left.type is None:
            op, result_type = sqltypes.NULLTYPE._adapt_expression(op,
                    right.type)
        elif right.type is None:
            op, result_type = left.type._adapt_expression(op,
                    sqltypes.NULLTYPE)
        else:
            op, result_type = left.type._adapt_expression(op,
                    right.type)
        return _BinaryExpression(left, right, op, type_=result_type)
    # a mapping of operators with the method they use, along with their negated
    # operator for comparison operators
    operators = {
        operators.add : (__operate,),
        operators.mul : (__operate,),
        operators.sub : (__operate,),
        # Py2K
        operators.div : (__operate,),
        # end Py2K
        operators.mod : (__operate,),
        operators.truediv : (__operate,),
        operators.lt : (__compare, operators.ge),
        operators.le : (__compare, operators.gt),
        operators.ne : (__compare, operators.eq),
        operators.gt : (__compare, operators.le),
        operators.ge : (__compare, operators.lt),
        operators.eq : (__compare, operators.ne),
        operators.like_op : (__compare, operators.notlike_op),
        operators.ilike_op : (__compare, operators.notilike_op),
    }
    def operate(self, op, *other, **kwargs):
        """Dispatch ``op`` to its implementing method (and negation, for
        comparisons) via the class-level ``operators`` table."""
        o = _CompareMixin.operators[op]
        return o[0](self, op, other[0], *o[1:], **kwargs)
    def reverse_operate(self, op, other, **kwargs):
        """Like :meth:`operate`, but with operand order reversed."""
        o = _CompareMixin.operators[op]
        return o[0](self, op, other, reverse=True, *o[1:], **kwargs)
    def in_(self, other):
        """Produce an IN comparison against a sequence or selectable."""
        return self._in_impl(operators.in_op, operators.notin_op, other)
    def _in_impl(self, op, negate_op, seq_or_selectable):
        # Selectable arguments are compared directly (as scalar
        # subqueries where needed); anything else is treated as a
        # sequence of literal values.
        seq_or_selectable = _clause_element_as_expr(seq_or_selectable)
        if isinstance(seq_or_selectable, _ScalarSelect):
            return self.__compare(op, seq_or_selectable,
                                  negate=negate_op)
        elif isinstance(seq_or_selectable, _SelectBaseMixin):
            # TODO: if we ever want to support (x, y, z) IN (select x,
            # y, z from table), we would need a multi-column version of
            # as_scalar() to produce a multi- column selectable that
            # does not export itself as a FROM clause
            return self.__compare(op, seq_or_selectable.as_scalar(),
                                  negate=negate_op)
        elif isinstance(seq_or_selectable, (Selectable, _TextClause)):
            return self.__compare(op, seq_or_selectable,
                                  negate=negate_op)
        # Handle non selectable arguments as sequences
        args = []
        for o in seq_or_selectable:
            if not _is_literal(o):
                if not isinstance(o, _CompareMixin):
                    raise exc.InvalidRequestError('in() function accept'
                            's either a list of non-selectable values, '
                            'or a selectable: %r' % o)
            else:
                o = self._bind_param(op, o)
            args.append(o)
        if len(args) == 0:
            # Special case handling for empty IN's, behave like
            # comparison against zero row selectable. We use != to
            # build the contradiction as it handles NULL values
            # appropriately, i.e. "not (x IN ())" should not return NULL
            # values for x.
            util.warn('The IN-predicate on "%s" was invoked with an '
                      'empty sequence. This results in a '
                      'contradiction, which nonetheless can be '
                      'expensive to evaluate. Consider alternative '
                      'strategies for improved performance.' % self)
            return self != self
        return self.__compare(op,
                              ClauseList(*args).self_group(against=op),
                              negate=negate_op)
    def __neg__(self):
        """Produce a unary negation, i.e. ``-<expression>``."""
        return _UnaryExpression(self, operator=operators.neg)
    def startswith(self, other, escape=None):
        """Produce the clause ``LIKE '<other>%'``"""
        # use __radd__ to force string concat behavior
        return self.__compare(
            operators.like_op,
            literal_column("'%'", type_=sqltypes.String).__radd__(
                                self._check_literal(operators.like_op, other)
                            ),
            escape=escape)
    def endswith(self, other, escape=None):
        """Produce the clause ``LIKE '%<other>'``"""
        return self.__compare(
            operators.like_op,
            literal_column("'%'", type_=sqltypes.String) +
                self._check_literal(operators.like_op, other),
            escape=escape)
    def contains(self, other, escape=None):
        """Produce the clause ``LIKE '%<other>%'``"""
        return self.__compare(
            operators.like_op,
            literal_column("'%'", type_=sqltypes.String) +
                self._check_literal(operators.like_op, other) +
                literal_column("'%'", type_=sqltypes.String),
            escape=escape)
    def match(self, other):
        """Produce a MATCH clause, i.e. ``MATCH '<other>'``
        The allowed contents of ``other`` are database backend specific.
        """
        return self.__compare(operators.match_op,
                              self._check_literal(operators.match_op,
                              other))
    def label(self, name):
        """Produce a column label, i.e. ``<columnname> AS <name>``.
        if 'name' is None, an anonymous label name will be generated.
        """
        return _Label(name, self, self.type)
    def desc(self):
        """Produce a DESC clause, i.e. ``<columnname> DESC``"""
        return desc(self)
    def asc(self):
        """Produce a ASC clause, i.e. ``<columnname> ASC``"""
        return asc(self)
    def distinct(self):
        """Produce a DISTINCT clause, i.e. ``DISTINCT <columnname>``"""
        return _UnaryExpression(self, operator=operators.distinct_op,
                                type_=self.type)
    def between(self, cleft, cright):
        """Produce a BETWEEN clause, i.e. ``<column> BETWEEN <cleft> AND
        <cright>``"""
        # the two bounds are joined by AND but rendered ungrouped
        # (group=False) inside the BETWEEN expression
        return _BinaryExpression(
                self,
                ClauseList(
                    self._check_literal(operators.and_, cleft),
                    self._check_literal(operators.and_, cright),
                    operator=operators.and_,
                    group=False),
                operators.between_op)
    def collate(self, collation):
        """Produce a COLLATE clause, i.e. ``<column> COLLATE utf8_bin``"""
        return collate(self, collation)
    def op(self, operator):
        """produce a generic operator function.
        e.g.::
          somecolumn.op("*")(5)
        produces::
          somecolumn * 5
        :param operator: a string which will be output as the infix operator
          between this :class:`ClauseElement` and the expression passed to the
          generated function.
        This function can also be used to make bitwise operators explicit. For
        example::
          somecolumn.op('&')(0xff)
        is a bitwise AND of the value in somecolumn.
        """
        # the returned lambda closes over the (mangled) __operate
        return lambda other: self.__operate(operator, other)
    def _bind_param(self, operator, obj):
        # wrap a raw Python value in an anonymous, unique bind parameter
        # whose type is derived from this element's type
        return _BindParamClause(None, obj,
                                    _compared_to_operator=operator,
                                    _compared_to_type=self.type, unique=True)
    def _check_literal(self, operator, other):
        # coerce the operand into a ClauseElement suitable for use in
        # an expression against this element
        if isinstance(other, _BindParamClause) and \
            isinstance(other.type, sqltypes.NullType):
            # TODO: perhaps we should not mutate the incoming bindparam()
            # here and instead make a copy of it.  this might
            # be the only place that we're mutating an incoming construct.
            other.type = self.type
            return other
        elif hasattr(other, '__clause_element__'):
            return other.__clause_element__()
        elif not isinstance(other, ClauseElement):
            return self._bind_param(operator, other)
        elif isinstance(other, (_SelectBaseMixin, Alias)):
            return other.as_scalar()
        else:
            return other
class ColumnElement(ClauseElement, _CompareMixin):
    """Represent an element that is usable within the "column clause" portion
    of a ``SELECT`` statement.

    This includes columns associated with tables, aliases, and
    subqueries, expressions, function calls, SQL keywords such as
    ``NULL``, literals, etc. :class:`ColumnElement` is the ultimate base
    class for all such elements.

    :class:`ColumnElement` supports the ability to be a *proxy* element,
    which indicates that the :class:`ColumnElement` may be associated with
    a :class:`Selectable` which was derived from another :class:`Selectable`.
    An example of a "derived" :class:`Selectable` is an :class:`Alias` of a
    :class:`~sqlalchemy.schema.Table`.

    A :class:`ColumnElement`, by subclassing the :class:`_CompareMixin` mixin
    class, provides the ability to generate new :class:`ClauseElement`
    objects using Python expressions. See the :class:`_CompareMixin`
    docstring for more details.

    """
    __visit_name__ = 'column'
    primary_key = False
    foreign_keys = []
    quote = None
    _label = None

    @property
    def _select_iterable(self):
        return (self, )

    @util.memoized_property
    def base_columns(self):
        # the "root" columns of this element's proxy chain, i.e. those
        # which are not themselves proxies for anything else
        return util.column_set(c for c in self.proxy_set
                                     if not hasattr(c, 'proxies'))

    @util.memoized_property
    def proxy_set(self):
        # this element plus the transitive closure of all elements
        # it proxies for
        s = util.column_set([self])
        if hasattr(self, 'proxies'):
            for c in self.proxies:
                s.update(c.proxy_set)
        return s

    def shares_lineage(self, othercolumn):
        """Return True if the given :class:`ColumnElement`
        has a common ancestor to this :class:`ColumnElement`."""

        return bool(self.proxy_set.intersection(othercolumn.proxy_set))

    def _make_proxy(self, selectable, name=None):
        """Create a new :class:`ColumnElement` representing this
        :class:`ColumnElement` as it appears in the select list of a
        descending selectable.

        """
        if name:
            co = ColumnClause(name, selectable, type_=getattr(self,
                    'type', None))
        else:
            # unnamed expressions are exported under their anonymous
            # label, but keyed in the collection by their string form
            name = str(self)
            co = ColumnClause(self.anon_label, selectable,
                              type_=getattr(self, 'type', None))
        co.proxies = [self]
        selectable.columns[name] = co
        return co

    def compare(self, other, use_proxies=False, equivalents=None, **kw):
        """Compare this ColumnElement to another.

        Special arguments understood:

        :param use_proxies: when True, consider two columns that
          share a common base column as equivalent (i.e. shares_lineage())

        :param equivalents: a dictionary of columns as keys mapped to sets
          of columns. If the given "other" column is present in this
          dictionary, if any of the columns in the corresponding set() pass the
          comparison test, the result is True. This is used to expand the
          comparison to other columns that may be known to be equivalent to
          this one via foreign key or other criterion.

        """
        to_compare = (other, )
        if equivalents and other in equivalents:
            to_compare = equivalents[other].union(to_compare)

        # bugfix: previously an 'else: return False' was attached to the
        # elif, so the method returned on the *first* non-matching
        # candidate and equivalents beyond the first were never tested.
        # Per the docstring, *any* candidate passing the test suffices.
        for oth in to_compare:
            if use_proxies and self.shares_lineage(oth):
                return True
            elif oth is self:
                return True
        return False

    @util.memoized_property
    def anon_label(self):
        """provides a constant 'anonymous label' for this ColumnElement.

        This is a label() expression which will be named at compile time.
        The same label() is returned each time anon_label is called so
        that expressions can reference anon_label multiple times, producing
        the same label name at compile time.

        the compiler uses this function automatically at compile time
        for expressions that are known to be 'unnamed' like binary
        expressions and function calls.

        """
        return _generated_label('%%(%d %s)s' % (id(self), getattr(self,
            'name', 'anon')))
class ColumnCollection(util.OrderedProperties):
    """An ordered dictionary that stores a list of ColumnElement
    instances.

    Overrides the ``__eq__()`` method to produce SQL clauses between
    sets of correlated columns.

    """
    def __init__(self, *cols):
        super(ColumnCollection, self).__init__()
        # columns are keyed by their .key attribute
        self.update((c.key, c) for c in cols)
    def __str__(self):
        return repr([str(c) for c in self])
    def replace(self, column):
        """add the given column to this collection, removing unaliased
        versions of this column  as well as existing columns with the
        same key.

            e.g.::

                t = Table('sometable', metadata, Column('col1', Integer))
                t.columns.replace(Column('col1', Integer, key='columnone'))

            will remove the original 'col1' from the collection, and add
            the new column under the name 'columnone'.

        Used by schema.Column to override columns during table reflection.

        """
        # drop a same-named, un-rekeyed column before inserting the
        # replacement under its (possibly different) key
        if column.name in self and column.key != column.name:
            other = self[column.name]
            if other.name == other.key:
                del self[other.name]
        util.OrderedProperties.__setitem__(self, column.key, column)
    def add(self, column):
        """Add a column to this collection.

        The key attribute of the column will be used as the hash key
        for this dictionary.

        """
        self[column.key] = column
    def __setitem__(self, key, value):
        if key in self:
            # this warning is primarily to catch select() statements
            # which have conflicting column names in their exported
            # columns collection
            existing = self[key]
            if not existing.shares_lineage(value):
                util.warn('Column %r on table %r being replaced by '
                          'another column with the same key.  Consider '
                          'use_labels for select() statements.' % (key,
                          getattr(existing, 'table', None)))
        util.OrderedProperties.__setitem__(self, key, value)
    def remove(self, column):
        del self[column.key]
    def extend(self, iter):
        for c in iter:
            self.add(c)
    # __eq__ produces a SQL expression rather than a boolean, so the
    # collection is explicitly unhashable
    __hash__ = None
    def __eq__(self, other):
        # produce an AND of equalities between lineage-related pairs
        l = []
        for c in other:
            for local in self:
                if c.shares_lineage(local):
                    l.append(c==local)
        return and_(*l)
    def __contains__(self, other):
        # membership is by key (string), not by column object; see
        # contains_column() for object-identity membership
        if not isinstance(other, basestring):
            raise exc.ArgumentError("__contains__ requires a string argument")
        return util.OrderedProperties.__contains__(self, other)
    def contains_column(self, col):
        # have to use a Set here, because it will compare the identity
        # of the column, not just using "==" for comparison which will
        # always return a "True" value (i.e. a BinaryClause...)
        return col in util.column_set(self)
class ColumnSet(util.ordered_column_set):
    """An ordered set of columns whose ``==`` operator produces a SQL
    conjunction of column equalities rather than a boolean."""
    def contains_column(self, col):
        """Membership test by column identity."""
        return col in self
    def extend(self, cols):
        """Add every column in ``cols`` to this set."""
        for candidate in cols:
            self.add(candidate)
    def __add__(self, other):
        # concatenation of the two collections as a plain list
        return list(self) + list(other)
    def __eq__(self, other):
        # AND together equalities between every lineage-related pair
        comparisons = [c == local
                       for c in other
                       for local in self
                       if c.shares_lineage(local)]
        return and_(*comparisons)
    def __hash__(self):
        return hash(tuple(self))
class Selectable(ClauseElement):
    """mark a class as being selectable, i.e. usable as the subject of a
    SELECT statement"""
    __visit_name__ = 'selectable'
class FromClause(Selectable):
    """Represent an element that can be used within the ``FROM``
    clause of a ``SELECT`` statement.

    """
    __visit_name__ = 'fromclause'
    named_with_column = False
    _hide_froms = []
    quote = None
    schema = None

    def count(self, whereclause=None, **params):
        """return a SELECT COUNT generated against this
        :class:`FromClause`."""

        # count against the first primary key column when one exists,
        # else the first column overall
        if self.primary_key:
            col = list(self.primary_key)[0]
        else:
            col = list(self.columns)[0]
        return select(
                    [func.count(col).label('tbl_row_count')],
                    whereclause,
                    from_obj=[self],
                    **params)

    def select(self, whereclause=None, **params):
        """return a SELECT of this :class:`FromClause`."""

        return select([self], whereclause, **params)

    def join(self, right, onclause=None, isouter=False):
        """return a join of this :class:`FromClause` against another
        :class:`FromClause`."""

        return Join(self, right, onclause, isouter)

    def outerjoin(self, right, onclause=None):
        """return an outer join of this :class:`FromClause` against another
        :class:`FromClause`."""

        return Join(self, right, onclause, True)

    def alias(self, name=None):
        """return an alias of this :class:`FromClause`.

        For table objects, this has the effect of the table being rendered
        as ``tablename AS aliasname`` in a SELECT statement.

        For select objects, the effect is that of creating a named
        subquery, i.e. ``(select ...) AS aliasname``.

        The :func:`alias()` method is the general way to create
        a "subquery" out of an existing SELECT.

        The ``name`` parameter is optional, and if left blank an
        "anonymous" name will be generated at compile time, guaranteed
        to be unique against other anonymous constructs used in the
        same statement.

        """
        return Alias(self, name)

    def is_derived_from(self, fromclause):
        """Return True if this FromClause is 'derived' from the given
        FromClause.

        An example would be an Alias of a Table is derived from that Table.

        """
        return fromclause in self._cloned_set

    def replace_selectable(self, old, alias):
        """replace all occurrences of FromClause 'old' with the given Alias
        object, returning a copy of this :class:`FromClause`.

        """
        # deferred import to avoid a circular dependency at module load
        global sql_util
        if sql_util is None:
            from sqlalchemy.sql import util as sql_util
        return sql_util.ClauseAdapter(alias).traverse(self)

    def correspond_on_equivalents(self, column, equivalents):
        """Return corresponding_column for the given column, or if None
        search for a match in the given dictionary.

        """
        col = self.corresponding_column(column, require_embedded=True)
        # bugfix: the equivalents lookup previously tested ``col`` --
        # which is always None on this branch -- rather than the given
        # ``column``, so the documented fallback search never ran.
        if col is None and column in equivalents:
            for equiv in equivalents[column]:
                nc = self.corresponding_column(equiv, require_embedded=True)
                if nc:
                    return nc
        return col

    def corresponding_column(self, column, require_embedded=False):
        """Given a :class:`ColumnElement`, return the exported
        :class:`ColumnElement` object from this :class:`Selectable`
        which corresponds to that original
        :class:`~sqlalchemy.schema.Column` via a common ancestor
        column.

        :param column: the target :class:`ColumnElement` to be matched

        :param require_embedded: only return corresponding columns for
        the given :class:`ColumnElement`, if the given
        :class:`ColumnElement` is actually present within a sub-element
        of this :class:`FromClause`.  Normally the column will match if
        it merely shares a common ancestor with one of the exported
        columns of this :class:`FromClause`.

        """
        # don't dig around if the column is locally present
        if self.c.contains_column(column):
            return column
        col, intersect = None, None
        target_set = column.proxy_set
        cols = self.c
        for c in cols:
            i = target_set.intersection(itertools.chain(*[p._cloned_set
                    for p in c.proxy_set]))
            if i and (not require_embedded
                      or c.proxy_set.issuperset(target_set)):
                if col is None:
                    # no corresponding column yet, pick this one.
                    col, intersect = c, i
                elif len(i) > len(intersect):
                    # 'c' has a larger field of correspondence than
                    # 'col'. i.e. selectable.c.a1_x->a1.c.x->table.c.x
                    # matches a1.c.x->table.c.x better than
                    # selectable.c.x->table.c.x does.
                    col, intersect = c, i
                elif i == intersect:
                    # they have the same field of correspondence. see
                    # which proxy_set has fewer columns in it, which
                    # indicates a closer relationship with the root
                    # column. Also take into account the "weight"
                    # attribute which CompoundSelect() uses to give
                    # higher precedence to columns based on vertical
                    # position in the compound statement, and discard
                    # columns that have no reference to the target
                    # column (also occurs with CompoundSelect)
                    col_distance = util.reduce(operator.add,
                            [sc._annotations.get('weight', 1) for sc in
                            col.proxy_set if sc.shares_lineage(column)])
                    c_distance = util.reduce(operator.add,
                            [sc._annotations.get('weight', 1) for sc in
                            c.proxy_set if sc.shares_lineage(column)])
                    if c_distance < col_distance:
                        col, intersect = c, i
        return col

    @property
    def description(self):
        """a brief description of this FromClause.

        Used primarily for error message formatting.

        """
        return getattr(self, 'name', self.__class__.__name__ + " object")

    def _reset_exported(self):
        """delete memoized collections when a FromClause is cloned."""

        # bugfix: a missing comma had fused '_primary_key' and
        # '_foreign_keys' (implicit string concatenation) into the
        # nonexistent attribute '_primary_key_foreign_keys', so those
        # two memoized collections were never cleared on clone.
        for attr in '_columns', '_primary_key', '_foreign_keys', \
                'locate_all_froms':
            self.__dict__.pop(attr, None)

    @util.memoized_property
    def _columns(self):
        """Return the collection of Column objects contained by this
        FromClause."""

        self._export_columns()
        return self._columns

    @util.memoized_property
    def _primary_key(self):
        """Return the collection of Column objects which comprise the
        primary key of this FromClause."""

        self._export_columns()
        return self._primary_key

    @util.memoized_property
    def _foreign_keys(self):
        """Return the collection of ForeignKey objects which this
        FromClause references."""

        self._export_columns()
        return self._foreign_keys

    columns = property(attrgetter('_columns'), doc=_columns.__doc__)
    primary_key = property(attrgetter('_primary_key'),
                           doc=_primary_key.__doc__)
    foreign_keys = property(attrgetter('_foreign_keys'),
                            doc=_foreign_keys.__doc__)

    # synonyms for 'columns'
    c = _select_iterable = property(attrgetter('columns'),
                                    doc=_columns.__doc__)

    def _export_columns(self):
        """Initialize column collections."""

        # assigning to the instance shadows the memoized properties
        # above, so each export runs at most once per instance
        self._columns = ColumnCollection()
        self._primary_key = ColumnSet()
        self._foreign_keys = set()
        self._populate_column_collection()

    def _populate_column_collection(self):
        # hook for subclasses to fill in the exported collections
        pass
class _BindParamClause(ColumnElement):
    """Represent a bind parameter.

    Public constructor is the :func:`bindparam()` function.

    """
    __visit_name__ = 'bindparam'
    quote = None

    def __init__(self, key, value, type_=None, unique=False,
                            isoutparam=False, required=False,
                            _compared_to_operator=None,
                            _compared_to_type=None):
        """Construct a _BindParamClause.

        :param key:
          the key for this bind param.  Will be used in the generated
          SQL statement for dialects that use named parameters.  This
          value may be modified when part of a compilation operation,
          if other :class:`_BindParamClause` objects exist with the same
          key, or if its length is too long and truncation is
          required.

        :param value:
          Initial value for this bind param.  This value may be
          overridden by the dictionary of parameters sent to statement
          compilation/execution.

        :param type\_:
          A ``TypeEngine`` object that will be used to pre-process the
          value corresponding to this :class:`_BindParamClause` at
          execution time.

        :param unique:
          if True, the key name of this BindParamClause will be
          modified if another :class:`_BindParamClause` of the same name
          already has been located within the containing
          :class:`ClauseElement`.

        :param required:
          a value is required at execution time.

        :param isoutparam:
          if True, the parameter should be treated like a stored procedure
          "OUT" parameter.

        """
        # unique keys embed id(self) in an anonymizing template which
        # the compiler resolves to a final name at compile time
        if unique:
            self.key = _generated_label('%%(%d %s)s' % (id(self), key
                    or 'param'))
        else:
            self.key = key or _generated_label('%%(%d param)s'
                    % id(self))
        # _orig_key preserves the caller-supplied key so that clones
        # can regenerate a unique key from it
        self._orig_key = key or 'param'
        self.unique = unique
        self.value = value
        self.isoutparam = isoutparam
        self.required = required
        if type_ is None:
            if _compared_to_type is not None:
                # type inferred from the expression this parameter is
                # being compared against
                self.type = \
                    _compared_to_type._coerce_compared_value(
                        _compared_to_operator, value)
            else:
                # otherwise guess from the Python type of the value
                self.type = sqltypes.type_map.get(type(value),
                        sqltypes.NULLTYPE)
        elif isinstance(type_, type):
            self.type = type_()
        else:
            self.type = type_

    def _clone(self):
        c = ClauseElement._clone(self)
        # a unique parameter gets a fresh anonymized key per clone
        if self.unique:
            c.key = _generated_label('%%(%d %s)s' % (id(c), c._orig_key
                    or 'param'))
        return c

    def _convert_to_unique(self):
        if not self.unique:
            self.unique = True
            self.key = _generated_label('%%(%d %s)s' % (id(self),
                            self._orig_key or 'param'))

    def bind_processor(self, dialect):
        return self.type.dialect_impl(dialect).bind_processor(dialect)

    def compare(self, other, **kw):
        """Compare this :class:`_BindParamClause` to the given
        clause."""

        # equality is by type affinity and value, not by key
        return isinstance(other, _BindParamClause) \
            and self.type._compare_type_affinity(other.type) \
            and self.value == other.value

    def __getstate__(self):
        """execute a deferred value for serialization purposes."""

        d = self.__dict__.copy()
        v = self.value
        if util.callable(v):
            v = v()
        d['value'] = v
        return d

    def __repr__(self):
        return '_BindParamClause(%r, %r, type_=%r)' % (self.key,
                self.value, self.type)
class _TypeClause(ClauseElement):
    """Handle a type keyword in a SQL statement.

    Used by the ``Case`` statement.

    """
    __visit_name__ = 'typeclause'

    def __init__(self, type):
        # parameter intentionally named 'type' (shadows the builtin)
        # to mirror the public attribute
        self.type = type
class _Generative(object):
"""Allow a ClauseElement to generate itself via the
@_generative decorator.
"""
def _generate(self):
s = self.__class__.__new__(self.__class__)
s.__dict__ = self.__dict__.copy()
return s
class Executable(_Generative):
    """Mark a ClauseElement as supporting execution.

    :class:`Executable` is a superclass for all "statement" types
    of objects, including :func:`select`, :func:`delete`, :func:`update`,
    :func:`insert`, :func:`text`.

    """
    supports_execution = True
    # immutable default; instances replace it wholesale rather than
    # mutating the shared class-level dict
    _execution_options = util.frozendict()

    @_generative
    def execution_options(self, **kw):
        """ Set non-SQL options for the statement which take effect during
        execution.

        Current options include:

        * autocommit - when True, a COMMIT will be invoked after execution
          when executed in 'autocommit' mode, i.e. when an explicit
          transaction is not begun on the connection. Note that DBAPI
          connections by default are always in a transaction - SQLAlchemy uses
          rules applied to different kinds of statements to determine if
          COMMIT will be invoked in order to provide its "autocommit" feature.
          Typically, all INSERT/UPDATE/DELETE statements as well as
          CREATE/DROP statements have autocommit behavior enabled; SELECT
          constructs do not. Use this option when invoking a SELECT or other
          specific SQL construct where COMMIT is desired (typically when
          calling stored procedures and such).

        * stream_results - indicate to the dialect that results should be
          "streamed" and not pre-buffered, if possible.  This is a limitation
          of many DBAPIs.  The flag is currently understood only by the
          psycopg2 dialect.

        * compiled_cache - a dictionary where :class:`Compiled` objects
          will be cached when the :class:`Connection` compiles a clause
          expression into a dialect- and parameter-specific
          :class:`Compiled` object.   It is the user's responsibility to
          manage the size of this dictionary, which will have keys
          corresponding to the dialect, clause element, the column
          names within the VALUES or SET clause of an INSERT or UPDATE,
          as well as the "batch" mode for an INSERT or UPDATE statement.
          The format of this dictionary is not guaranteed to stay the
          same in future releases.

          This option is usually more appropriate
          to use via the
          :meth:`sqlalchemy.engine.base.Connection.execution_options()`
          method of :class:`Connection`, rather than upon individual
          statement objects, though the effect is the same.

        See also:

            :meth:`sqlalchemy.engine.base.Connection.execution_options()`

            :meth:`sqlalchemy.orm.query.Query.execution_options()`

        """
        # note: @_generative means 'self' here is a fresh copy; the
        # decorator returns that copy to the caller
        self._execution_options = self._execution_options.union(kw)

    def execute(self, *multiparams, **params):
        """Compile and execute this :class:`.Executable`."""

        # self.bind is supplied by subclasses / bound metadata; when
        # absent the statement cannot be executed implicitly
        e = self.bind
        if e is None:
            label = getattr(self, 'description', self.__class__.__name__)
            msg = ('This %s is not bound and does not support direct '
                   'execution. Supply this statement to a Connection or '
                   'Engine for execution. Or, assign a bind to the statement '
                   'or the Metadata of its underlying tables to enable '
                   'implicit execution via this method.' % label)
            raise exc.UnboundExecutionError(msg)
        return e._execute_clauseelement(self, multiparams, params)

    def scalar(self, *multiparams, **params):
        """Compile and execute this :class:`.Executable`, returning the
        result's scalar representation.

        """
        return self.execute(*multiparams, **params).scalar()
# legacy alias retained for backwards compatibility; outside users may
# still be referencing the old underscored name
_Executable = Executable
class _TextClause(Executable, ClauseElement):
    """Represent a literal SQL text fragment.

    Public constructor is the :func:`text()` function.

    """
    __visit_name__ = 'textclause'

    # matches ':name' bind markers, skipping '::' casts and escaped
    # '\:' sequences
    _bind_params_regex = re.compile(r'(?<![:\w\x5c]):(\w+)(?!:)', re.UNICODE)
    _execution_options = \
        Executable._execution_options.union({'autocommit'
            : PARSE_AUTOCOMMIT})

    @property
    def _select_iterable(self):
        return (self,)

    _hide_froms = []

    def __init__(
        self,
        text='',
        bind=None,
        bindparams=None,
        typemap=None,
        autocommit=None,
        ):
        self._bind = bind
        self.bindparams = {}
        self.typemap = typemap
        if autocommit is not None:
            util.warn_deprecated('autocommit on text() is deprecated.  '
                                 'Use .execution_options(autocommit=Tru'
                                 'e)')
            self._execution_options = \
                self._execution_options.union({'autocommit'
                    : autocommit})
        if typemap is not None:
            # normalize typemap values to TypeEngine instances in place
            for key in typemap.keys():
                typemap[key] = sqltypes.to_instance(typemap[key])

        def repl(m):
            # register each ':name' found in the text as a bindparam
            self.bindparams[m.group(1)] = bindparam(m.group(1))
            return ':%s' % m.group(1)

        # scan the string and search for bind parameter names, add them
        # to the list of bindparams
        self.text = self._bind_params_regex.sub(repl, text)
        if bindparams is not None:
            # explicitly supplied bindparams override the auto-detected
            # ones of the same key
            for b in bindparams:
                self.bindparams[b.key] = b

    @property
    def type(self):
        # NOTE(review): with a single-entry typemap this returns the
        # map *key*, not the type instance -- verify intent upstream
        if self.typemap is not None and len(self.typemap) == 1:
            return list(self.typemap)[0]
        else:
            return sqltypes.NULLTYPE

    def self_group(self, against=None):
        # parenthesize only when used inside an IN
        if against is operators.in_op:
            return _Grouping(self)
        else:
            return self

    def _copy_internals(self, clone=_clone):
        self.bindparams = dict((b.key, clone(b))
                               for b in self.bindparams.values())

    def get_children(self, **kwargs):
        return self.bindparams.values()
class _Null(ColumnElement):
    """Represent the NULL keyword in a SQL statement.

    Public constructor is the :func:`null()` function.

    """
    __visit_name__ = 'null'

    def __init__(self):
        # NULL is untyped; NULLTYPE is the "no type" sentinel
        self.type = sqltypes.NULLTYPE
class ClauseList(ClauseElement):
    """Describe a list of clauses, separated by an operator.

    By default, is comma-separated, such as a column listing.

    """
    __visit_name__ = 'clauselist'

    def __init__(self, *clauses, **kwargs):
        self.operator = kwargs.pop('operator', operators.comma_op)
        self.group = kwargs.pop('group', True)
        self.group_contents = kwargs.pop('group_contents', True)
        if self.group_contents:
            # each member is individually parenthesized as needed
            # against this list's operator
            self.clauses = [
                _literal_as_text(clause).self_group(against=self.operator)
                for clause in clauses if clause is not None]
        else:
            self.clauses = [
                _literal_as_text(clause)
                for clause in clauses if clause is not None]

    @util.memoized_property
    def type(self):
        # the list's type is taken from its first member
        if self.clauses:
            return self.clauses[0].type
        else:
            return sqltypes.NULLTYPE

    def __iter__(self):
        return iter(self.clauses)

    def __len__(self):
        return len(self.clauses)

    @property
    def _select_iterable(self):
        return iter(self)

    def append(self, clause):
        # TODO: not sure if i like the 'group_contents' flag.  need to
        # define the difference between a ClauseList of ClauseLists,
        # and a "flattened" ClauseList of ClauseLists.  flatten()
        # method ?
        if self.group_contents:
            self.clauses.append(_literal_as_text(clause).\
                                self_group(against=self.operator))
        else:
            self.clauses.append(_literal_as_text(clause))

    def _copy_internals(self, clone=_clone):
        self.clauses = [clone(clause) for clause in self.clauses]

    def get_children(self, **kwargs):
        return self.clauses

    @property
    def _from_objects(self):
        return list(itertools.chain(*[c._from_objects for c in self.clauses]))

    def self_group(self, against=None):
        # group only when the enclosing operator takes precedence over
        # this list's operator
        if self.group and self.operator is not against and \
                operators.is_precedent(self.operator, against):
            return _Grouping(self)
        else:
            return self

    def compare(self, other, **kw):
        """Compare this :class:`ClauseList` to the given :class:`ClauseList`,
        including a comparison of all the clause items.

        """
        if not isinstance(other, ClauseList) and len(self.clauses) == 1:
            # a single-item list compares directly against a bare clause
            return self.clauses[0].compare(other, **kw)
        elif isinstance(other, ClauseList) and \
                len(self.clauses) == len(other.clauses):
            for i in range(0, len(self.clauses)):
                if not self.clauses[i].compare(other.clauses[i], **kw):
                    return False
            else:
                # for/else: reached only when every item matched, in
                # which case the operators must also agree
                return self.operator == other.operator
        else:
            return False
class BooleanClauseList(ClauseList, ColumnElement):
    """A clause list which is also a boolean-typed column expression."""
    __visit_name__ = 'clauselist'
    def __init__(self, *clauses, **kwargs):
        super(BooleanClauseList, self).__init__(*clauses, **kwargs)
        # default the expression type to Boolean unless 'type_' given
        requested = kwargs.get('type_', sqltypes.Boolean)
        self.type = sqltypes.to_instance(requested)
    @property
    def _select_iterable(self):
        # acts as a single selectable column expression
        return (self, )
class _Tuple(ClauseList, ColumnElement):
    """A composite (multi-column) expression, e.g. ``(a, b) IN ...``."""
    def __init__(self, *clauses, **kw):
        clauses = [_literal_as_binds(c) for c in clauses]
        super(_Tuple, self).__init__(*clauses, **kw)
        self.type = _type_from_args(clauses)

    @property
    def _select_iterable(self):
        return (self, )

    def _bind_param(self, operator, obj):
        # a composite value expands into a tuple of individual bind
        # parameters, one per element
        return _Tuple(*[
            _BindParamClause(None, o, _compared_to_operator=operator,
                             _compared_to_type=self.type, unique=True)
            for o in obj
        ]).self_group()
class _Case(ColumnElement):
    """Represent a SQL ``CASE`` expression."""
    __visit_name__ = 'case'

    def __init__(self, whens, value=None, else_=None):
        # accept a dict-like 'whens' as well as a sequence of pairs
        try:
            whens = util.dictlike_iteritems(whens)
        except TypeError:
            pass

        if value is not None:
            # "simple" CASE: criteria are compared to 'value', so they
            # may be plain literals and are bound as such
            whenlist = [
                (_literal_as_binds(c).self_group(),
                _literal_as_binds(r)) for (c, r) in whens
            ]
        else:
            # "searched" CASE: each criterion must be a full boolean
            # expression, not a bare literal
            whenlist = [
                (_no_literals(c).self_group(),
                _literal_as_binds(r)) for (c, r) in whens
            ]

        # result type inferred from the last WHEN's result expression
        if whenlist:
            type_ = list(whenlist[-1])[-1].type
        else:
            type_ = None

        if value is None:
            self.value = None
        else:
            self.value = _literal_as_binds(value)

        self.type = type_
        self.whens = whenlist
        if else_ is not None:
            self.else_ = _literal_as_binds(else_)
        else:
            self.else_ = None

    def _copy_internals(self, clone=_clone):
        if self.value is not None:
            self.value = clone(self.value)
        self.whens = [(clone(x), clone(y)) for x, y in self.whens]
        if self.else_ is not None:
            self.else_ = clone(self.else_)

    def get_children(self, **kwargs):
        if self.value is not None:
            yield self.value
        for x, y in self.whens:
            yield x
            yield y
        if self.else_ is not None:
            yield self.else_

    @property
    def _from_objects(self):
        return list(itertools.chain(*[x._from_objects for x in
                    self.get_children()]))
class FunctionElement(Executable, ColumnElement, FromClause):
    """Base for SQL function-oriented constructs."""

    def __init__(self, *clauses, **kwargs):
        # self.name is expected to be supplied by the subclass before
        # __init__ runs (e.g. Function sets it) -- used for bind naming
        args = [_literal_as_binds(c, self.name) for c in clauses]
        self.clause_expr = ClauseList(
                                operator=operators.comma_op,
                                 group_contents=True, *args).\
                                 self_group()

    @property
    def columns(self):
        # a function exports itself as its single column
        return [self]

    @util.memoized_property
    def clauses(self):
        return self.clause_expr.element

    @property
    def _from_objects(self):
        return self.clauses._from_objects

    def get_children(self, **kwargs):
        return self.clause_expr,

    def _copy_internals(self, clone=_clone):
        self.clause_expr = clone(self.clause_expr)
        # drop memoized/exported state so it rebuilds from the clone
        self._reset_exported()
        util.reset_memoized(self, 'clauses')

    def select(self):
        """Return a SELECT of this function, carrying over any
        execution options set on the function itself."""
        s = select([self])
        if self._execution_options:
            s = s.execution_options(**self._execution_options)
        return s

    def scalar(self):
        return self.select().execute().scalar()

    def execute(self):
        return self.select().execute()

    def _bind_param(self, operator, obj):
        return _BindParamClause(None, obj, _compared_to_operator=operator,
                                _compared_to_type=self.type, unique=True)
class Function(FunctionElement):
    """Describe a named SQL function, e.g. ``func.count(...)``.

    Accepts ``packagenames``, ``bind`` and ``type_`` keyword arguments;
    remaining positional arguments become the function's argument list.
    """
    __visit_name__ = 'function'

    def __init__(self, name, *clauses, **kw):
        self.name = name
        # 'packagenames' is consumed here; the rest of ``kw`` flows
        # through to FunctionElement unchanged.
        packagenames = kw.pop('packagenames', None)
        self.packagenames = packagenames or []
        self._bind = kw.get('bind', None)
        self.type = sqltypes.to_instance(kw.get('type_', None))
        FunctionElement.__init__(self, *clauses, **kw)

    def _bind_param(self, operator, obj):
        # Bind parameter named after the function, typed relative to the
        # function's return type.
        return _BindParamClause(
            self.name, obj,
            _compared_to_operator=operator,
            _compared_to_type=self.type,
            unique=True)
class _Cast(ColumnElement):
    """Represent a SQL ``CAST(clause AS type)`` expression."""
    __visit_name__ = 'cast'

    def __init__(self, clause, totype, **kwargs):
        # Resolve the target type once and share it with the rendered
        # type clause.
        target_type = sqltypes.to_instance(totype)
        self.type = target_type
        self.clause = _literal_as_binds(clause, None)
        self.typeclause = _TypeClause(target_type)

    def _copy_internals(self, clone=_clone):
        self.clause = clone(self.clause)
        self.typeclause = clone(self.typeclause)

    def get_children(self, **kwargs):
        return (self.clause, self.typeclause)

    @property
    def _from_objects(self):
        # FROM objects come from the casted expression only.
        return self.clause._from_objects
class _Extract(ColumnElement):
    """Represent a SQL ``EXTRACT(field FROM expr)`` expression."""
    __visit_name__ = 'extract'

    def __init__(self, field, expr, **kwargs):
        self.field = field
        self.expr = _literal_as_binds(expr, None)
        # EXTRACT always yields an integer result.
        self.type = sqltypes.Integer()

    def _copy_internals(self, clone=_clone):
        self.expr = clone(self.expr)

    def get_children(self, **kwargs):
        return (self.expr,)

    @property
    def _from_objects(self):
        return self.expr._from_objects
class _UnaryExpression(ColumnElement):
    """Represent a unary SQL expression: a prefix ``operator`` and/or a
    postfix ``modifier`` applied to a single element (e.g. ``NOT x``,
    ``x DESC``)."""
    __visit_name__ = 'unary'
    def __init__(self, element, operator=None, modifier=None,
                            type_=None, negate=None):
        self.operator = operator
        self.modifier = modifier
        # Parenthesize the element if its precedence requires it
        # relative to whichever of operator/modifier is present.
        self.element = _literal_as_text(element).\
                        self_group(against=self.operator or self.modifier)
        self.type = sqltypes.to_instance(type_)
        # ``negate`` is the operator used when this expression is
        # inverted via _negate().
        self.negate = negate
    @property
    def _from_objects(self):
        return self.element._from_objects
    def _copy_internals(self, clone=_clone):
        self.element = clone(self.element)
    def get_children(self, **kwargs):
        return self.element,
    def compare(self, other, **kw):
        """Compare this :class:`_UnaryExpression` against the given
        :class:`ClauseElement`."""
        return (
            isinstance(other, _UnaryExpression) and
            self.operator == other.operator and
            self.modifier == other.modifier and
            self.element.compare(other.element, **kw)
        )
    def _negate(self):
        # Swap operator and its negation when one was supplied;
        # otherwise fall back to wrapping in NOT via the superclass.
        if self.negate is not None:
            return _UnaryExpression(
                self.element,
                operator=self.negate,
                negate=self.operator,
                modifier=self.modifier,
                type_=self.type)
        else:
            return super(_UnaryExpression, self)._negate()
    def self_group(self, against=None):
        # Parenthesize only when our operator binds less tightly than
        # the operator we're being placed against.
        if self.operator and operators.is_precedent(self.operator,
                                against):
            return _Grouping(self)
        else:
            return self
class _BinaryExpression(ColumnElement):
    """Represent an expression that is ``LEFT <operator> RIGHT``."""

    __visit_name__ = 'binary'

    def __init__(self, left, right, operator, type_=None,
                    negate=None, modifiers=None):
        # Each side is parenthesized if required by the operator's
        # precedence.
        self.left = _literal_as_text(left).self_group(against=operator)
        self.right = _literal_as_text(right).self_group(against=operator)
        self.operator = operator
        self.type = sqltypes.to_instance(type_)
        # ``negate`` holds the operator used when this expression is
        # inverted via _negate(), e.g. ``==`` <-> ``!=``.
        self.negate = negate
        if modifiers is None:
            self.modifiers = {}
        else:
            self.modifiers = modifiers

    def __nonzero__(self):
        # Allow "col1 == col2" style constructs to be evaluated as a
        # Python boolean by applying the operator to the hashes of the
        # two sides; any operator/operand combination that can't do this
        # is not boolean-evaluable.
        try:
            return self.operator(hash(self.left), hash(self.right))
        except Exception:
            # FIX: was a bare ``except:``, which also converted
            # SystemExit/KeyboardInterrupt into TypeError.  Catching
            # Exception preserves behavior for all ordinary errors
            # (TypeError from non-boolean operators, unhashable
            # operands) while letting control-flow exceptions propagate.
            raise TypeError("Boolean value of this clause is not defined")

    @property
    def _from_objects(self):
        return self.left._from_objects + self.right._from_objects

    def _copy_internals(self, clone=_clone):
        self.left = clone(self.left)
        self.right = clone(self.right)

    def get_children(self, **kwargs):
        return self.left, self.right

    def compare(self, other, **kw):
        """Compare this :class:`_BinaryExpression` against the
        given :class:`_BinaryExpression`.

        For commutative operators, the two sides may match in either
        order.
        """
        return (
            isinstance(other, _BinaryExpression) and
            self.operator == other.operator and
            (
                self.left.compare(other.left, **kw) and
                self.right.compare(other.right, **kw) or
                (
                    operators.is_commutative(self.operator) and
                    self.left.compare(other.right, **kw) and
                    self.right.compare(other.left, **kw)
                )
            )
        )

    def self_group(self, against=None):
        # use small/large defaults for comparison so that unknown
        # operators are always parenthesized
        if self.operator is not against and \
                operators.is_precedent(self.operator, against):
            return _Grouping(self)
        else:
            return self

    def _negate(self):
        # Swap operator and its negation when one was supplied;
        # otherwise fall back to wrapping in NOT via the superclass.
        if self.negate is not None:
            return _BinaryExpression(
                self.left,
                self.right,
                self.negate,
                negate=self.operator,
                type_=sqltypes.BOOLEANTYPE,
                modifiers=self.modifiers)
        else:
            return super(_BinaryExpression, self)._negate()
class _Exists(_UnaryExpression):
    """Represent an ``EXISTS`` clause wrapping a scalar subquery."""
    __visit_name__ = _UnaryExpression.__visit_name__
    # EXISTS renders its subquery inline; it contributes no FROM
    # objects of its own (shared immutable empty list).
    _from_objects = []
    def __init__(self, *args, **kwargs):
        # Accept either an existing select/scalar-select as the sole
        # argument, or select() arguments (defaulting to SELECT *).
        if args and isinstance(args[0], (_SelectBaseMixin, _ScalarSelect)):
            s = args[0]
        else:
            if not args:
                args = ([literal_column('*')],)
            s = select(*args, **kwargs).as_scalar().self_group()
        _UnaryExpression.__init__(self, s, operator=operators.exists,
                                  type_=sqltypes.Boolean)
    def select(self, whereclause=None, **params):
        """Produce a select() of this EXISTS clause."""
        return select([self], whereclause, **params)
    def correlate(self, fromclause):
        """Return a new exists() with the given FROM clause correlated
        to an enclosing select."""
        e = self._clone()
        e.element = self.element.correlate(fromclause).self_group()
        return e
    def select_from(self, clause):
        """return a new exists() construct with the given expression set as
        its FROM clause.
        """
        e = self._clone()
        e.element = self.element.select_from(clause).self_group()
        return e
    def where(self, clause):
        """return a new exists() construct with the given expression added to
        its WHERE clause, joined to the existing clause via AND, if any.
        """
        e = self._clone()
        e.element = self.element.where(clause).self_group()
        return e
class Join(FromClause):
    """represent a ``JOIN`` construct between two :class:`FromClause`
    elements.
    The public constructor function for :class:`Join` is the module-level
    :func:`join()` function, as well as the :func:`join()` method available
    off all :class:`FromClause` subclasses.
    """
    __visit_name__ = 'join'
    def __init__(self, left, right, onclause=None, isouter=False):
        self.left = _literal_as_text(left)
        self.right = _literal_as_text(right).self_group()
        # Derive the ON clause from foreign keys when not given
        # explicitly.
        if onclause is None:
            self.onclause = self._match_primaries(self.left, self.right)
        else:
            self.onclause = onclause
        self.isouter = isouter
        self.__folded_equivalents = None
    @property
    def description(self):
        return "Join object on %s(%d) and %s(%d)" % (
            self.left.description,
            id(self.left),
            self.right.description,
            id(self.right))
    def is_derived_from(self, fromclause):
        # A join is "derived from" a clause if either side is.
        return fromclause is self or \
                self.left.is_derived_from(fromclause) or\
                self.right.is_derived_from(fromclause)
    def self_group(self, against=None):
        return _FromGrouping(self)
    def _populate_column_collection(self):
        # Export the combined columns of both sides; the primary key is
        # reduced against the ON clause so equivalent columns collapse.
        columns = [c for c in self.left.columns] + \
                        [c for c in self.right.columns]
        # Deferred import of sql.util to avoid a circular dependency at
        # module import time.
        global sql_util
        if not sql_util:
            from sqlalchemy.sql import util as sql_util
        self._primary_key.extend(sql_util.reduce_columns(
                (c for c in columns if c.primary_key), self.onclause))
        self._columns.update((col._label, col) for col in columns)
        self._foreign_keys.update(itertools.chain(
                        *[col.foreign_keys for col in columns]))
    def _copy_internals(self, clone=_clone):
        self._reset_exported()
        self.left = clone(self.left)
        self.right = clone(self.right)
        self.onclause = clone(self.onclause)
        self.__folded_equivalents = None
    def get_children(self, **kwargs):
        return self.left, self.right, self.onclause
    def _match_primaries(self, left, right):
        # Build an ON clause from foreign key relationships between the
        # two sides.  See _populate_column_collection re: deferred
        # import.
        global sql_util
        if not sql_util:
            from sqlalchemy.sql import util as sql_util
        if isinstance(left, Join):
            # When the left side is itself a join, prefer matching
            # against its right-most element first.
            left_right = left.right
        else:
            left_right = None
        return sql_util.join_condition(left, right, a_subset=left_right)
    def select(self, whereclause=None, fold_equivalents=False, **kwargs):
        """Create a :class:`Select` from this :class:`Join`.
        :param whereclause: the WHERE criterion that will be sent to
        the :func:`select()` function
        :param fold_equivalents: based on the join criterion of this
        :class:`Join`, do not include
        repeat column names in the column list of the resulting
        select, for columns that are calculated to be "equivalent"
        based on the join criterion of this :class:`Join`. This will
        recursively apply to any joins directly nested by this one
        as well.
        :param \**kwargs: all other kwargs are sent to the
        underlying :func:`select()` function.
        """
        global sql_util
        if not sql_util:
            from sqlalchemy.sql import util as sql_util
        if fold_equivalents:
            collist = sql_util.folded_equivalents(self)
        else:
            collist = [self.left, self.right]
        return select(collist, whereclause, from_obj=[self], **kwargs)
    @property
    def bind(self):
        # First bound engine found on either side.
        return self.left.bind or self.right.bind
    def alias(self, name=None):
        """Create a :class:`Select` out of this :class:`Join` clause and
        return an :class:`Alias` of it.
        The :class:`Select` is not correlating.
        """
        return self.select(use_labels=True, correlate=False).alias(name)
    @property
    def _hide_froms(self):
        # The two joined elements should not render separately in an
        # enclosing FROM list; check the whole cloned lineage.
        return itertools.chain(*[_from_objects(x.left, x.right)
                                 for x in self._cloned_set])
    @property
    def _from_objects(self):
        return [self] + \
                self.onclause._from_objects + \
                self.left._from_objects + \
                self.right._from_objects
class Alias(FromClause):
    """Represents an table or selectable alias (AS).
    Represents an alias, as typically applied to any table or
    sub-select within a SQL statement using the ``AS`` keyword (or
    without the keyword on certain databases such as Oracle).
    This object is constructed from the :func:`alias()` module level
    function as well as the :func:`alias()` method available on all
    :class:`FromClause` subclasses.
    """
    __visit_name__ = 'alias'
    named_with_column = True
    def __init__(self, selectable, alias=None):
        # Unwrap nested aliases so ``original`` always points at the
        # innermost non-alias selectable.
        baseselectable = selectable
        while isinstance(baseselectable, Alias):
            baseselectable = baseselectable.element
        self.original = baseselectable
        self.supports_execution = baseselectable.supports_execution
        if self.supports_execution:
            self._execution_options = baseselectable._execution_options
        self.element = selectable
        if alias is None:
            # Generate an anonymous, deduplicated label based on the
            # original's name (or 'anon').
            if self.original.named_with_column:
                alias = getattr(self.original, 'name', None)
            alias = _generated_label('%%(%d %s)s' % (id(self), alias
                    or 'anon'))
        self.name = alias
    @property
    def description(self):
        # Py3K
        #return self.name
        # Py2K
        return self.name.encode('ascii', 'backslashreplace')
        # end Py2K
    def as_scalar(self):
        """Return a scalar representation of the aliased element.

        :raise AttributeError: if the element does not support
            ``as_scalar()``.
        """
        try:
            return self.element.as_scalar()
        except AttributeError:
            raise AttributeError("Element %s does not support "
                                 "'as_scalar()'" % self.element)
    def is_derived_from(self, fromclause):
        if fromclause in self._cloned_set:
            return True
        return self.element.is_derived_from(fromclause)
    def _populate_column_collection(self):
        # Export proxies of the underlying element's columns against
        # this alias.
        for col in self.element.columns:
            col._make_proxy(self)
    def _copy_internals(self, clone=_clone):
        self._reset_exported()
        self.element = _clone(self.element)
        # Re-derive ``original`` from the freshly cloned element.
        baseselectable = self.element
        while isinstance(baseselectable, Alias):
            baseselectable = baseselectable.element
        self.original = baseselectable
    def get_children(self, column_collections=True,
                        aliased_selectables=True, **kwargs):
        if column_collections:
            for c in self.c:
                yield c
        if aliased_selectables:
            yield self.element
    @property
    def _from_objects(self):
        return [self]
    @property
    def bind(self):
        return self.element.bind
class _Grouping(ColumnElement):
    """Represent a grouping within a column expression"""
    __visit_name__ = 'grouping'
    def __init__(self, element):
        self.element = element
        self.type = getattr(element, 'type', None)
    @property
    def _label(self):
        return getattr(self.element, '_label', None) or self.anon_label
    def _copy_internals(self, clone=_clone):
        self.element = clone(self.element)
    def get_children(self, **kwargs):
        return self.element,
    @property
    def _from_objects(self):
        return self.element._from_objects
    def __getattr__(self, attr):
        # Delegate any attribute not found here to the wrapped element,
        # so a grouping is transparent to callers.
        return getattr(self.element, attr)
    def __getstate__(self):
        # Explicit pickle support: __getattr__ delegation would
        # otherwise interfere with default pickling.
        return {'element':self.element, 'type':self.type}
    def __setstate__(self, state):
        self.element = state['element']
        self.type = state['type']
class _FromGrouping(FromClause):
    """Represent a parenthesized grouping of a FROM clause."""
    __visit_name__ = 'grouping'

    def __init__(self, element):
        self.element = element

    def get_children(self, **kwargs):
        return (self.element,)

    def _copy_internals(self, clone=_clone):
        self.element = clone(self.element)

    @property
    def columns(self):
        return self.element.columns

    @property
    def _hide_froms(self):
        return self.element._hide_froms

    @property
    def _from_objects(self):
        return self.element._from_objects

    def __getattr__(self, attr):
        # Transparently delegate any other attribute access to the
        # wrapped element.
        return getattr(self.element, attr)

    def __getstate__(self):
        # Explicit pickle support; __getattr__ delegation would
        # otherwise confuse default pickling.
        return {'element': self.element}

    def __setstate__(self, state):
        self.element = state['element']
class _Label(ColumnElement):
    """Represents a column label (AS).
    Represent a label, as typically applied to any column-level
    element using the ``AS`` sql keyword.
    This object is constructed from the :func:`label()` module level
    function as well as the :func:`label()` method available on all
    :class:`ColumnElement` subclasses.
    """
    __visit_name__ = 'label'
    def __init__(self, name, element, type_=None):
        # Collapse nested labels down to the innermost element.
        while isinstance(element, _Label):
            element = element.element
        # Generate an anonymous label when no name is given.
        self.name = self.key = self._label = name \
                    or _generated_label('%%(%d %s)s' % (id(self),
                            getattr(element, 'name', 'anon')))
        self._element = element
        self._type = type_
        self.quote = element.quote
        self.proxies = [element]
    @util.memoized_property
    def type(self):
        # Explicit type wins; otherwise fall back to the element's.
        return sqltypes.to_instance(
                    self._type or getattr(self._element, 'type', None)
                )
    @util.memoized_property
    def element(self):
        # Group the element relative to the AS operator, lazily.
        return self._element.self_group(against=operators.as_)
    @property
    def primary_key(self):
        return self.element.primary_key
    @property
    def foreign_keys(self):
        return self.element.foreign_keys
    def get_children(self, **kwargs):
        return self.element,
    def _copy_internals(self, clone=_clone):
        self.element = clone(self.element)
    @property
    def _from_objects(self):
        return self.element._from_objects
    def _make_proxy(self, selectable, name = None):
        # Proxy the underlying element under this label's name; textual
        # elements get a plain column() proxy instead.
        if isinstance(self.element, (Selectable, ColumnElement)):
            e = self.element._make_proxy(selectable, name=self.name)
        else:
            e = column(self.name)._make_proxy(selectable=selectable)
        e.proxies.append(self)
        return e
class ColumnClause(_Immutable, ColumnElement):
    """Represents a generic column expression from any textual string.
    This includes columns associated with tables, aliases and select
    statements, but also any arbitrary text. May or may not be bound
    to an underlying :class:`Selectable`. :class:`ColumnClause` is usually
    created publically via the :func:`column()` function or the
    :func:`literal_column()` function.
    text
    the text of the element.
    selectable
    parent selectable.
    type
    ``TypeEngine`` object which can associate this :class:`ColumnClause`
    with a type.
    is_literal
    if True, the :class:`ColumnClause` is assumed to be an exact
    expression that will be delivered to the output with no quoting
    rules applied regardless of case sensitive settings. the
    :func:`literal_column()` function is usually used to create such a
    :class:`ColumnClause`.
    """
    __visit_name__ = 'column'
    # These schema-level attributes are always absent on a plain
    # ColumnClause (present only on schema.Column).
    onupdate = default = server_default = server_onupdate = None
    def __init__(self, text, selectable=None, type_=None, is_literal=False):
        self.key = self.name = text
        self.table = selectable
        self.type = sqltypes.to_instance(type_)
        self.is_literal = is_literal
    @util.memoized_property
    def description(self):
        # Py3K
        #return self.name
        # Py2K
        return self.name.encode('ascii', 'backslashreplace')
        # end Py2K
    @util.memoized_property
    def _label(self):
        # Compute the "tablename_columnname" style label used with
        # use_labels; literal columns are never labeled.
        if self.is_literal:
            return None
        elif self.table is not None and self.table.named_with_column:
            if getattr(self.table, 'schema', None):
                label = self.table.schema.replace('.', '_') + "_" + \
                        _escape_for_generated(self.table.name) + "_" + \
                        _escape_for_generated(self.name)
            else:
                label = _escape_for_generated(self.table.name) + "_" + \
                        _escape_for_generated(self.name)
            # ensure the label name doesn't conflict with that
            # of an existing column
            if label in self.table.c:
                _label = label
                counter = 1
                while _label in self.table.c:
                    _label = label + "_" + str(counter)
                    counter += 1
                label = _label
            return _generated_label(label)
        else:
            return self.name
    def label(self, name):
        # Labeling with None is a no-op for a plain column.
        if name is None:
            return self
        else:
            return super(ColumnClause, self).label(name)
    @property
    def _from_objects(self):
        if self.table is not None:
            return [self.table]
        else:
            return []
    def _bind_param(self, operator, obj):
        # Bind parameter named after this column, typed relative to it.
        return _BindParamClause(self.name, obj,
                                _compared_to_operator=operator,
                                _compared_to_type=self.type,
                                unique=True)
    def _make_proxy(self, selectable, name=None, attach=True):
        # propagate the "is_literal" flag only if we are keeping our name,
        # otherwise its considered to be a label
        is_literal = self.is_literal and (name is None or name == self.name)
        c = self._constructor(
                    name or self.name,
                    selectable=selectable,
                    type_=self.type,
                    is_literal=is_literal
                )
        c.proxies = [self]
        if attach:
            selectable.columns[c.name] = c
        return c
class TableClause(_Immutable, FromClause):
    """Represents a "table" construct.
    Note that this represents tables only as another syntactical
    construct within SQL expressions; it does not provide schema-level
    functionality.
    """
    __visit_name__ = 'table'
    named_with_column = True
    def __init__(self, name, *columns):
        super(TableClause, self).__init__()
        self.name = self.fullname = name
        self._columns = ColumnCollection()
        # A lightweight table has no real primary/foreign key metadata;
        # these stay empty unless populated externally.
        self._primary_key = ColumnSet()
        self._foreign_keys = set()
        for c in columns:
            self.append_column(c)
    def _export_columns(self):
        # Columns are appended directly; the export mechanism of
        # FromClause does not apply here.
        raise NotImplementedError()
    @util.memoized_property
    def description(self):
        # Py3K
        #return self.name
        # Py2K
        return self.name.encode('ascii', 'backslashreplace')
        # end Py2K
    def append_column(self, c):
        """Associate the given :class:`ColumnClause` with this table."""
        self._columns[c.name] = c
        c.table = self
    def get_children(self, column_collections=True, **kwargs):
        if column_collections:
            return [c for c in self.c]
        else:
            return []
    def count(self, whereclause=None, **params):
        """return a SELECT COUNT generated against this
        :class:`TableClause`."""
        # Count on the first primary key column when present, otherwise
        # on the first column.
        if self.primary_key:
            col = list(self.primary_key)[0]
        else:
            col = list(self.columns)[0]
        return select(
                    [func.count(col).label('tbl_row_count')],
                    whereclause,
                    from_obj=[self],
                    **params)
    def insert(self, values=None, inline=False, **kwargs):
        """Generate an :func:`insert()` construct."""
        return insert(self, values=values, inline=inline, **kwargs)
    def update(self, whereclause=None, values=None, inline=False, **kwargs):
        """Generate an :func:`update()` construct."""
        return update(self, whereclause=whereclause,
                            values=values, inline=inline, **kwargs)
    def delete(self, whereclause=None, **kwargs):
        """Generate a :func:`delete()` construct."""
        return delete(self, whereclause, **kwargs)
    @property
    def _from_objects(self):
        return [self]
class _SelectBaseMixin(Executable):
    """Base class for :class:`Select` and ``CompoundSelects``.

    Provides the generative methods (limit/offset/order_by/group_by,
    etc.) shared by both.  "Generative" methods operate on a shallow
    copy of self produced by :meth:`_generate` and return that copy.
    """
    def __init__(self,
            use_labels=False,
            for_update=False,
            limit=None,
            offset=None,
            order_by=None,
            group_by=None,
            bind=None,
            autocommit=None):
        self.use_labels = use_labels
        self.for_update = for_update
        # 'autocommit' as a constructor argument is deprecated in favor
        # of execution_options(autocommit=True).
        if autocommit is not None:
            util.warn_deprecated('autocommit on select() is '
                                 'deprecated. Use .execution_options(a'
                                 'utocommit=True)')
            self._execution_options = \
                self._execution_options.union({'autocommit'
                    : autocommit})
        self._limit = limit
        self._offset = offset
        self._bind = bind
        self._order_by_clause = ClauseList(*util.to_list(order_by) or [])
        self._group_by_clause = ClauseList(*util.to_list(group_by) or [])
    def as_scalar(self):
        """return a 'scalar' representation of this selectable, which can be
        used as a column expression.
        Typically, a select statement which has only one column in its columns
        clause is eligible to be used as a scalar expression.
        The returned object is an instance of
        :class:`_ScalarSelect`.
        """
        return _ScalarSelect(self)
    @_generative
    def apply_labels(self):
        """return a new selectable with the 'use_labels' flag set to True.
        This will result in column expressions being generated using labels
        against their table name, such as "SELECT somecolumn AS
        tablename_somecolumn". This allows selectables which contain multiple
        FROM clauses to produce a unique set of column names regardless of
        name conflicts among the individual FROM clauses.
        """
        self.use_labels = True
    def label(self, name):
        """return a 'scalar' representation of this selectable, embedded as a
        subquery with a label.
        See also ``as_scalar()``.
        """
        return self.as_scalar().label(name)
    @_generative
    @util.deprecated('0.6',
                     message=":func:`.autocommit` is deprecated. Use "
                             ":func:`.Executable.execution_options` with the "
                             "'autocommit' flag.")
    def autocommit(self):
        """return a new selectable with the 'autocommit' flag set to
        True."""
        self._execution_options = \
            self._execution_options.union({'autocommit': True})
    def _generate(self):
        """Override the default _generate() method to also clear out
        exported collections."""
        s = self.__class__.__new__(self.__class__)
        s.__dict__ = self.__dict__.copy()
        s._reset_exported()
        return s
    @_generative
    def limit(self, limit):
        """return a new selectable with the given LIMIT criterion
        applied."""
        self._limit = limit
    @_generative
    def offset(self, offset):
        """return a new selectable with the given OFFSET criterion
        applied."""
        self._offset = offset
    @_generative
    def order_by(self, *clauses):
        """return a new selectable with the given list of ORDER BY
        criterion applied.
        The criterion will be appended to any pre-existing ORDER BY
        criterion.
        """
        self.append_order_by(*clauses)
    @_generative
    def group_by(self, *clauses):
        """return a new selectable with the given list of GROUP BY
        criterion applied.
        The criterion will be appended to any pre-existing GROUP BY
        criterion.
        """
        self.append_group_by(*clauses)
    def append_order_by(self, *clauses):
        """Append the given ORDER BY criterion applied to this selectable.
        The criterion will be appended to any pre-existing ORDER BY criterion.

        Passing a single ``None`` clears any existing ORDER BY.
        """
        if len(clauses) == 1 and clauses[0] is None:
            self._order_by_clause = ClauseList()
        else:
            if getattr(self, '_order_by_clause', None) is not None:
                clauses = list(self._order_by_clause) + list(clauses)
            self._order_by_clause = ClauseList(*clauses)
    def append_group_by(self, *clauses):
        """Append the given GROUP BY criterion applied to this selectable.
        The criterion will be appended to any pre-existing GROUP BY criterion.

        Passing a single ``None`` clears any existing GROUP BY.
        """
        if len(clauses) == 1 and clauses[0] is None:
            self._group_by_clause = ClauseList()
        else:
            if getattr(self, '_group_by_clause', None) is not None:
                clauses = list(self._group_by_clause) + list(clauses)
            self._group_by_clause = ClauseList(*clauses)
    @property
    def _from_objects(self):
        return [self]
class _ScalarSelect(_Grouping):
    """Wrap a select() so it can be used as a column expression."""
    # A scalar select renders inline; it contributes no FROM objects
    # (shared immutable empty list).
    _from_objects = []
    def __init__(self, element):
        self.element = element
        # Type is taken from the first (typically only) column of the
        # wrapped select.
        cols = list(element.c)
        self.type = cols[0].type
    @property
    def columns(self):
        raise exc.InvalidRequestError('Scalar Select expression has no '
                'columns; use this object directly within a '
                'column-level expression.')
    c = columns
    def self_group(self, **kwargs):
        # Already a grouping; no further parenthesization needed.
        return self
    def _make_proxy(self, selectable, name):
        return list(self.inner_columns)[0]._make_proxy(selectable, name)
class CompoundSelect(_SelectBaseMixin, FromClause):
    """Forms the basis of ``UNION``, ``UNION ALL``, and other
    SELECT-based set operations."""
    __visit_name__ = 'compound_select'
    # Set-operation keywords, used as the ``keyword`` constructor
    # argument.
    UNION = util.symbol('UNION')
    UNION_ALL = util.symbol('UNION ALL')
    EXCEPT = util.symbol('EXCEPT')
    EXCEPT_ALL = util.symbol('EXCEPT ALL')
    INTERSECT = util.symbol('INTERSECT')
    INTERSECT_ALL = util.symbol('INTERSECT ALL')
    def __init__(self, keyword, *selects, **kwargs):
        self._should_correlate = kwargs.pop('correlate', False)
        self.keyword = keyword
        self.selects = []
        # All member selects must have the same column count; track it
        # from the first one.
        numcols = None
        # some DBs do not like ORDER BY in the inner queries of a UNION, etc.
        for n, s in enumerate(selects):
            s = _clause_element_as_expr(s)
            if not numcols:
                numcols = len(s.c)
            elif len(s.c) != numcols:
                raise exc.ArgumentError('All selectables passed to '
                        'CompoundSelect must have identical numbers of '
                        'columns; select #%d has %d columns, select '
                        '#%d has %d' % (1, len(self.selects[0].c), n
                            + 1, len(s.c)))
            self.selects.append(s.self_group(self))
        _SelectBaseMixin.__init__(self, **kwargs)
    def self_group(self, against=None):
        return _FromGrouping(self)
    def is_derived_from(self, fromclause):
        # Derived from a clause if any member select is.
        for s in self.selects:
            if s.is_derived_from(fromclause):
                return True
        return False
    def _populate_column_collection(self):
        for cols in zip(*[s.c for s in self.selects]):
            # this is a slightly hacky thing - the union exports a
            # column that resembles just that of the *first* selectable.
            # to get at a "composite" column, particularly foreign keys,
            # you have to dig through the proxies collection which we
            # generate below. We may want to improve upon this, such as
            # perhaps _make_proxy can accept a list of other columns
            # that are "shared" - schema.column can then copy all the
            # ForeignKeys in. this would allow the union() to have all
            # those fks too.
            proxy = cols[0]._make_proxy(self, name=self.use_labels
                    and cols[0]._label or None)
            # hand-construct the "proxies" collection to include all
            # derived columns place a 'weight' annotation corresponding
            # to how low in the list of select()s the column occurs, so
            # that the corresponding_column() operation can resolve
            # conflicts
            proxy.proxies = [c._annotate({'weight': i + 1}) for (i,
                             c) in enumerate(cols)]
    def _copy_internals(self, clone=_clone):
        self._reset_exported()
        self.selects = [clone(s) for s in self.selects]
        if hasattr(self, '_col_map'):
            del self._col_map
        for attr in ('_order_by_clause', '_group_by_clause'):
            if getattr(self, attr) is not None:
                setattr(self, attr, clone(getattr(self, attr)))
    def get_children(self, column_collections=True, **kwargs):
        return (column_collections and list(self.c) or []) \
            + [self._order_by_clause, self._group_by_clause] \
            + list(self.selects)
    def bind(self):
        # First bound engine found: explicit _bind wins, then the first
        # member select with one.
        if self._bind:
            return self._bind
        for s in self.selects:
            e = s.bind
            if e:
                return e
        else:
            return None
    def _set_bind(self, bind):
        self._bind = bind
    # ``bind`` acts as a read/write property backed by _bind.
    bind = property(bind, _set_bind)
class Select(_SelectBaseMixin, FromClause):
"""Represents a ``SELECT`` statement.
Select statements support appendable clauses, as well as the
ability to execute themselves and return a result set.
"""
__visit_name__ = 'select'
_prefixes = ()
_hints = util.frozendict()
    def __init__(self,
                columns,
                whereclause=None,
                from_obj=None,
                distinct=False,
                having=None,
                correlate=True,
                prefixes=None,
                **kwargs):
        """Construct a Select object.
        The public constructor for Select is the
        :func:`select` function; see that function for
        argument descriptions.
        Additional generative and mutator methods are available on the
        :class:`_SelectBaseMixin` superclass.
        """
        self._should_correlate = correlate
        self._distinct = distinct
        # Explicitly-requested correlation targets; empty until
        # correlate() is called.
        self._correlate = set()
        self._froms = util.OrderedSet()
        try:
            cols_present = bool(columns)
        except TypeError:
            raise exc.ArgumentError("columns argument to select() must "
                                    "be a Python list or other iterable")
        if cols_present:
            self._raw_columns = []
            for c in columns:
                c = _literal_as_column(c)
                # Scalar selects are grouped so they render
                # parenthesized within the columns clause.
                if isinstance(c, _ScalarSelect):
                    c = c.self_group(against=operators.comma_op)
                self._raw_columns.append(c)
            # FROM objects are accumulated from the column expressions.
            self._froms.update(_from_objects(*self._raw_columns))
        else:
            self._raw_columns = []
        if whereclause is not None:
            self._whereclause = _literal_as_text(whereclause)
            self._froms.update(_from_objects(self._whereclause))
        else:
            self._whereclause = None
        if from_obj is not None:
            for f in util.to_list(from_obj):
                # Plain strings become textual FROM clauses.
                if _is_literal(f):
                    self._froms.add(_TextClause(f))
                else:
                    self._froms.add(f)
        if having is not None:
            self._having = _literal_as_text(having)
        else:
            self._having = None
        if prefixes:
            self._prefixes = tuple([_literal_as_text(p) for p in prefixes])
        _SelectBaseMixin.__init__(self, **kwargs)
    def _get_display_froms(self, existing_froms=None):
        """Return the full list of 'from' clauses to be displayed.
        Takes into account a set of existing froms which may be
        rendered in the FROM clause of enclosing selects; this Select
        may want to leave those absent if it is automatically
        correlating.
        """
        froms = self._froms
        # Remove FROM elements hidden by others (e.g. the two sides of
        # a Join are hidden by the Join itself).
        toremove = itertools.chain(*[f._hide_froms for f in froms])
        if toremove:
            froms = froms.difference(toremove)
        if len(froms) > 1 or self._correlate:
            # Explicit correlation targets are always removed.
            if self._correlate:
                froms = froms.difference(_cloned_intersection(froms,
                        self._correlate))
            # Auto-correlation: drop FROMs already rendered by an
            # enclosing select.
            if self._should_correlate and existing_froms:
                froms = froms.difference(_cloned_intersection(froms,
                        existing_froms))
                if not len(froms):
                    raise exc.InvalidRequestError("Select statement '%s"
                            "' returned no FROM clauses due to "
                            "auto-correlation; specify "
                            "correlate(<tables>) to control "
                            "correlation manually." % self)
        return froms
    @property
    def froms(self):
        """Return the displayed list of FromClause elements."""
        # Without enclosing context, no auto-correlation applies.
        return self._get_display_froms()
    @_generative
    def with_hint(self, selectable, text, dialect_name=None):
        """Add an indexing hint for the given selectable to this
        :class:`Select`.
        The text of the hint is written specific to a specific backend, and
        typically uses Python string substitution syntax to render the name
        of the table or alias, such as for Oracle::
        select([mytable]).with_hint(mytable, "+ index(%(name)s
        ix_mytable)")
        Would render SQL as::
        select /*+ index(mytable ix_mytable) */ ... from mytable
        The ``dialect_name`` option will limit the rendering of a particular
        hint to a particular backend. Such as, to add hints for both Oracle
        and Sybase simultaneously::
        select([mytable]).\
        with_hint(mytable, "+ index(%(name)s ix_mytable)", 'oracle').\
        with_hint(mytable, "WITH INDEX ix_mytable", 'sybase')
        """
        # '*' means the hint applies to all dialects.
        if not dialect_name:
            dialect_name = '*'
        self._hints = self._hints.union({(selectable, dialect_name):text})
    @property
    def type(self):
        # A full SELECT has no single column type; direct callers to
        # as_scalar() instead.
        raise exc.InvalidRequestError("Select objects don't have a type. "
                    "Call as_scalar() on this Select object "
                    "to return a 'scalar' version of this Select.")
    @util.memoized_instancemethod
    def locate_all_froms(self):
        """return a Set of all FromClause elements referenced by this Select.
        This set is a superset of that returned by the ``froms`` property,
        which is specifically for those FromClause elements that would
        actually be rendered.
        """
        # Include FROMs reachable indirectly through the direct ones.
        return self._froms.union(_from_objects(*list(self._froms)))
    @property
    def inner_columns(self):
        """an iterator of all ColumnElement expressions which would
        be rendered into the columns clause of the resulting SELECT statement.
        """
        return _select_iterables(self._raw_columns)
    def is_derived_from(self, fromclause):
        """Return True if this Select is derived from ``fromclause``,
        either directly (via the clone lineage) or through any of its
        FROM elements."""
        if self in fromclause._cloned_set:
            return True
        for f in self.locate_all_froms():
            if f.is_derived_from(fromclause):
                return True
        return False
    def _copy_internals(self, clone=_clone):
        # Clone all child clauses in place, keeping the froms/correlate
        # sets consistent by cloning each FROM exactly once.
        self._reset_exported()
        from_cloned = dict((f, clone(f))
                           for f in self._froms.union(self._correlate))
        self._froms = util.OrderedSet(from_cloned[f] for f in self._froms)
        self._correlate = set(from_cloned[f] for f in self._correlate)
        self._raw_columns = [clone(c) for c in self._raw_columns]
        for attr in '_whereclause', '_having', '_order_by_clause', \
            '_group_by_clause':
            if getattr(self, attr) is not None:
                setattr(self, attr, clone(getattr(self, attr)))
    def get_children(self, column_collections=True, **kwargs):
        """return child elements as per the ClauseElement specification."""
        # Exported columns (optionally), raw columns, FROMs, then the
        # optional clause attributes that are present.
        return (column_collections and list(self.columns) or []) + \
            self._raw_columns + list(self._froms) + \
            [x for x in
                (self._whereclause, self._having,
                    self._order_by_clause, self._group_by_clause)
            if x is not None]
    @_generative
    def column(self, column):
        """return a new select() construct with the given column expression
        added to its columns clause.
        """
        column = _literal_as_column(column)
        # Scalar selects render parenthesized within the columns clause.
        if isinstance(column, _ScalarSelect):
            column = column.self_group(against=operators.comma_op)
        self._raw_columns = self._raw_columns + [column]
        # The new column may introduce additional FROM objects.
        self._froms = self._froms.union(_from_objects(column))
@_generative
def with_only_columns(self, columns):
"""return a new select() construct with its columns clause replaced
with the given columns.
"""
self._raw_columns = [
isinstance(c, _ScalarSelect) and
c.self_group(against=operators.comma_op) or c
for c in [_literal_as_column(c) for c in columns]
]
    @_generative
    def where(self, whereclause):
        """return a new select() construct with the given expression added to
        its WHERE clause, joined to the existing clause via AND, if any.
        """
        self.append_whereclause(whereclause)
    @_generative
    def having(self, having):
        """return a new select() construct with the given expression added to
        its HAVING clause, joined to the existing clause via AND, if any.
        """
        self.append_having(having)
@_generative
def distinct(self):
    """return a new select() construct which will apply DISTINCT to its
    columns clause.
    """
    # flag read at compile time (presumably by the statement compiler)
    self._distinct = True
@_generative
def prefix_with(self, clause):
    """return a new select() construct which will apply the given
    expression to the start of its columns clause, not using any commas.
    """
    clause = _literal_as_text(clause)
    # prefixes accumulate; they are kept in an immutable tuple
    self._prefixes = self._prefixes + (clause,)
@_generative
def select_from(self, fromclause):
    """return a new select() construct with the given FROM expression
    applied to its list of FROM objects.
    """
    fromclause = _literal_as_text(fromclause)
    self._froms = self._froms.union([fromclause])
@_generative
def correlate(self, *fromclauses):
    """return a new select() construct which will correlate the given FROM
    clauses to that of an enclosing select(), if a match is found.

    By "match", the given fromclause must be present in this select's
    list of FROM objects and also present in an enclosing select's list
    of FROM objects.

    Calling this method turns off the select's default behavior of
    "auto-correlation". Normally, select() auto-correlates all of its
    FROM clauses to those of an embedded select when compiled.

    If the fromclause is None, correlation is disabled for the returned
    select().
    """
    # any explicit use of correlate() disables auto-correlation
    self._should_correlate = False
    resetting = fromclauses == (None,)
    self._correlate = (
        set() if resetting else self._correlate.union(fromclauses)
    )
def append_correlation(self, fromclause):
    """append the given correlation expression to this select()
    construct."""
    # explicit correlation disables the auto-correlation behavior
    self._should_correlate = False
    self._correlate = self._correlate.union([fromclause])
def append_column(self, column):
    """append the given column expression to the columns clause of this
    select() construct.
    """
    expr = _literal_as_column(column)
    # scalar subqueries must be grouped when placed in a column list
    if isinstance(expr, _ScalarSelect):
        expr = expr.self_group(against=operators.comma_op)
    self._raw_columns = self._raw_columns + [expr]
    self._froms = self._froms.union(_from_objects(expr))
    # the exported column collection is now stale; force a rebuild
    self._reset_exported()
def append_prefix(self, clause):
    """append the given columns clause prefix expression to this select()
    construct.
    """
    clause = _literal_as_text(clause)
    # prefixes accumulate; they are kept in an immutable tuple
    self._prefixes = self._prefixes + (clause,)
def append_whereclause(self, whereclause):
    """append the given expression to this select() construct's WHERE
    criterion.

    The expression will be joined to existing WHERE criterion via AND.
    """
    criterion = _literal_as_text(whereclause)
    # FROM objects implied by the criterion join the FROM list as well
    self._froms = self._froms.union(_from_objects(criterion))
    if self._whereclause is None:
        self._whereclause = criterion
    else:
        self._whereclause = and_(self._whereclause, criterion)
def append_having(self, having):
    """append the given expression to this select() construct's HAVING
    criterion.

    The expression will be joined to existing HAVING criterion via AND.
    """
    criterion = _literal_as_text(having)
    if self._having is None:
        self._having = criterion
    else:
        self._having = and_(self._having, criterion)
def append_from(self, fromclause):
    """append the given FromClause expression to this select() construct's
    FROM clause.
    """
    # plain strings are wrapped as literal SQL text
    if _is_literal(fromclause):
        fromclause = _TextClause(fromclause)
    self._froms = self._froms.union([fromclause])
def __exportable_columns(self):
    """Generate the exportable column elements of ``_raw_columns``.

    Selectable members contribute each of their columns; plain
    ColumnElement members are yielded directly; any other element type
    is skipped.
    """
    for column in self._raw_columns:
        if isinstance(column, Selectable):
            for co in column.columns:
                yield co
        elif isinstance(column, ColumnElement):
            yield column
        # other element types are intentionally ignored; the original
        # trailing 'else: continue' was a no-op and has been removed
def _populate_column_collection(self):
    """Build the exported column collection by proxying each exportable
    column onto this select; with use_labels the proxy takes the
    column's generated label as its name.
    """
    for c in self.__exportable_columns():
        c._make_proxy(self, name=self.use_labels and c._label or None)
def self_group(self, against=None):
    """return a 'grouping' construct as per the ClauseElement
    specification.

    This produces an element that can be embedded in an expression. Note
    that this method is called automatically as needed when constructing
    expressions.
    """
    # a select placed directly inside a compound select (UNION etc.)
    # needs no extra grouping
    if isinstance(against, CompoundSelect):
        return self
    else:
        return _FromGrouping(self)
def union(self, other, **kwargs):
    """return a SQL UNION of this select() construct against the given
    selectable."""
    # delegates to the module-level union() constructor
    return union(self, other, **kwargs)
def union_all(self, other, **kwargs):
    """return a SQL UNION ALL of this select() construct against the given
    selectable.
    """
    # delegates to the module-level union_all() constructor
    return union_all(self, other, **kwargs)
def except_(self, other, **kwargs):
    """return a SQL EXCEPT of this select() construct against the given
    selectable."""
    # delegates to the module-level except_() constructor
    return except_(self, other, **kwargs)
def except_all(self, other, **kwargs):
    """return a SQL EXCEPT ALL of this select() construct against the
    given selectable.
    """
    # delegates to the module-level except_all() constructor
    return except_all(self, other, **kwargs)
def intersect(self, other, **kwargs):
    """return a SQL INTERSECT of this select() construct against the given
    selectable.
    """
    # delegates to the module-level intersect() constructor
    return intersect(self, other, **kwargs)
def intersect_all(self, other, **kwargs):
    """return a SQL INTERSECT ALL of this select() construct against the
    given selectable.
    """
    # delegates to the module-level intersect_all() constructor
    return intersect_all(self, other, **kwargs)
def bind(self):
    """Return the engine/connection this statement is bound to, if any.

    An explicitly assigned bind wins; otherwise the bind is taken from
    the first FROM object, or, when there are no FROMs, from the first
    raw column that supplies one.  A discovered bind is cached.
    """
    if self._bind:
        return self._bind
    if self._froms:
        engine = list(self._froms)[0].bind
        if engine:
            self._bind = engine
            return engine
    else:
        for col in self._raw_columns:
            engine = col.bind
            if engine:
                self._bind = engine
                return engine
    return None
def _set_bind(self, bind):
    # setter half of the read/write ``bind`` property defined below
    self._bind = bind
bind = property(bind, _set_bind)
class _UpdateBase(Executable, ClauseElement):
    """Form the base for ``INSERT``, ``UPDATE``, and ``DELETE`` statements."""

    __visit_name__ = 'update_base'
    # DML autocommits by default when executed outside a transaction
    _execution_options = \
        Executable._execution_options.union({'autocommit': True})
    kwargs = util.frozendict()

    def _process_colparams(self, parameters):
        """Normalize a positional list/tuple of values into a dict keyed
        by the target table's column keys; dicts (and None) pass through
        unchanged.
        """
        if isinstance(parameters, (list, tuple)):
            pp = {}
            for i, c in enumerate(self.table.c):
                pp[c.key] = parameters[i]
            return pp
        else:
            return parameters

    def params(self, *arg, **kw):
        """Not supported for DML statements; always raises."""
        raise NotImplementedError(
            "params() is not supported for INSERT/UPDATE/DELETE statements."
            " To set the values for an INSERT or UPDATE statement, use"
            " stmt.values(**parameters).")

    def bind(self):
        # explicit bind wins; otherwise borrow from the target table
        return self._bind or self.table.bind
    def _set_bind(self, bind):
        self._bind = bind
    bind = property(bind, _set_bind)

    # matches legacy dialect-specific RETURNING keyword argument names
    _returning_re = re.compile(r'(?:firebird|postgres(?:ql)?)_returning')

    def _process_deprecated_kw(self, kwargs):
        """Move legacy ``*_returning`` keyword arguments into
        ``self._returning``, warning for each; return the rest.
        """
        for k in list(kwargs):
            m = self._returning_re.match(k)
            if m:
                self._returning = kwargs.pop(k)
                util.warn_deprecated(
                    "The %r argument is deprecated. Please "
                    "use statement.returning(col1, col2, ...)" % k
                )
        return kwargs

    @_generative
    def returning(self, *cols):
        """Add a RETURNING or equivalent clause to this statement.

        The given list of columns represent columns within the table that is
        the target of the INSERT, UPDATE, or DELETE. Each element can be any
        column expression. :class:`~sqlalchemy.schema.Table` objects will be
        expanded into their individual columns.

        Upon compilation, a RETURNING clause, or database equivalent,
        will be rendered within the statement. For INSERT and UPDATE,
        the values are the newly inserted/updated values. For DELETE,
        the values are those of the rows which were deleted.

        Upon execution, the values of the columns to be returned
        are made available via the result set and can be iterated
        using ``fetchone()`` and similar. For DBAPIs which do not
        natively support returning values (i.e. cx_oracle),
        SQLAlchemy will approximate this behavior at the result level
        so that a reasonable amount of behavioral neutrality is
        provided.

        Note that not all databases/DBAPIs
        support RETURNING. For those backends with no support,
        an exception is raised upon compilation and/or execution.
        For those who do support it, the functionality across backends
        varies greatly, including restrictions on executemany()
        and other statements which return multiple rows. Please
        read the documentation notes for the database in use in
        order to determine the availability of RETURNING.
        """
        self._returning = cols
class _ValuesBase(_UpdateBase):
    """Supply the VALUES/SET portion for INSERT and UPDATE constructs."""

    __visit_name__ = 'values_base'

    def __init__(self, table, values):
        self.table = table
        self.parameters = self._process_colparams(values)

    @_generative
    def values(self, *args, **kwargs):
        """specify the VALUES clause for an INSERT statement, or the SET
        clause for an UPDATE.

        \**kwargs
            key=<somevalue> arguments

        \*args
            A single dictionary can be sent as the first positional
            argument. This allows non-string based keys, such as Column
            objects, to be used.
        """
        v = args[0] if args else {}
        if self.parameters is None:
            # Bug fix: _process_colparams() returns a dict argument
            # unchanged, so updating its result in place would mutate
            # the caller's dictionary.  Copy before merging kwargs.
            self.parameters = dict(self._process_colparams(v))
            self.parameters.update(kwargs)
        else:
            self.parameters = self.parameters.copy()
            self.parameters.update(self._process_colparams(v))
            self.parameters.update(kwargs)
class Insert(_ValuesBase):
    """Represent an INSERT construct.

    The :class:`Insert` object is created using the :func:`insert()` function.
    """

    __visit_name__ = 'insert'

    _prefixes = ()

    def __init__(self,
                 table,
                 values=None,
                 inline=False,
                 bind=None,
                 prefixes=None,
                 returning=None,
                 **kwargs):
        """Construct an INSERT against ``table``.

        Remaining ``**kwargs`` are scanned for deprecated
        dialect-specific ``*_returning`` arguments.
        """
        _ValuesBase.__init__(self, table, values)
        self._bind = bind
        # populated when an embedded SELECT is used (INSERT ... SELECT)
        self.select = None
        self.inline = inline
        self._returning = returning
        if prefixes:
            self._prefixes = tuple([_literal_as_text(p) for p in prefixes])
        if kwargs:
            self.kwargs = self._process_deprecated_kw(kwargs)

    def get_children(self, **kwargs):
        # the embedded SELECT (if any) is the only child element
        if self.select is not None:
            return self.select,
        else:
            return ()

    def _copy_internals(self, clone=_clone):
        # TODO: coverage
        self.parameters = self.parameters.copy()

    @_generative
    def prefix_with(self, clause):
        """Add a word or expression between INSERT and INTO. Generative.

        If multiple prefixes are supplied, they will be separated with
        spaces.
        """
        clause = _literal_as_text(clause)
        self._prefixes = self._prefixes + (clause,)
class Update(_ValuesBase):
    """Represent an Update construct.

    The :class:`Update` object is created using the :func:`update()` function.
    """

    __visit_name__ = 'update'

    def __init__(self,
                 table,
                 whereclause,
                 values=None,
                 inline=False,
                 bind=None,
                 returning=None,
                 **kwargs):
        """Construct an UPDATE against ``table`` with an optional WHERE
        criterion and SET values.
        """
        _ValuesBase.__init__(self, table, values)
        self._bind = bind
        self._returning = returning
        if whereclause is not None:
            self._whereclause = _literal_as_text(whereclause)
        else:
            self._whereclause = None
        self.inline = inline
        if kwargs:
            # absorb deprecated dialect-specific *_returning arguments
            self.kwargs = self._process_deprecated_kw(kwargs)

    def get_children(self, **kwargs):
        # the WHERE criterion is the only child element
        if self._whereclause is not None:
            return self._whereclause,
        else:
            return ()

    def _copy_internals(self, clone=_clone):
        # TODO: coverage
        self._whereclause = clone(self._whereclause)
        self.parameters = self.parameters.copy()

    @_generative
    def where(self, whereclause):
        """return a new update() construct with the given expression added to
        its WHERE clause, joined to the existing clause via AND, if any.
        """
        if self._whereclause is not None:
            self._whereclause = and_(self._whereclause,
                                     _literal_as_text(whereclause))
        else:
            self._whereclause = _literal_as_text(whereclause)
class Delete(_UpdateBase):
    """Represent a DELETE construct.

    The :class:`Delete` object is created using the :func:`delete()` function.
    """

    __visit_name__ = 'delete'

    def __init__(self,
                 table,
                 whereclause,
                 bind=None,
                 returning=None,
                 **kwargs):
        """Construct a DELETE against ``table`` with an optional WHERE
        criterion.
        """
        self._bind = bind
        self.table = table
        self._returning = returning
        if whereclause is None:
            self._whereclause = None
        else:
            self._whereclause = _literal_as_text(whereclause)
        if kwargs:
            # absorb deprecated dialect-specific *_returning arguments
            self.kwargs = self._process_deprecated_kw(kwargs)

    def get_children(self, **kwargs):
        # the WHERE criterion is the only child element
        if self._whereclause is None:
            return ()
        return (self._whereclause,)

    @_generative
    def where(self, whereclause):
        """Add the given WHERE clause to a newly returned delete construct."""
        criterion = _literal_as_text(whereclause)
        if self._whereclause is None:
            self._whereclause = criterion
        else:
            self._whereclause = and_(self._whereclause, criterion)

    def _copy_internals(self, clone=_clone):
        # TODO: coverage
        self._whereclause = clone(self._whereclause)
class _IdentifiedClause(Executable, ClauseElement):
    """Base for clauses identified by a name (savepoint operations)."""
    __visit_name__ = 'identified'
    # savepoint operations must not trigger autocommit
    _execution_options = \
        Executable._execution_options.union({'autocommit': False})
    quote = None

    def __init__(self, ident):
        # ident: the identifier (savepoint name) for this clause
        self.ident = ident
class SavepointClause(_IdentifiedClause):
    """Represent a SAVEPOINT statement for the given identifier."""
    __visit_name__ = 'savepoint'
class RollbackToSavepointClause(_IdentifiedClause):
    """Represent a ROLLBACK TO SAVEPOINT statement for the identifier."""
    __visit_name__ = 'rollback_to_savepoint'
class ReleaseSavepointClause(_IdentifiedClause):
    """Represent a RELEASE SAVEPOINT statement for the identifier."""
    __visit_name__ = 'release_savepoint'
|
dbbhattacharya/kitsune
|
vendor/packages/sqlalchemy/lib/sqlalchemy/sql/expression.py
|
Python
|
bsd-3-clause
| 152,008 | 0.003592 |
"""
reviewview.py
Contains administrative views for working with reviews.
"""
from admin_helpers import *
from flask import flash
from flask.ext.admin.actions import action
from flask.ext.admin.contrib.sqla import ModelView
from wtforms import IntegerField, validators
import remedy.rad.reviewservice
from remedy.rad.models import Review
class ReviewView(AdminAuthMixin, ModelView):
    """
    An administrative view for working with resource reviews.
    """
    # Disable model creation
    can_create = False

    # eager-load the related resource and user for the list view
    column_select_related_list = (Review.resource, Review.user)
    # newest reviews first
    column_default_sort = (Review.date_created, True)
    column_sortable_list = ('composite_rating', 'visible', ('date_created', Review.date_created))
    column_list = ('composite_rating', 'resource.name', 'user.username',
        'visible', 'date_created')
    column_labels = {
        'composite_rating': 'Comp. Rating',
        'rating': 'Provider Rating',
        'staff_rating': 'Staff Rating',
        'intake_rating': 'Intake Rating',
        'resource.name': 'Resource',
        'user.username': 'User',
        'visible': 'Visible',
        'date_created': 'Date Created',
        'ip': 'IP'
    }
    column_descriptions = dict(composite_rating='The average of the rating fields.')
    column_searchable_list = ('text',)
    column_filters = ('visible','composite_rating','rating','staff_rating',
        'intake_rating','ip',)
    # composite_rating is derived, and review-threading fields are managed
    # by the application rather than edited directly
    form_excluded_columns = ('date_created','is_old_review','old_reviews',
        'new_review_id','new_review', 'composite_rating')

    # Mark fields visible but read-only. If we use
    # "disabled" this ends up clearing out the value.
    form_widget_args = {
        'ip': {
            'readonly': 'readonly'
        }
    }

    def scaffold_form(self):
        """
        Sets up the review form to ensure that the rating field
        behaves on a 1-5 scale.
        """
        form_class = super(ReviewView, self).scaffold_form()
        # each rating is optional but must be 1-5 when provided
        form_class.rating = IntegerField('Provider Rating', validators=[
            validators.Optional(),
            validators.NumberRange(min=1, max=5)
        ])
        form_class.staff_rating = IntegerField('Staff Rating', validators=[
            validators.Optional(),
            validators.NumberRange(min=1, max=5)
        ])
        form_class.intake_rating = IntegerField('Intake Rating', validators=[
            validators.Optional(),
            validators.NumberRange(min=1, max=5)
        ])
        return form_class

    def delete_model(self, model):
        """
        Deletes the specified review.

        Args:
            model: The review to delete.

        Returns:
            True on success, False on failure (with the session rolled
            back and an error flashed).
        """
        try:
            # delegate to the review service so related state is handled
            remedy.rad.reviewservice.delete(self.session, model)
            flash('Review deleted.')
            return True
        except Exception as ex:
            if not super(ReviewView, self).handle_view_exception(ex):
                # NOTE(review): gettext and log are not imported in this
                # module directly - presumably provided by the
                # admin_helpers star import; confirm.
                flash(gettext('Failed to delete model. %(error)s', error=str(ex)), 'error')
                log.exception('Failed to delete model')
            self.session.rollback()
            return False

    @action('togglevisible',
        'Toggle Visibility',
        'Are you sure you wish to toggle visibility for the selected reviews?')
    def action_togglevisible(self, ids):
        """
        Attempts to toggle visibility for each of the specified reviews.

        Args:
            ids: The list of review IDs, indicating which reviews
                should have their visibility toggled.
        """
        # Load all reviews by the set of IDs
        target_reviews = self.get_query().filter(self.model.id.in_(ids)).all()

        # Build a list of all the results
        results = []

        if len(target_reviews) > 0:
            for review in target_reviews:
                # Build a helpful string to use for messages.
                review_str = 'review #' + str(review.id) + ' (' + review.resource.name + \
                    ' by ' + review.user.username + ')'
                visible_status = ''
                try:
                    if not review.visible:
                        review.visible = True
                        visible_status = ' as visible'
                    else:
                        review.visible = False
                        visible_status = ' as not visible'
                except Exception as ex:
                    results.append('Error changing ' + review_str + ': ' + str(ex))
                else:
                    results.append('Marked ' + review_str + visible_status + '.')

            # Save our changes.
            self.session.commit()
        else:
            results.append('No reviews were selected.')

        # Flash the results of everything
        flash("\n".join(msg for msg in results))

    def __init__(self, session, **kwargs):
        # bind this view to the Review model
        super(ReviewView, self).__init__(Review, session, **kwargs)
|
AllieDeford/radremedy
|
remedy/admin_views/reviewview.py
|
Python
|
bsd-3-clause
| 4,932 | 0.00588 |
# -*- coding: utf-8 -*-
#
# django-timegraph - monitoring graphs for django
# Copyright (c) 2011-2012, Wifirst
# Copyright (c) 2013, Jeremy Lainé
# All rights reserved.
#
# See AUTHORS file for a full list of contributors.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import os
import shutil
import tempfile
from django.conf import settings
from django.contrib.auth.models import User
from django.test import TestCase
import timegraph
from timegraph.models import format_value, Graph, Metric
def setup_test_environment():
    """Point TIMEGRAPH_RRD_ROOT at a throwaway temporary directory.

    The original value is stashed on the ``timegraph`` module so that
    teardown_test_environment() can restore it.
    """
    timegraph.original_rrd_root = settings.TIMEGRAPH_RRD_ROOT
    settings.TIMEGRAPH_RRD_ROOT = tempfile.mkdtemp()
def teardown_test_environment():
    """Delete the temporary RRD directory and restore the original setting."""
    shutil.rmtree(settings.TIMEGRAPH_RRD_ROOT)
    settings.TIMEGRAPH_RRD_ROOT = timegraph.original_rrd_root
    del timegraph.original_rrd_root
class TestFormat(TestCase):
    """Tests for ``timegraph.models.format_value``.

    Uses ``assertEqual`` rather than the deprecated ``assertEquals``
    alias throughout.
    """

    def test_format_none(self):
        # unformattable inputs render as the empty string
        self.assertEqual(format_value(None, 'b'), '')
        self.assertEqual(format_value(None, ''), '')
        self.assertEqual(format_value('', 'b'), '')
        self.assertEqual(format_value('', ''), '')
        self.assertEqual(format_value(object(), 'b'), '')
        self.assertEqual(format_value(object(), ''), '')

    def test_format_byte(self):
        # byte values scale by powers of 1024 with binary (ki/Mi/...) prefixes
        self.assertEqual(format_value(0, 'b'), '0 b')
        self.assertEqual(format_value(1, 'b'), '1 b')
        self.assertEqual(format_value(1024, 'b'), '1.0 kib')
        self.assertEqual(format_value(2048, 'b'), '2.0 kib')
        self.assertEqual(format_value(1048576, 'b'), '1.0 Mib')
        self.assertEqual(format_value(1073741824, 'b'), '1.0 Gib')
        self.assertEqual(format_value(1099511627776, 'b'), '1.0 Tib')
        self.assertEqual(format_value(1125899906842624, 'b'), '1.0 Pib')
        self.assertEqual(format_value(1152921504606846976, 'b'), '1.0 Eib')
        # FIXME: there seems to be a rounding error
        #self.assertEqual(format_value(1180591620717411303424, 'b'), '1.0 Zib')
        #self.assertEqual(format_value(1208925819614629174706176, 'b'), '1.0 Yib')
        self.assertEqual(format_value(1237940039285380274899124224, 'b'), '1024.0 Yib')

    def test_format_int(self):
        # integer values scale by powers of 1000 with SI prefixes
        self.assertEqual(format_value(0, 's'), '0 s')
        self.assertEqual(format_value(1, 's'), '1 s')
        self.assertEqual(format_value(10, 's'), '10 s')
        self.assertEqual(format_value(100, 's'), '100 s')
        self.assertEqual(format_value(1000, 's'), '1.0 ks')
        self.assertEqual(format_value(10000, 's'), '10.0 ks')
        self.assertEqual(format_value(100000, 's'), '100.0 ks')
        self.assertEqual(format_value(1000000, 's'), '1.0 Ms')
        self.assertEqual(format_value(10000000, 's'), '10.0 Ms')
        self.assertEqual(format_value(100000000, 's'), '100.0 Ms')
        self.assertEqual(format_value(1000000000, 's'), '1.0 Gs')
        self.assertEqual(format_value(10000000000, 's'), '10.0 Gs')
        self.assertEqual(format_value(100000000000, 's'), '100.0 Gs')
        self.assertEqual(format_value(1000000000000, 's'), '1.0 Ts')
        self.assertEqual(format_value(10000000000000, 's'), '10.0 Ts')
        self.assertEqual(format_value(100000000000000, 's'), '100.0 Ts')
        self.assertEqual(format_value(1000000000000000, 's'), '1.0 Ps')
        self.assertEqual(format_value(10000000000000000, 's'), '10.0 Ps')
        self.assertEqual(format_value(100000000000000000, 's'), '100.0 Ps')
        self.assertEqual(format_value(1000000000000000000, 's'), '1.0 Es')
        self.assertEqual(format_value(10000000000000000000, 's'), '10.0 Es')
        self.assertEqual(format_value(100000000000000000000, 's'), '100.0 Es')
        self.assertEqual(format_value(1000000000000000000000, 's'), '1.0 Zs')
        self.assertEqual(format_value(10000000000000000000000, 's'), '10.0 Zs')
        self.assertEqual(format_value(1000000000000000000000000, 's'), '1.0 Ys')
        self.assertEqual(format_value(10000000000000000000000000, 's'), '10.0 Ys')
        self.assertEqual(format_value(100000000000000000000000000, 's'), '100.0 Ys')
        # values beyond the largest prefix saturate at Y
        self.assertEqual(format_value(1000000000000000000000000000, 's'), '1000.0 Ys')
        self.assertEqual(format_value(10000000000000000000000000000, 's'), '10000.0 Ys')

    def test_format_float(self):
        # floats use SI prefixes in both directions (sub-unit included)
        self.assertEqual(format_value(0.0, 's'), '0.0 s')
        self.assertEqual(format_value(0.00000000000000000000000001, 's'), u'0.0 ys')
        self.assertEqual(format_value(0.0000000000000000000000001, 's'), u'0.1 ys')
        self.assertEqual(format_value(0.000000000000000000000001, 's'), u'1.0 ys')
        self.assertEqual(format_value(0.00000000000000000000001, 's'), u'10.0 ys')
        self.assertEqual(format_value(0.0000000000000000000001, 's'), u'100.0 ys')
        self.assertEqual(format_value(0.000000000000000000001, 's'), u'1.0 zs')
        self.assertEqual(format_value(0.00000000000000000001, 's'), u'10.0 zs')
        self.assertEqual(format_value(0.0000000000000000001, 's'), u'100.0 zs')
        self.assertEqual(format_value(0.000000000000000001, 's'), u'1.0 as')
        self.assertEqual(format_value(0.00000000000000001, 's'), u'10.0 as')
        self.assertEqual(format_value(0.0000000000000001, 's'), u'100.0 as')
        self.assertEqual(format_value(0.000000000000001, 's'), u'1.0 fs')
        self.assertEqual(format_value(0.00000000000001, 's'), u'10.0 fs')
        self.assertEqual(format_value(0.0000000000001, 's'), u'100.0 fs')
        self.assertEqual(format_value(0.000000000001, 's'), u'1.0 ps')
        self.assertEqual(format_value(0.00000000001, 's'), u'10.0 ps')
        self.assertEqual(format_value(0.0000000001, 's'), u'100.0 ps')
        self.assertEqual(format_value(0.000000001, 's'), u'1.0 ns')
        self.assertEqual(format_value(0.00000001, 's'), u'10.0 ns')
        self.assertEqual(format_value(0.0000001, 's'), u'100.0 ns')
        self.assertEqual(format_value(0.000001, 's'), u'1.0 µs')
        self.assertEqual(format_value(0.00001, 's'), u'10.0 µs')
        self.assertEqual(format_value(0.0001, 's'), u'100.0 µs')
        self.assertEqual(format_value(0.001, 's'), '1.0 ms')
        self.assertEqual(format_value(0.01, 's'), '10.0 ms')
        self.assertEqual(format_value(0.1, 's'), '100.0 ms')
        self.assertEqual(format_value(1.0, 's'), '1.0 s')
        self.assertEqual(format_value(10.0, 's'), '10.0 s')
        self.assertEqual(format_value(100.0, 's'), '100.0 s')
        self.assertEqual(format_value(1000.0, 's'), '1.0 ks')
        self.assertEqual(format_value(10000.0, 's'), '10.0 ks')
        self.assertEqual(format_value(100000.0, 's'), '100.0 ks')
        self.assertEqual(format_value(1000000.0, 's'), '1.0 Ms')
        self.assertEqual(format_value(10000000.0, 's'), '10.0 Ms')
        self.assertEqual(format_value(100000000.0, 's'), '100.0 Ms')
        self.assertEqual(format_value(1000000000.0, 's'), '1.0 Gs')
        self.assertEqual(format_value(10000000000.0, 's'), '10.0 Gs')
        self.assertEqual(format_value(100000000000.0, 's'), '100.0 Gs')
        self.assertEqual(format_value(1000000000000.0, 's'), '1.0 Ts')
        self.assertEqual(format_value(10000000000000.0, 's'), '10.0 Ts')
        self.assertEqual(format_value(100000000000000.0, 's'), '100.0 Ts')
        self.assertEqual(format_value(1000000000000000.0, 's'), '1.0 Ps')
        self.assertEqual(format_value(10000000000000000.0, 's'), '10.0 Ps')
        self.assertEqual(format_value(100000000000000000.0, 's'), '100.0 Ps')
        self.assertEqual(format_value(1000000000000000000.0, 's'), '1.0 Es')
        self.assertEqual(format_value(10000000000000000000.0, 's'), '10.0 Es')
        self.assertEqual(format_value(100000000000000000000.0, 's'), '100.0 Es')
        self.assertEqual(format_value(1000000000000000000000.0, 's'), '1.0 Zs')
        self.assertEqual(format_value(10000000000000000000000.0, 's'), '10.0 Zs')
        self.assertEqual(format_value(100000000000000000000000.0, 's'), '100.0 Zs')
        self.assertEqual(format_value(1000000000000000000000000.0, 's'), '1.0 Ys')
        self.assertEqual(format_value(10000000000000000000000000.0, 's'), '10.0 Ys')
        self.assertEqual(format_value(100000000000000000000000000.0, 's'), '100.0 Ys')
        # values beyond the largest prefix saturate at Y
        self.assertEqual(format_value(1000000000000000000000000000.0, 's'), '1000.0 Ys')
        self.assertEqual(format_value(10000000000000000000000000000.0, 's'), '10000.0 Ys')

    def test_format_percent(self):
        # percentages are never rescaled
        self.assertEqual(format_value(0.0, '%'), '0.0 %')
        self.assertEqual(format_value(0.1, '%'), '0.1 %')
        self.assertEqual(format_value(1000, '%'), '1000 %')

    def test_format_string(self):
        # strings are passed through with the unit appended
        self.assertEqual(format_value('abc', 'foo'), 'abc foo')
        self.assertEqual(format_value('0.1.0', ''), '0.1.0')
class TestGraph(TestCase):
    """Tests for the ``Graph`` model."""

    def test_unicode(self):
        # __unicode__ should return the graph title; use assertEqual
        # instead of the deprecated assertEquals alias
        m = Graph(title='foo bar')
        self.assertEqual(unicode(m), 'foo bar')
class TestMetric(TestCase):
    """Tests for the ``Metric`` model.

    Uses ``assertEqual`` rather than the deprecated ``assertEquals``
    alias throughout.
    """
    fixtures = ['test_timegraph_metrics.json', 'test_timegraph_users.json']

    def setUp(self):
        # redirect RRD storage to a temp dir for the duration of the test
        setup_test_environment()

    def tearDown(self):
        teardown_test_environment()

    def test_is_summable(self):
        m = Metric(type='bool')
        self.assertEqual(m.is_summable, False)
        m = Metric(type='float', unit='s')
        self.assertEqual(m.is_summable, True)
        # temperatures are averages, not sums
        m = Metric(type='float', unit=u'°C')
        self.assertEqual(m.is_summable, False)
        m = Metric(type='float', unit=u'°F')
        self.assertEqual(m.is_summable, False)
        m = Metric(type='int', unit='err')
        self.assertEqual(m.is_summable, True)
        m = Metric(type='int', unit='%')
        self.assertEqual(m.is_summable, False)
        m = Metric(type='string')
        self.assertEqual(m.is_summable, False)

    def test_set_get(self):
        # round-trip a polled value through the metric storage
        metric = Metric.objects.get(pk=1)
        user = User.objects.get(pk=1)
        metric.set_polling(user, '1.23')
        value = metric.get_polling(user)
        self.assertEqual(value, 1.23)

    def test_to_python_bool(self):
        m = Metric(type='bool')
        self.assertEqual(m.to_python(None), False)
        self.assertEqual(m.to_python(''), False)
        self.assertEqual(m.to_python('0'), False)
        self.assertEqual(m.to_python('False'), False)
        self.assertEqual(m.to_python('True'), True)
        self.assertEqual(m.to_python('1'), True)

    def test_to_python_float(self):
        m = Metric(type='float')
        self.assertEqual(m.to_python(None), 0.0)
        self.assertEqual(m.to_python(''), 0.0)
        self.assertEqual(m.to_python('0.0'), 0.0)
        self.assertEqual(m.to_python('1.3'), 1.3)
        self.assertEqual(m.to_python('10.1'), 10.1)

    def test_to_python_int(self):
        m = Metric(type='int')
        self.assertEqual(m.to_python(None), 0)
        self.assertEqual(m.to_python(''), 0)
        self.assertEqual(m.to_python('0'), 0)
        self.assertEqual(m.to_python('1'), 1)
        self.assertEqual(m.to_python('10'), 10)

    def test_to_python_string(self):
        m = Metric(type='string')
        self.assertEqual(m.to_python(None), '')
        self.assertEqual(m.to_python(''), '')
        self.assertEqual(m.to_python('0'), '0')
        self.assertEqual(m.to_python('1'), '1')
        self.assertEqual(m.to_python('abcd'), 'abcd')

    def test_pre_key(self):
        metric = Metric.objects.get(pk=1)
        user = User.objects.get(pk=1)
        self.assertEqual(metric._pre_key_for(user) % user.pk, '%s/user/1/1' % settings.TIMEGRAPH_CACHE_PREFIX)

    def test_rrd_path(self):
        metric = Metric.objects.get(pk=1)
        user = User.objects.get(pk=1)
        self.assertEqual(metric._rrd_path(user), os.path.join(settings.TIMEGRAPH_RRD_ROOT, 'user', '1', '1.rrd'))

    def test_unicode(self):
        m = Metric(name='foo bar')
        self.assertEqual(unicode(m), 'foo bar')
|
jlaine/django-timegraph
|
timegraph/tests/test_timegraph.py
|
Python
|
bsd-2-clause
| 13,243 | 0.003248 |
from __future__ import unicode_literals, division, absolute_import
import logging
import sys
import os
from flexget import plugin
from flexget.event import event
log = logging.getLogger('change')
found_deprecated = False
class ChangeWarn(object):
    """
    Gives warning if user has deprecated / changed configuration in the root level.

    Will be replaced by root level validation in the future!

    Contains ugly hacks, better to include all deprecation warnings here during 1.0 BETA phase
    """

    def on_task_start(self, task, config):
        # module-level flag; also set by the .pyc sanity check at import time
        global found_deprecated

        if 'torrent_size' in task.config:
            log.critical('Plugin torrent_size is deprecated, use content_size instead')
            found_deprecated = True

        if 'nzb_size' in task.config:
            log.critical('Plugin nzb_size is deprecated, use content_size instead')
            found_deprecated = True

        if found_deprecated:
            # stop scheduling further work and abort the current task
            task.manager.scheduler.shutdown(finish_queue=False)
            task.abort('Deprecated config.')
@event('plugin.register')
def register_plugin():
    """Register ChangeWarn as a builtin plugin (plugin API v2)."""
    plugin.register(ChangeWarn, 'change_warn', builtin=True, api_ver=2)
# check that no old plugins are in pre-compiled form (pyc)
# Scans the plugin directories for .pyc/.pyo leftovers of modules that were
# renamed or removed in older FlexGet versions; stale bytecode would still
# be importable and shadow current plugins.
try:
    import os.path

    plugin_dirs = (os.path.normpath(sys.path[0] + '/../flexget/plugins/'),
                   os.path.normpath(sys.path[0] + '/../flexget/plugins/input/'))
    for plugin_dir in plugin_dirs:
        for name in os.listdir(plugin_dir):
            require_clean = False

            if name.startswith('module'):
                require_clean = True
            if name == 'csv.pyc':
                require_clean = True
            if 'resolver' in name:
                require_clean = True
            if 'filter_torrent_size' in name:
                require_clean = True
            if 'filter_nzb_size' in name:
                require_clean = True
            if 'module_priority' in name:
                require_clean = True
            if 'ignore_feed' in name:
                require_clean = True
            if 'module_manual' in name:
                require_clean = True
            if 'output_exec' in name:
                require_clean = True
            if 'plugin_adv_exec' in name:
                require_clean = True
            if 'output_transmissionrpc' in name:
                require_clean = True

            if require_clean:
                log.critical('-' * 79)
                log.critical('IMPORTANT: Your installation has some files from older FlexGet!')
                log.critical('')
                log.critical('           Please remove all pre-compiled .pyc and .pyo files from %s' % plugin_dir)
                log.critical('           Offending file: %s' % name)
                log.critical('')
                log.critical('           After getting rid of these FlexGet should run again normally')
                from flexget import __version__ as version
                if version == '{git}':
                    log.critical('')
                    log.critical('           If you are using bootstrapped git checkout you can run:')
                    log.critical('           bin/paver clean_compiled')
                    log.critical('')
                log.critical('-' * 79)
                found_deprecated = True
                # stop scanning this directory after the first hit
                break
except Exception:
    # Fixed: was a bare ``except:``, which would also swallow SystemExit
    # and KeyboardInterrupt.  Any failure here (e.g. a missing plugin
    # directory) remains deliberately non-fatal.
    pass
|
voriux/Flexget
|
flexget/plugins/plugin_change_warn.py
|
Python
|
mit
| 3,389 | 0.002951 |
# copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of logilab-common.
#
# logilab-common is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 2.1 of the License, or (at your option) any
# later version.
#
# logilab-common is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with logilab-common. If not, see <http://www.gnu.org/licenses/>.
"""Wrappers to get actually replaceable DBAPI2 compliant modules and
database connection whatever the database and client lib used.
Currently support:
- postgresql (pgdb, psycopg, psycopg2, pyPgSQL)
- mysql (MySQLdb)
- sqlite (pysqlite2, sqlite, sqlite3)
just use the `get_connection` function from this module to get a
wrapped connection. If multiple drivers for a database are available,
you can control which one you want to use using the
`set_prefered_driver` function.
Additional helpers are also provided for advanced functionalities such
as listing existing users or databases, creating database... Get the
helper for your database using the `get_adv_func_helper` function.
"""
__docformat__ = "restructuredtext en"
from warnings import warn
warn('this module is deprecated, use logilab.database instead',
DeprecationWarning, stacklevel=1)
from logilab.database import (get_connection, set_prefered_driver,
get_dbapi_compliant_module as _gdcm,
get_db_helper as _gdh)
def get_dbapi_compliant_module(driver, *args, **kwargs):
    """Return the DBAPI2-compliant module for *driver*, with the matching
    advanced-functions helper attached as ``adv_func_helper`` (kept for
    backward compatibility with the pre-logilab.database API).
    """
    compliant_module = _gdcm(driver, *args, **kwargs)
    compliant_module.adv_func_helper = _gdh(driver)
    return compliant_module
|
dbbhattacharya/kitsune
|
vendor/packages/logilab-common/db.py
|
Python
|
bsd-3-clause
| 2,037 | 0.003436 |
from concurrent.futures import ThreadPoolExecutor
import grpc
import pytest
from crawler.services import Library
import crawler_pb2_grpc
@pytest.fixture(name='grpc_client', scope='session', autouse=True)
def setup_grpc_client():
    """Session-wide fixture: serve ``Library`` on an ephemeral local port
    and yield a connected ``LibraryStub`` for the tests to use."""
    grpc_server = grpc.server(ThreadPoolExecutor(max_workers=4))
    crawler_pb2_grpc.add_LibraryServicer_to_server(Library(), grpc_server)
    # port 0 lets the OS pick a free port; add_insecure_port returns it
    bound_port = grpc_server.add_insecure_port('[::]:0')
    grpc_server.start()
    with grpc.insecure_channel(f'localhost:{bound_port}') as channel:
        yield crawler_pb2_grpc.LibraryStub(channel)
    grpc_server.stop(0)
|
xavierdutreilh/robots.midgar.fr
|
services/crawler/tests/conftest.py
|
Python
|
mit
| 564 | 0.001773 |
__author__ = "Josh Perry"
import os
import shutil
import urllib2
import zipfile
import rarfile
from bs4 import BeautifulSoup
dumping_directory = "W:\Plugin repo\dump"
def save_file(url, filename):
filename = "out/" + filename
# Check if we already have it
if os.path.isfile(filename + ".zip") or os.path.isfile(filename + ".rar") or os.path.isfile(filename + ".zip"):
print "We already have " + filename + ", skipping"
return
print("Downloading... " + filename)
try:
f = open(filename, 'wb')
f.write(urllib2.urlopen(url).read())
f.close()
if zipfile.is_zipfile(filename):
print(filename + " is a zip file")
shutil.move(filename, filename + ".zip")
elif rarfile.is_rarfile(filename):
print(filename + " is a rar file")
shutil.move(filename, filename + ".rar")
else:
print(filename + " is an nds file")
shutil.move(filename, filename + ".nds")
except urllib2.URLError:
print "Failed to download: " + filename
def nds_homebrew_hive():
    """Scrape the NDS Homebrew Hive download listings (apps and games).

    Fix: every mkdir is now guarded so a partially completed previous run
    can resume (the original crashed with OSError when "out" or
    "out/ndshb_apps" already existed; the games section already guarded).
    """
    base_url = "http://www.ndshb.com"
    if not os.path.isdir("out"):
        os.mkdir("out")
    # Apps #
    _scrape_category(
        base_url,
        "http://www.ndshb.com/index.php/component/jdownloads/viewcategory/3-apps?start={0}",
        "ndshb_apps", 7)
    # Games #
    _scrape_category(
        base_url,
        "http://www.ndshb.com/index.php/component/jdownloads/viewcategory/4-games?start={0}",
        "ndshb_games", 10)

def _scrape_category(base_url, page_template, subdir, pages):
    """Walk ``pages`` listing pages of one jdownloads category, saving
    every linked file under out/<subdir>. Listing pages show 10 items, so
    page i starts at offset i * 10."""
    if not os.path.isdir("out/" + subdir):
        os.mkdir("out/" + subdir)
    for i in range(0, pages):
        page = page_template.format(i * 10)
        f = urllib2.urlopen(page)
        soup = BeautifulSoup(f.read(), "html.parser")
        for link in soup.find_all(class_="jd_download_url"):
            url = link["href"]
            # last path segment, stripped of any query string
            filename = url.split('/')[-1].split("?")[0]
            save_file(base_url + url, subdir + "/" + filename)
def process_files(directory):
for root, directories, files in os.walk(directory):
for f in files:
try:
original = os.path.join(root, f)
output = os.path.join(dumping_directory, f)
# Extract zip files
if f.endswith(".zip"):
with zipfile.ZipFile(original, "r") as z:
os.mkdir(output[:-3])
z.extractall(output[:-3])
# Extract rar files
elif f.endswith(".rar"):
with rarfile.RarFile(original, "r") as z:
os.mkdir(output[:-3])
z.extractall(output[:-3])
# Just copy nds files
elif f.endswith(".nds"):
os.mkdir(output[:-3])
shutil.copy(original, os.path.join(output[:-3], f))
except (zipfile.BadZipfile, rarfile.BadRarFile):
print "Bad archive: " + f
continue
except Exception as e:
print e
continue
print "Processed " + f
for d in directories:
process_files(d)
def main():
    # Crawl the remote site first, then unpack everything that was fetched
    # into one folder per title under dumping_directory.
    nds_homebrew_hive()
    if not os.path.isdir(dumping_directory):
        os.mkdir(dumping_directory)
    process_files("out")
if __name__ == "__main__":
    main()
|
josh-perry/NDSHomebrewDownloader
|
main.py
|
Python
|
mit
| 3,662 | 0.001638 |
__author__ = 'Conscience'
from django.conf.urls import url
from django.contrib import admin
from . import views
urlpatterns = [
    # Blog index at the site root.
    url(r'^$', views.post_list, name='post_list'),
    # Single post, addressed by primary key.
    url(r'^post/(?P<pk>[0-9]+)/$', views.post_detail, name='post_detail'),
    # NOTE(review): duplicates the root route under the same name;
    # reverse('post_list') resolves to this later entry -- confirm intended.
    url(r'^post/$', views.post_list, name='post_list'),
    # Create a new post.
    url(r'^post/new/$', views.post_new, name='post_new'),
    # Edit an existing post.
    url(r'^post/(?P<pk>[0-9]+)/edit/$', views.post_edit, name='post_edit'),
]
|
Rivares/MyBlog
|
blog/urls.py
|
Python
|
apache-2.0
| 446 | 0.002242 |
# -*- coding: utf-8 -*-
"""
/***************************************************************************
DsgTools
A QGIS plugin
Brazilian Army Cartographic Production Tools
-------------------
begin : 2017-08-24
git sha : $Format:%H$
copyright : (C) 2017 by Philipe Borba - Cartographic Engineer @ Brazilian Army
email : borba.philipe@eb.mil.br
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
import os
from functools import partial
from qgis.core import Qgis
from qgis.utils import iface
from qgis.PyQt import uic
from qgis.PyQt.QtGui import QIcon, QColor, QKeySequence
from qgis.PyQt.QtCore import Qt, QSize, pyqtSlot, pyqtSignal, QSettings
from qgis.PyQt.QtWidgets import (QWidget,
QSpinBox,
QLineEdit,
QCheckBox,
QComboBox,
QPushButton,
QHBoxLayout,
QMessageBox,
QDoubleSpinBox)
from DsgTools.core.Utils.utils import Utils, MessageRaiser
from DsgTools.core.GeometricTools.layerHandler import LayerHandler
from DsgTools.gui.ProductionTools.Toolboxes.CustomFeatureToolBox.customButtonSetup import CustomButtonSetup, CustomFeatureButton
FORM_CLASS, _ = uic.loadUiType(os.path.join(
os.path.dirname(__file__), 'buttonPropWidget.ui'))
utils = Utils()
class ButtonPropWidget(QWidget, FORM_CLASS):
# col enum
COL_COUNT = 5
ATTR_COL, VAL_COL, PK_COL, EDIT_COL, IGNORED_COL = range(COL_COUNT)
    def __init__(self, parent=None, button=None):
        """
        Class constructor.
        :param parent: (QtWidgets.*) any widget that 'contains' this tool.
        :param button: (CustomFeatureButton) button to be managed.
        """
        super(ButtonPropWidget, self).__init__(parent)
        self.setupUi(self)
        self.button = button or CustomFeatureButton()
        self.fillToolComboBox()
        # each "use X" checkbox gates its companion input widget
        self.colorCheckBox.toggled.connect(self.mColorButton.setEnabled)
        self.tooltipCheckBox.toggled.connect(self.toolTipLineEdit.setEnabled)
        self.categoryCheckBox.toggled.connect(self.categoryLineEdit.setEnabled)
        self.keywordCheckBox.toggled.connect(self.keywordLineEdit.setEnabled)
        self.shortcutCheckBox.toggled.connect(self.shortcutWidget.setEnabled)
        # refresh the attribute table whenever the layer selection changes
        self.mMapLayerComboBox.layerChanged.connect(self.updateFieldTable)
        self.attributeTableWidget.setHorizontalHeaderLabels([
            self.tr("Attribute"), self.tr("Value"), self.tr("PK"),
            self.tr("Editable"), self.tr("Ignored")
        ])
        self.updateFieldTable()
def confirmAction(self, msg, title=None, showNo=True):
"""
Raises a message box that asks for user confirmation.
:param msg: (str) message requesting for confirmation to be shown.
:param showNo: (bool) whether No button should be exposed.
:return: (bool) whether action was confirmed.
"""
mb = QMessageBox()
title = title or self.tr("Confirm action")
if showNo:
return QMessageBox.question(
self, title, msg, QMessageBox.Yes | QMessageBox.No
) == QMessageBox.Yes
else:
return QMessageBox.question(
self, title, msg, QMessageBox.Ok) == QMessageBox.Ok
    def fillToolComboBox(self):
        """
        Sets up the available feature extraction tools on the GUI combo box.
        """
        self.toolComboBox.clear()
        # make sure those keys are EXACTLY the same as in "supportedTools"
        # method, from CustomFeatureButton
        tools = {
            self.tr("QGIS default feature extraction tool"): QIcon(""),
            self.tr("DSGTools: Free Hand Acquisition"): \
                QIcon(':/plugins/DsgTools/icons/free_hand.png'),
            self.tr("QGIS Circle extraction tool"): \
                QIcon(':/plugins/DsgTools/icons/circle.png'),
            self.tr("DSGTools: Right Degree Angle Digitizing"): \
                QIcon(':/plugins/DsgTools/icons/home.png')
        }
        for idx, (tool, icon) in enumerate(tools.items()):
            self.toolComboBox.insertItem(idx, tool)
            # the first entry (QGIS default tool) intentionally has no icon
            if idx != 0:
                self.toolComboBox.setItemIcon(idx, icon)
def setButtonName(self, name):
"""
Sets button name to GUI.
:param name: (str) name to be set to GUI.
"""
self.nameLineEdit.setText(name)
def buttonName(self):
"""
Reads button name from GUI.
:return: (str) button name read from GUI.
"""
return self.nameLineEdit.text().strip()
    def setDigitizingTool(self, tool):
        """
        Sets button's digitizing tool to GUI.
        :param tool: (str) a supported digitizing tool key, as accepted by
        CustomFeatureButton.supportedTools(); it is mapped to its display
        name before being set on the combo box.
        """
        tool = CustomFeatureButton().supportedTools()[tool]
        self.toolComboBox.setCurrentText(tool)
    def digitizingTool(self):
        """
        Reads current digitizing tool.
        :return: (str) current digitizing tool key (display name mapped back
        through the reversed supportedTools() map).
        """
        tools = {v: k for k, v in \
                CustomFeatureButton().supportedTools().items()}
        return tools[self.toolComboBox.currentText()]
def setUseColor(self, useColor):
"""
Sets whether button will have a custom color set as read from GUI.
:param useColor: (bool) whether button should use a custom color
palette.
"""
self.colorCheckBox.setChecked(useColor)
def useColor(self):
"""
Reads whether button will have a custom color from GUI.
:return: (bool) whether button should use a custom color
palette.
"""
return self.colorCheckBox.isChecked()
def setColor(self, color):
"""
Sets custom color to the color widget.
:param color: (str/tuple) color to be set.
"""
if isinstance(color, str):
color = QColor(color)
else:
color = QColor(*color)
self.mColorButton.setColor(color)
def color(self):
"""
Reads custom color to be set to widget as read from GUI.
:return: (tuple) color to be used.
"""
return self.mColorButton.color().getRgb()
def setUseToolTip(self, useToolTip):
"""
Defines if button will have a tool tip assigned to it as read from GUI.
:param useToolTip: (bool) whether button will have a tool tip assigned.
"""
self.tooltipCheckBox.setChecked(useToolTip)
def useToolTip(self):
"""
Reads if the button will have a tool tip assigned to it from GUI.
:return: (bool) whether the button will have a tool tip assigned.
"""
return self.tooltipCheckBox.isChecked()
def setToolTip(self, tooltip):
"""
Sets a tool tip for the active button widget.
:param tooltip: (str) tool tip to be set.
"""
self.toolTipLineEdit.setText(tooltip)
    def toolTip(self):
        """
        Reads the tool tip for the button from GUI.
        :return: (str) tool tip currently typed on the GUI.
        """
        return self.toolTipLineEdit.text()
def setUseCategory(self, useCat):
"""
Sets button's category/group to GUI.
:param useCat: (bool) whether button will have a category assigned.
"""
self.categoryCheckBox.setChecked(useCat)
def useCategory(self):
"""
Reads button's category/group from GUI.
:return: (bool) whether button will have a category assigned.
"""
return self.categoryCheckBox.isChecked()
def setCategory(self, cat):
"""
Assigns a category/group to the active button.
:param cat: (str) category to be set.
"""
self.categoryLineEdit.setText(cat)
def category(self):
"""
Reads the assigned category/group to the active button from GUI.
:return: (str) category to be used.
"""
return self.categoryLineEdit.text()
def setUseKeywords(self, useKw):
"""
Sets whether active button should have keywords for button searching.
:param useKw: (bool) whether button will have keywords assigned to it.
"""
self.keywordCheckBox.setChecked(useKw)
def useKeywords(self):
"""
Reads whether active button should have keywords for button searching
from GUI.
:return: (bool) whether button will have keywords assigned to it.
"""
return self.keywordCheckBox.isChecked()
def setKeywords(self, kws):
"""
Sets button's keywords for button searching.
:param kws: (set-of-str) set of keywords to be assigned to the button.
"""
self.keywordLineEdit.setText(" ".join(kws))
def keywords(self):
"""
Reads button's keywords for button searching from GUI.
:return: (set-of-str) set of keywords to be assigned to the button.
"""
return set(self.keywordLineEdit.text().strip().split(" "))
def setUseShortcut(self, useShortcut):
"""
Sets whether active button should have a shortcut assigned to it.
:param useShortcut: (bool) whether button will have a shortcut assigned.
"""
self.shortcutCheckBox.setChecked(useShortcut)
def useShortcut(self):
"""
Reads whether active button should have a shortcut assigned to it from GUI.
:return: (bool) whether button will have a shortcut assigned.
"""
return self.shortcutCheckBox.isChecked()
def checkShortcut(self, s):
"""
Verifies if a shortcut is already set to any action on QGIS.
:param s: (str) shortcut to be checked.
:return: (str) action associated with given shortcut.
"""
if s == "":
return ""
for m in dir(iface):
if m.startswith("action") and \
getattr(iface, m)().shortcut().toString().lower() == s.lower():
return getattr(iface, m)().text()
return ""
    def setShortcurt(self, s, autoReplace=True):
        """
        Assigns a shortcut to trigger active button's action.
        NOTE: the method name keeps its historical misspelling of
        "setShortcut" because callers (e.g. setButton) rely on it.
        :param s: (str) new shortcut to be set.
        :param autoReplace: (bool) whether a confirmation from the user is
        necessary in order to replace existing shortcuts.
        """
        s = s.replace(" ", "")
        action = self.checkShortcut(s)
        # ask before stealing a shortcut already bound to another action
        if not autoReplace and action != "":
            txt = self.tr("Shortcut {s} is already assigned to {a}, would you "
                          "like to replace it?").format(s=s, a=action)
            if not self.confirmAction(txt, self.tr("Replace shortcut")):
                return
        self.shortcutWidget.setShortcut(QKeySequence.fromString(s))
    def shortcut(self):
        """
        Assigned shortcut read from GUI.
        :return: (str) shortcut to be used ("" when unset).
        """
        s = self.shortcutWidget.getShortcut(True)
        # NOTE(review): getShortcut(True) apparently returns 0 when no
        # shortcut is set, hence the comparison against 0 -- confirm against
        # the shortcut widget's API.
        return s.toString() if s != 0 else ""
def setOpenForm(self, openForm):
"""
Defines whether (re)classification tool will open feature form while
being used.
:param openForm: (bool) whether feature form should be opened.
"""
self.openFormCheckBox.setChecked(openForm)
def openForm(self):
"""
Defines whether (re)classification tool will open feature form while
being used.
:return: (bool) whether feature form should be opened.
"""
return self.openFormCheckBox.isChecked()
    def setAttributeMap(self, attrMap):
        """
        Sets the attribute value map for current button to GUI.
        NOTE: missing entries are added to *attrMap* in place, so the
        caller's dict is mutated.
        :param attrMap: (dict) a map from each field and its value to be set.
        """
        self.updateFieldTable()
        table = self.attributeTableWidget
        vl = self.vectorLayer()
        valueMaps = dict()
        # displayed values are always "aliased" when possible, so map needs to
        # be reversed (e.g. set to actual value to display name)
        if vl is not None:
            for fName, vMap in LayerHandler().valueMaps(vl).items():
                valueMaps[fName] = {v: k for k, v in vMap.items()}
        def setMappedValue(cb, field, value):
            # warn and skip when the stored value no longer exists in the
            # layer's value map (e.g. outdated style vs. data model)
            vlFound = value in valueMaps[field]
            if not vlFound:
                msg = self.tr("'{0}' is an invalid value for field {1}. (Is "
                              "the layer style generated from the current data"
                              " model?")\
                    .format(value, field)
                title = self.tr("DSGTools Custom Feature Tool Box")
                MessageRaiser().raiseIfaceMessage(title, msg, Qgis.Warning, 5)
                value = None
            if value is None:
                return
            cb.setCurrentText(valueMaps[field][value])
        pkIdxList = vl.primaryKeyAttributes() if vl else []
        for row in range(table.rowCount()):
            # strip Qt mnemonic markers ("&") to recover the attribute name
            attr = table.cellWidget(row, self.ATTR_COL).text().replace("&", "")
            valueWidget = table.cellWidget(row, self.VAL_COL)
            isPk = row in pkIdxList
            if not attrMap or attr not in attrMap:
                attrMap[attr] = {
                    "value": None,
                    "editable": False,
                    "ignored": isPk, # default is False unless it's a PK attr
                    "isPk": isPk
                }
            # dispatch on the concrete widget type; each lambda is invoked
            # immediately, so no late-binding issue arises
            {
                QLineEdit: lambda v: valueWidget.setText(v or ""),
                QSpinBox: lambda v: valueWidget.setValue(v or 0),
                QDoubleSpinBox: lambda v: valueWidget.setValue(v or 0.0),
                QComboBox: lambda v: setMappedValue(valueWidget, attr, v)
            }[type(valueWidget)](attrMap[attr]["value"])
            valueWidget.setEnabled(not attrMap[attr]["ignored"])
            table.cellWidget(row, self.EDIT_COL).cb.setChecked(
                attrMap[attr]["editable"])
            table.cellWidget(row, self.IGNORED_COL).cb.setChecked(
                attrMap[attr]["ignored"])
            table.setCellWidget(row, self.PK_COL,
                self.pkWidget() if isPk else QWidget())
    def attributeMap(self):
        """
        Reads the field map data and set it to a button attribute map format.
        :return: (dict) read attribute map, keyed by attribute name with
        "value", "editable", "ignored" and "isPk" entries per attribute.
        """
        attrMap = dict()
        table = self.attributeTableWidget
        vMaps = LayerHandler().valueMaps(self.vectorLayer()) \
            if self.vectorLayer() else {}
        for row in range(table.rowCount()):
            # strip Qt mnemonic markers ("&") to recover the attribute name
            attr = table.cellWidget(row, self.ATTR_COL).text().replace("&", "")
            attrMap[attr] = dict()
            valueWidget = table.cellWidget(row, self.VAL_COL)
            attrMap[attr]["ignored"] = table.cellWidget(row, self.IGNORED_COL)\
                .cb.isChecked()
            # "ignored" still allows the value to be set as last priority
            # (dispatch on widget type; each lambda is invoked immediately)
            attrMap[attr]["value"] = {
                QLineEdit: lambda: valueWidget.text(),
                QSpinBox: lambda: valueWidget.value(),
                QDoubleSpinBox: lambda: valueWidget.value(),
                QComboBox: lambda: vMaps[attr][valueWidget.currentText()]
            }[type(valueWidget)]()
            # a pkWidget (QPushButton) in the PK column marks primary keys
            attrMap[attr]["isPk"] = isinstance(
                table.cellWidget(row, self.PK_COL), QPushButton)
            attrMap[attr]["editable"] = table.cellWidget(row, self.EDIT_COL)\
                .cb.isChecked()
        return attrMap
def setLayer(self, layer):
"""
Sets current layer selection on GUI.
:param layer: (str) name for the layer to be set.
"""
if layer != "":
self.mMapLayerComboBox.setCurrentText(layer)
else:
self.mMapLayerComboBox.setCurrentIndex(0)
def layer(self):
"""
Reads the name for the selected layer from GUI.
:return: (str) name for the selected layer.
"""
return self.mMapLayerComboBox.currentText()
def vectorLayer(self):
"""
Reads current layer selection from GUI.
:return: (QgsVectorLayer) selected vector layer.
"""
return self.mMapLayerComboBox.currentLayer()
def centeredCheckBox(self):
"""
Instantiates a centered check box.
:return: (QWidget) a QCheckBox centered on a widget.
"""
w = QWidget()
l = QHBoxLayout()
l.setAlignment(Qt.AlignCenter)
cb = QCheckBox()
# just an easy way to access the cb
w.cb = cb
l.addWidget(cb)
w.setLayout(l)
return w
    def pkWidget(self):
        """
        Instantiates a push button with no border using a key as an icon to be
        used on rows associated with primary key attributes.
        :return: (QPushButton) flat, signal-blocked key-icon widget.
        """
        pb = QPushButton()
        pb.setIcon(QIcon(':/plugins/DsgTools/icons/key.png'))
        pb.setFlat(True)
        # purely decorative: never emits clicks
        pb.blockSignals(True)
        pb.setObjectName("pkWidget")
        pb.setText("")
        return pb
def attributeNameWidget(self, fieldName, isNotNull):
"""
Retrieves a widget to be used into field table to expose field's name.
:param fieldName: (str) fieldName to be exhibited.
:param isNotNull: (bool) whether field is a mandatory attribute.
:return: (QPushButton) a button ready to be setup to GUI.
"""
pb = QPushButton()
pb.setText(fieldName)
pb.setFlat(True)
pb.setEnabled(False)
if isNotNull:
pb.setStyleSheet(
"*{ color:rgb(150, 10, 25); "\
"background-color:rgba(255, 88, 116, 1.00); }"
)
pb.setToolTip(self.tr("Field cannot be empty"))
else:
pb.setStyleSheet("color: black;")
return pb
def valueWidget(self, field, data):
"""
Retrieves correct widget for a given field based on its type.
:param field: (QgsField) field to be represented.
:param data: (float/int/str) initial data to be set to widget.
:return: (QDoubleSpinBox/QSpinBox/QLineEdit) the adequate widget for
field.
"""
if utils.fieldIsFloat(field):
vWidget = QDoubleSpinBox()
vWidget.setMaximum(99999999)
vWidget.setMinimum(-99999999)
if data is not None:
vWidget.setValue(data)
elif utils.fieldIsInt(field):
vWidget = QSpinBox()
vWidget.setMaximum(99999999)
vWidget.setMinimum(-99999999)
if data is not None:
vWidget.setValue(data)
else:
vWidget = QLineEdit()
vWidget.setPlaceholderText(
self.tr("Type the value for {0}").format(field.name()))
if data is not None:
vWidget.setText(data)
return vWidget
def updateFieldTable(self, layer=None):
"""
Updates current displayed fields based on current layer selection.
:param layer: (QgsVectorLayer) layer to have its fields exposed.
"""
layer = layer or self.vectorLayer()
self.attributeTableWidget.setRowCount(0)
if layer is None:
return
fields = layer.fields()
pkIdxList = layer.primaryKeyAttributes() if layer else []
if layer.name() == self.button.layer():
attrMap = self.button.attributeMap()
else:
# it does not make sense to use the saved map valued for a
# different layer selection
attrMap = dict()
valueMaps = dict()
# displayed values are always "aliased" when possible, so map needs to
# be reversed (e.g. set to actual value to display name)
for fName, vMap in self.readButton().valueMaps().items():
valueMaps[fName] = {v: k for k, v in vMap.items()}
virtualFields = list()
for idx, f in enumerate(fields):
if fields.fieldOrigin(idx) == fields.OriginExpression:
virtualFields.append(f.name())
self.attributeTableWidget.setRowCount(len(fields) - len(virtualFields))
def setDisabled(w, status):
w.setEnabled(not status)
for row, field in enumerate(fields):
fName = field.name()
if fName in virtualFields:
# virtual fields are ignored
continue
isPk = row in pkIdxList
notNull = not utils.fieldIsNullable(field)
self.attributeTableWidget.setCellWidget(
row, self.ATTR_COL, self.attributeNameWidget(fName, notNull))
if fName not in attrMap:
attrMap[fName] = {
"value": None,
"editable": False,
"ignored": isPk, # default is False unless it's a PK attr
"isPk": isPk
}
value = attrMap[fName]["value"]
if fName in valueMaps:
vWidget = QComboBox()
vWidget.addItems(set(valueMaps[fName].values()))
if value is not None and value in valueMaps[fName]:
# an "old" version of the map may have been loaded
value = valueMaps[fName][value]
vWidget.setCurrentText(value)
else:
vWidget = self.valueWidget(field, value)
vWidget.setEnabled(not attrMap[fName]["ignored"])
self.attributeTableWidget.setCellWidget(row, self.VAL_COL, vWidget)
ccbEdit = self.centeredCheckBox()
ccbEdit.cb.setChecked(attrMap[fName]["editable"])
self.attributeTableWidget.setCellWidget(
row, self.EDIT_COL, ccbEdit)
ccbIgnore = self.centeredCheckBox()
ccbIgnore.cb.setChecked(attrMap[fName]["ignored"])
ccbIgnore.cb.toggled.connect(partial(setDisabled, vWidget))
self.attributeTableWidget.setCellWidget(
row, self.IGNORED_COL, ccbIgnore)
def checkExclusiveCB(ccb1, ccb2):
"""
Method to make two CB to be mutually exclusive (like radio buttons.
"""
cb = self.sender()
if cb == ccb2.cb:
# just to make sure var 'cb1' is always the cb that was
# checked by the user
cb = ccb2
ccb2 = ccb1
ccb1 = cb
if ccb1.cb.isChecked() and ccb2.cb.isChecked():
ccb2.cb.setChecked(False)
exclusiveCb = partial(checkExclusiveCB, ccbEdit, ccbIgnore)
ccbIgnore.cb.toggled.connect(exclusiveCb)
ccbEdit.cb.toggled.connect(exclusiveCb)
# since row is from an enum of fields, field idx = row
self.attributeTableWidget.setCellWidget(row, self.PK_COL,
self.pkWidget() if isPk else QWidget())
def setButton(self, button):
"""
Sets button properties to the GUI.
:param button: (CustomFeatureButton) button to be set to the GUI.
"""
self.setButtonName(button.name())
self.setDigitizingTool(button.digitizingTool())
self.setUseColor(button.useColor())
self.setColor(button.color())
self.setUseToolTip(bool(button.toolTip()))
self.setToolTip(button.toolTip())
self.setUseCategory(bool(button.category()))
self.setCategory(button.category())
self.setUseKeywords(bool(button.keywords()))
self.setKeywords(button.keywords())
self.setUseShortcut(bool(button.shortcut()))
self.setShortcurt(button.shortcut())
self.setOpenForm(button.openForm())
self.setLayer(button.layer())
self.updateFieldTable()
self.setAttributeMap(button.attributeMap())
self.mColorButton.setEnabled(button.useColor())
self.toolTipLineEdit.setEnabled(bool(button.toolTip()))
self.categoryLineEdit.setEnabled(bool(button.category()))
self.keywordLineEdit.setEnabled(bool(button.keywords()))
self.shortcutWidget.setEnabled(bool(button.shortcut()))
self.button = button
def readButton(self):
"""
Reads data from the interface and sets it to a button object.
:return: (CustomFeatureButton) button read from the interface.
"""
b = CustomFeatureButton()
b.setName(self.buttonName())
b.setDigitizingTool(self.digitizingTool())
b.setUseColor(self.useColor())
if self.useColor():
b.setColor(self.color())
b.setToolTip(self.toolTip() if self.useToolTip() else "")
b.setCategory(self.category() if self.useCategory() else "")
b.setKeywords(self.keywords() if self.useKeywords() else set(""))
b.setShortcut(self.shortcut() if self.useShortcut() else "")
b.setOpenForm(self.openForm())
b.setLayer(self.layer())
b.setAttributeMap(self.attributeMap())
return b
    def currentButtonName(self):
        """
        Retrieves the name of the currently managed button.
        :return: (str) name of the current button.
        """
        return self.button.name()
def currentButton(self):
"""
Retrieves currently SAVED button.
:return: (CustomFeatureButton) current button.
"""
return self.button
def setEnabled(self, enabled):
"""
Defines whether all widgets should be enabled.
:param enabled: (bool) widgets enabling status.
"""
self.nameLineEdit.setEnabled(enabled)
self.toolComboBox.setEnabled(enabled)
self.colorCheckBox.setEnabled(enabled)
self.mColorButton.setEnabled(enabled)
self.tooltipCheckBox.setEnabled(enabled)
self.toolTipLineEdit.setEnabled(enabled)
self.categoryCheckBox.setEnabled(enabled)
self.categoryLineEdit.setEnabled(enabled)
self.keywordCheckBox.setEnabled(enabled)
self.keywordLineEdit.setEnabled(enabled)
self.shortcutCheckBox.setEnabled(enabled)
self.openFormCheckBox.setEnabled(enabled)
self.mMapLayerComboBox.setEnabled(enabled)
self.attributeTableWidget.setEnabled(enabled)
|
lcoandrade/DsgTools
|
gui/CustomWidgets/BasicInterfaceWidgets/buttonPropWidget.py
|
Python
|
gpl-2.0
| 27,350 | 0.00106 |
"""Init file for the paci helpers"""
|
tradebyte/paci
|
paci/helpers/__init__.py
|
Python
|
mit
| 37 | 0 |
import logging
from json import JSONEncoder, dumps
from flask import current_app, make_response, request
__all__ = (
"serialize",
"jsonify",
)
log = logging.getLogger(__name__)
def serialize(rv):
log.debug("Serializing output")
if rv is None or (isinstance(rv, str) and not len(rv)):
log.info("No content")
rv = make_response("", 204)
elif (
isinstance(rv, current_app.response_class)
or callable(rv)
or isinstance(rv, str)
):
...
else:
log.info("Serializing")
rv = jsonify(rv)
if request.method == "POST":
make_response(rv, 201)
return rv
def jsonify(*args, **kwargs):
if args and kwargs:
raise TypeError("jsonify() behavior undefined when passed both args and kwargs")
elif len(args) == 1: # single args are passed directly to dumps()
data = args[0]
else:
data = args or kwargs
pretty_print = bool(
request.args.get(
"pretty-print", current_app.config["JSONIFY_PRETTYPRINT_REGULAR"]
)
)
indent = None
separators = (",", ":")
cls = current_app.json_encoder or JSONEncoder
if pretty_print is True and request.is_xhr is False:
indent = 2
separators = (", ", ": ")
if hasattr(request, "operation") and request.operation.produces:
mime_type = request.operation.produces[0]
elif "JSONIFY_MIMETYPE" in current_app.config:
mime_type = current_app.config["JSONIFY_MIMETYPE"]
else:
mime_type = "application/json; charset=utf-8"
json_str = dumps(data, indent=indent, separators=separators, cls=cls) + "\n"
json_str.encode("utf-8")
return current_app.response_class(json_str, mimetype=mime_type)
|
pegasus-isi/pegasus
|
packages/pegasus-python/src/Pegasus/service/_serialize.py
|
Python
|
apache-2.0
| 1,770 | 0.00113 |
#
# Copyright (c) 2009 Mason Green & Tom Novelli
#
# This file is part of OpenMelee.
#
# OpenMelee is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# OpenMelee is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with OpenMelee. If not, see <http://www.gnu.org/licenses/>.
#
from math import atan2, pi
from steer import Steer
from engine import draw_line
class AI(object):
def __init__(self, ship, enemy, actors):
self.ship = ship
self.steer = Steer(ship, actors)
self.max_prediction_time = 0.25
self.planet = ship.melee.planet
self.enemy = enemy
# Elementary steering AI
def update(self):
st = self.steer.collision_threat(2.5)
self.range = (self.ship.body.position - self.enemy.body.position).length
range2 = (self.ship.body.position - self.planet.body.position).length
margin = self.planet.radius + self.ship.radius * 2.0
if st is None and range2 > margin:
self.chase()
return
if st:
self.avoid(st)
def chase(self):
st = self.steer.target(self.enemy, self.max_prediction_time)
#p1 = self.ship.body.position
#draw_line(p1.x, p1.y, st.x, st.y)
st = self.ship.body.get_local_point(st)
# Ship's heading is 180 off rigid body's heading => add pi
angle = atan2(st.x, st.y) + pi
if self.range < 50 and (angle < 0.05 or angle > 6.233):
self.ship.fire()
if angle > 0.05 and angle < 6.233:
if angle >= 0.05 and angle < pi:
self.ship.turn_right()
else:
self.ship.turn_left()
else:
self.ship.body.angular_velocity = 0.0
if self.range > 5.0:
self.ship.thrust()
def avoid(self, st):
k = self.ship.body.get_local_point(st)
angle = atan2(k.x, k.y) + pi
t = self.ship.body.linear_velocity.cross(st)
if self.range < 50 and (angle < 0.05 or angle > 6.233):
self.ship.fire()
if t >= 0:
self.ship.turn_right()
else:
self.ship.turn_left()
self.ship.thrust()
|
zzzzrrr/openmelee
|
ai/ai.py
|
Python
|
gpl-3.0
| 2,647 | 0.005667 |
# -*- coding: utf-8 -*-
ESQUEMA_ATUAL = u'pl_005f'
#
# Envelopes SOAP
#
from .soap_100 import SOAPEnvio as SOAPEnvio_110
from .soap_100 import SOAPRetorno as SOAPRetorno_110
#
# Emissão de NF-e
#
from .nfe_110 import NFe as NFe_110
from .nfe_110 import NFRef as NFRef_110
from .nfe_110 import Det as Det_110
from .nfe_110 import DI as DI_110
from .nfe_110 import Adi as Adi_110
from .nfe_110 import Med as Med_110
from .nfe_110 import Arma as Arma_110
from .nfe_110 import Reboque as Reboque_110
from .nfe_110 import Vol as Vol_110
from .nfe_110 import Lacres as Lacres_110
from .nfe_110 import Dup as Dup_110
from .nfe_110 import ObsCont as ObsCont_110
from .nfe_110 import ObsFisco as ObsFisco_110
from .nfe_110 import ProcRef as ProcRef_110
#
# Envio de lote de NF-e
#
from .envinfe_110 import EnviNFe as EnviNFe_110
from .envinfe_110 import RetEnviNFe as RetEnviNFe_110
#
# Consulta do recibo do lote de NF-e
#
from .consrecinfe_110 import ConsReciNFe as ConsReciNFe_110
from .consrecinfe_110 import RetConsReciNFe as RetConsReciNFe_110
from .consrecinfe_110 import ProtNFe as ProtNFe_110
from .consrecinfe_110 import ProcNFe as ProcNFe_110
#
# Cancelamento de NF-e
#
from .cancnfe_107 import CancNFe as CancNFe_107
from .cancnfe_107 import RetCancNFe as RetCancNFe_107
from .cancnfe_107 import ProcCancNFe as ProcCancNFe_107
#
# Inutilização de NF-e
#
from .inutnfe_107 import InutNFe as InutNFe_107
from .inutnfe_107 import RetInutNFe as RetInutNFe_107
from .inutnfe_107 import ProcInutNFe as ProcInutNFe_107
#
# Consulta a situação de NF-e
#
from .conssitnfe_107 import ConsSitNFe as ConsSitNFe_107
from .conssitnfe_107 import RetConsSitNFe as RetConsSitNFe_107
#
# Consulta a situação do serviço
#
from .consstatserv_107 import ConsStatServ as ConsStatServ_107
from .consstatserv_107 import RetConsStatServ as RetConsStatServ_107
#
# Consulta cadastro
#
from .conscad_101 import ConsCad as ConsCad_101
from .conscad_101 import RetConsCad as RetConsCad_101
|
thiagopena/PySIGNFe
|
pysignfe/nfe/manual_300/__init__.py
|
Python
|
lgpl-2.1
| 1,984 | 0.017713 |
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------
import asyncio
import os
from azure.identity.aio import DefaultAzureCredential
from azure.keyvault.certificates import CertificateContentType, CertificatePolicy, WellKnownIssuerNames
from azure.keyvault.certificates.aio import CertificateClient
# ----------------------------------------------------------------------------------------------------------
# Prerequisites:
# 1. An Azure Key Vault (https://docs.microsoft.com/en-us/azure/key-vault/quick-create-cli)
#
# 2. azure-keyvault-certificates and azure-identity packages (pip install these)
#
# 3. Set up your environment to use azure-identity's DefaultAzureCredential. To authenticate a service principal with
# environment variables, set AZURE_CLIENT_ID, AZURE_CLIENT_SECRET, and AZURE_TENANT_ID
# (See https://github.com/Azure/azure-sdk-for-python/tree/main/sdk/keyvault/azure-keyvault-certificates#authenticate-the-client)
#
# 4. A PFX certificate on your machine. Set an environment variable, PFX_CERT_PATH, with the path to this certificate.
#
# 5. A PEM-formatted certificate on your machine. Set an environment variable, PEM_CERT_PATH, with the path to this
# certificate.
#
# ----------------------------------------------------------------------------------------------------------
# Sample - demonstrates importing a PFX and PEM-formatted certificate into Azure Key Vault
#
# 1. Import an existing PFX certificate (import_certificate)
#
# 2. Import an existing PEM-formatted certificate (import_certificate)
#
# ----------------------------------------------------------------------------------------------------------
async def run_sample():
    """Import a PFX and a PEM-formatted certificate into Azure Key Vault.

    Reads VAULT_URL, PFX_CERT_PATH and PEM_CERT_PATH from the environment.
    """
    # Instantiate a certificate client that will be used to call the service.
    # Here we use the DefaultAzureCredential, but any azure-identity credential can be used.
    vault_url = os.environ["VAULT_URL"]
    credential = DefaultAzureCredential()
    client = CertificateClient(vault_url=vault_url, credential=credential)
    try:
        # Let's import a PFX certificate first.
        # Assuming you already have a PFX containing your key pair, you can import it into Key Vault.
        # You can do this without setting a policy, but the policy is needed if you want the private key
        # to be exportable or to configure actions when a certificate is close to expiration.
        pfx_cert_name = "pfxCert"
        with open(os.environ["PFX_CERT_PATH"], "rb") as f:
            pfx_cert_bytes = f.read()
        imported_pfx_cert = await client.import_certificate(
            certificate_name=pfx_cert_name, certificate_bytes=pfx_cert_bytes
        )
        print("PFX certificate '{}' imported successfully.".format(imported_pfx_cert.name))

        # Now let's import a PEM-formatted certificate.
        # To import a PEM-formatted certificate, you must provide a CertificatePolicy that sets the
        # content_type to CertificateContentType.pem or the certificate will fail to import (the default
        # content type is PFX).
        pem_cert_name = "pemCert"
        with open(os.environ["PEM_CERT_PATH"], "rb") as f:
            pem_cert_bytes = f.read()
        pem_cert_policy = CertificatePolicy(
            issuer_name=WellKnownIssuerNames.self, content_type=CertificateContentType.pem
        )
        imported_pem_cert = await client.import_certificate(
            certificate_name=pem_cert_name, certificate_bytes=pem_cert_bytes, policy=pem_cert_policy
        )
        print("PEM-formatted certificate '{}' imported successfully.".format(imported_pem_cert.name))
    finally:
        # Close even when an import fails (the original leaked both objects on
        # error); close the client before the credential it depends on.
        await client.close()
        await credential.close()
if __name__ == "__main__":
    # asyncio.run() creates and closes the event loop itself; the manual
    # get_event_loop()/run_until_complete()/close() dance is the pre-3.7
    # pattern, and get_event_loop() is deprecated for this use since 3.10.
    asyncio.run(run_sample())
|
Azure/azure-sdk-for-python
|
sdk/keyvault/azure-keyvault-certificates/samples/import_certificate_async.py
|
Python
|
mit
| 3,725 | 0.005101 |
import os
import shutil

import pybedtools
# Directory containing this test module.
testdir = os.path.dirname(__file__)
# Scratch directory for temporary files; created by setup() and removed by
# teardown() below.
test_tempdir = os.path.join(os.path.abspath(testdir), 'tmp')
# Path for tests needing a non-writeable location — presumably made
# unwriteable elsewhere; confirm against the tests that use it.
unwriteable = os.path.join(os.path.abspath(testdir), 'unwriteable')
def setup():
    """Create the scratch temp dir (if needed) and point pybedtools at it."""
    # os.makedirs replaces the previous os.system('mkdir -p ...') shell call:
    # no shell involved, and failures raise instead of being silently ignored.
    if not os.path.exists(test_tempdir):
        os.makedirs(test_tempdir)
    pybedtools.set_tempdir(test_tempdir)
def teardown():
    """Delete the scratch temp dir and clean up pybedtools' temp files."""
    # shutil.rmtree replaces the previous os.system('rm -r ...') shell call:
    # portable, no shell-quoting issues, and failures raise instead of being
    # silently ignored.
    if os.path.exists(test_tempdir):
        shutil.rmtree(test_tempdir)
    pybedtools.cleanup()
|
jos4uke/getSeqFlankBlatHit
|
lib/python2.7/site-packages/pybedtools/test/tfuncs.py
|
Python
|
gpl-2.0
| 462 | 0.004329 |
from django import template
register = template.Library()
# Used to create the generated link url for the templates
@register.filter
def get_link_filter(obj, arg):
    """Template filter: return ``obj.get_link(arg)``, the generated link URL."""
    return obj.get_link(arg)
# Used to create the base url of the generated link for the templates
@register.filter
def get_base_link_filter(obj):
    """Template filter: return ``obj.get_base_link()``, the base link URL."""
    return obj.get_base_link()
|
djorda9/Simulated-Conversations
|
vagrant/simcon/templatetags/generatelink_extras.py
|
Python
|
mit
| 376 | 0.007979 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import os
from migrate.versioning import api as versioning_api
# See LP bug #719834. sqlalchemy-migrate changed location of
# exceptions.py after 0.6.0.
try:
from migrate.versioning import exceptions as versioning_exceptions
except ImportError:
from migrate import exceptions as versioning_exceptions
from glance.common import exception
logger = logging.getLogger('glance.registry.db.migration')
def db_version(conf):
    """
    Return the database's current migration number.

    :param conf: configuration object; only ``conf.sql_connection`` is read
    :retval version number
    :raises exception.DatabaseMigrationError: if the database is not under
        migration control
    """
    repo_path = get_migrate_repo_path()
    sql_connection = conf.sql_connection
    try:
        return versioning_api.db_version(sql_connection, repo_path)
    except versioning_exceptions.DatabaseNotControlledError, e:
        # The local name ``sql_connection`` is consumed by the
        # %-interpolation against locals() below — do not rename it.
        msg = (_("database '%(sql_connection)s' is not under "
                 "migration control") % locals())
        raise exception.DatabaseMigrationError(msg)
def upgrade(conf, version=None):
    """
    Upgrade the database's current migration level.

    :param conf: configuration object; only ``conf.sql_connection`` is read
    :param version: version to upgrade (defaults to latest)
    :retval version number
    :raises exception.DatabaseMigrationError: if the database is not under
        migration control
    """
    db_version(conf)  # Ensure db is under migration control
    repo_path = get_migrate_repo_path()
    sql_connection = conf.sql_connection
    # ``version_str`` and ``sql_connection`` are consumed only via the
    # %-interpolation against locals() in the log line — do not rename them.
    version_str = version or 'latest'
    logger.info(_("Upgrading %(sql_connection)s to version %(version_str)s") %
                locals())
    return versioning_api.upgrade(sql_connection, repo_path, version)
def downgrade(conf, version):
    """
    Downgrade the database's current migration level.

    :param conf: configuration object; only ``conf.sql_connection`` is read
    :param version: version to downgrade to
    :retval version number
    :raises exception.DatabaseMigrationError: if the database is not under
        migration control
    """
    db_version(conf)  # Ensure db is under migration control
    repo_path = get_migrate_repo_path()
    sql_connection = conf.sql_connection
    # ``sql_connection`` and ``version`` are consumed via locals() in the
    # log line — do not rename them.
    logger.info(_("Downgrading %(sql_connection)s to version %(version)s") %
                locals())
    return versioning_api.downgrade(sql_connection, repo_path, version)
def version_control(conf):
    """
    Place a database under migration control.

    :param conf: configuration object; only ``conf.sql_connection`` is read
    :raises exception.DatabaseMigrationError: if the database is already
        under migration control
    """
    sql_connection = conf.sql_connection
    try:
        _version_control(conf)
    except versioning_exceptions.DatabaseAlreadyControlledError, e:
        # ``sql_connection`` is consumed via locals() in the message below —
        # do not rename it.
        msg = (_("database '%(sql_connection)s' is already under migration "
                 "control") % locals())
        raise exception.DatabaseMigrationError(msg)
def _version_control(conf):
    """
    Put the configured database under migration control.

    :param conf: configuration object; only ``conf.sql_connection`` is read
    """
    return versioning_api.version_control(conf.sql_connection,
                                          get_migrate_repo_path())
def db_sync(conf, version=None):
    """
    Place a database under migration control and perform an upgrade

    :param conf: conf dict
    :param version: version to upgrade to (defaults to latest)
    :retval version number
    """
    try:
        _version_control(conf)
    except versioning_exceptions.DatabaseAlreadyControlledError:
        # Already under migration control; proceed straight to the upgrade.
        pass
    # BUG FIX: the version number promised by the docstring was previously
    # dropped; propagate upgrade()'s return value to the caller.
    return upgrade(conf, version=version)
def get_migrate_repo_path():
    """Return the absolute path of the migrate repository."""
    here = os.path.abspath(os.path.dirname(__file__))
    repo_path = os.path.join(here, 'migrate_repo')
    assert os.path.exists(repo_path)
    return repo_path
|
rcbops/glance-buildpackage
|
glance/registry/db/migration.py
|
Python
|
apache-2.0
| 3,992 | 0 |
# -*- coding: utf-8 -*-
# Author: <Your name>
# License see LICENSE
from PyKDE4.kdecore import i18n
from PyKDE4.kdeui import KAction, KIcon
from PyQt4.QtCore import QObject
from PyQt4.QtGui import QMenu
from libkatepate.errors import showOk, showError
import kate
class MyPlugin(QObject):
    """Skeleton Kate/Pate plugin.

    Shows a confirmation popup on load; the commented-out template code below
    demonstrates how to register a configurable, toggleable menu action.
    """
    def __init__(self):
        QObject.__init__(self)
        # Kate's top-level main window — used as a parent for actions/menus.
        self.window = kate.mainInterfaceWindow().window()
        # Passive confirmation that the plugin was loaded.
        showOk('MyPlugin inits!')
        # Template: create a checkable action with a persisted shortcut and
        # hook it into the "view" menu (uncomment and adapt as needed).
        # self.act = KAction(KIcon("reload"), i18n("Auto Reload"), self)
        # self.act.setObjectName("test")
        # self.window.actionCollection().addAction(self.act.objectName(), self.act)
        # self.window.findChild(QMenu, 'view').addAction(self.act)
        # if not self.act.objectName() in kate.configuration:
        #     kate.configuration[self.act.objectName()] = "alt+r"
        # self.act.setShortcut(kate.configuration[self.act.objectName()])
        # self.act.setCheckable(True)
        # self.act.setChecked(False)
        # self.act.changed.connect(self.onActionChange)
        # self.act.toggled.connect(self.toggle)
        # kate.mainInterfaceWindow().viewChanged.connect(self.onViewChanged)
    def onActionChange(self):
        """Persist the emitting action's current shortcut to kate's config.

        Connected to a KAction's ``changed`` signal; keyed by the action's
        objectName so each action stores its own shortcut.
        """
        kate.configuration[self.sender().objectName()] = self.sender().shortcut().toString()
        kate.configuration.save()
        print(self.sender().objectName() + ': Save ' + kate.configuration[self.sender().objectName()])
|
LarsBV/kate_plugin_template
|
my_plugin.py
|
Python
|
bsd-2-clause
| 1,396 | 0.012894 |
"""
Utilities for managing Debian preseed
.. versionadded:: 2015.8.0
"""
import shlex
import salt.utils.files
import salt.utils.stringutils
import salt.utils.yaml
def mksls(src, dst=None):
    """
    Convert a preseed file to an SLS file

    :param src: path of the preseed file to read
    :param dst: optional path to write the rendered SLS YAML to; when None,
        the YAML is returned as a string instead
    :return: YAML string when ``dst`` is None, otherwise None
    """
    # Parse the preseed file into a nested dict:
    # owner -> question-path components -> {"type": ..., "argument": ...}
    ps_opts = {}
    with salt.utils.files.fopen(src, "r") as fh_:
        for line in fh_:
            line = salt.utils.stringutils.to_unicode(line)
            if line.startswith("#"):
                continue
            if not line.strip():
                continue
            comps = shlex.split(line)
            if comps[0] not in ps_opts:
                ps_opts[comps[0]] = {}
            cmds = comps[1].split("/")
            pointer = ps_opts[comps[0]]
            for cmd in cmds:
                pointer = pointer.setdefault(cmd, {})
            pointer["type"] = comps[2]
            if len(comps) > 3:
                pointer["argument"] = comps[3]

    sls = {}

    # Set language
    # ( This looks like it maps to something else )
    sls[ps_opts["d-i"]["languagechooser"]["language-name-fb"]["argument"]] = {
        "locale": ["system"]
    }

    # Set keyboard
    # ( This looks like it maps to something else )
    sls[ps_opts["d-i"]["kbd-chooser"]["method"]["argument"]] = {"keyboard": ["system"]}

    # Set timezone
    timezone = ps_opts["d-i"]["time"]["zone"]["argument"]
    sls[timezone] = {"timezone": ["system"]}
    if ps_opts["d-i"]["tzconfig"]["gmt"]["argument"] == "true":
        sls[timezone]["timezone"].append("utc")

    # Set network
    if "netcfg" in ps_opts["d-i"]:
        iface = ps_opts["d-i"]["netcfg"]["choose_interface"]["argument"]
        sls[iface] = {"enabled": True}
        # BUG FIX: confirm_static/disable_dhcp were previously compared to a
        # string without the trailing ["argument"] lookup, so the comparison
        # was against the parsed option *dict* and could never be true —
        # "proto" was never set. Use .get() so a missing argument still
        # behaves like the (falsy) original rather than raising KeyError.
        if ps_opts["d-i"]["netcfg"]["confirm_static"].get("argument") == "true":
            sls[iface]["proto"] = "static"
        elif ps_opts["d-i"]["netcfg"]["disable_dhcp"].get("argument") == "false":
            sls[iface]["proto"] = "dhcp"
        sls[iface]["netmask"] = ps_opts["d-i"]["netcfg"]["get_netmask"]["argument"]
        sls[iface]["domain"] = ps_opts["d-i"]["netcfg"]["get_domain"]["argument"]
        sls[iface]["gateway"] = ps_opts["d-i"]["netcfg"]["get_gateway"]["argument"]
        sls[iface]["hostname"] = ps_opts["d-i"]["netcfg"]["get_hostname"]["argument"]
        sls[iface]["ipaddress"] = ps_opts["d-i"]["netcfg"]["get_ipaddress"]["argument"]
        sls[iface]["nameservers"] = ps_opts["d-i"]["netcfg"]["get_nameservers"][
            "argument"
        ]

    if dst is not None:
        with salt.utils.files.fopen(dst, "w") as fh_:
            salt.utils.yaml.safe_dump(sls, fh_, default_flow_style=False)
    else:
        return salt.utils.yaml.safe_dump(sls, default_flow_style=False)
|
saltstack/salt
|
salt/utils/preseed.py
|
Python
|
apache-2.0
| 2,707 | 0.002586 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.