text (string, 6–947k chars) | repo_name (string, 5–100) | path (string, 4–231) | language (1 class) | license (15 classes) | size (int64, 6–947k) | score (float64, 0–0.34)
---|---|---|---|---|---|---|
from django.apps import AppConfig
class CompetencesConfig(AppConfig):
name = 'competences'
verbose_name = "Компетенции"  # Russian for "Competencies"
| ITOO-UrFU/open-programs | open_programs/apps/competences/apps.py | Python | unlicense | 141 | 0 |
#!/usr/bin/env python3
import os
import sys
from tools import impl
parser = impl.argparser()
parser.add_argument("-o", dest="output", action=impl.StripQuotesAction)
(options, args) = parser.parse_known_args()
# ranlib may have hidden the archive next to what buck thinks the archive is
input = args[-1] + ".secret"
if not os.path.exists(input):
input = args[-1]
with open(options.output, "w") as output:
output.write("linker:\n")
with open(input) as inputfile:
output.write(inputfile.read())
sys.exit(0)
| facebook/buck | test/com/facebook/buck/cxx/testdata/cxx_toolchain/tools/linker.py | Python | apache-2.0 | 531 | 0 |
# Copyright 2020 The TensorFlow Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Image functions."""
# python3
from cvx2 import latest as cv2
import numpy as np
def get_affine_transform(center, scale, rot, output_size, inverse=False):
"""Affine transform."""
if not isinstance(scale, (np.ndarray, list)):
scale = np.array([scale, scale], dtype=np.float32)
dst_w, dst_h = output_size[0], output_size[1]
rot_rad = np.pi * rot / 180
src_dir = get_dir([0, scale[0] * -0.5], rot_rad)
dst_dir = np.array([0, dst_w * -0.5], np.float32)
src = np.zeros((3, 2), dtype=np.float32)
dst = np.zeros((3, 2), dtype=np.float32)
src[0, :], src[1, :] = center, center + src_dir
dst[0, :] = [dst_w * 0.5, dst_h * 0.5]
dst[1, :] = np.array([dst_w * 0.5, dst_h * 0.5], np.float32) + dst_dir
src[2:, :] = get_3rd_point(src[0, :], src[1, :])
dst[2:, :] = get_3rd_point(dst[0, :], dst[1, :])
if inverse:
transform = cv2.getAffineTransform(np.float32(dst), np.float32(src))
else:
transform = cv2.getAffineTransform(np.float32(src), np.float32(dst))
return transform
def get_3rd_point(point_1, point_2):
tmp_point = point_1 - point_2
return point_2 + np.array([-tmp_point[1], tmp_point[0]], dtype=np.float32)
def get_dir(point, rot_rad):
sin_rot, cos_rot = np.sin(rot_rad), np.cos(rot_rad)
result = [0, 0]
result[0] = point[0] * cos_rot - point[1] * sin_rot
result[1] = point[0] * sin_rot + point[1] * cos_rot
return np.array(result)
def transform_points(points, center, scale, output_size, inverse=False):
transform = get_affine_transform(
center, scale, 0, output_size, inverse=inverse)
new_points = np.concatenate([points, np.ones([points.shape[0], 1])], axis=1)
points_transformed = np.dot(transform, new_points.T).T
return points_transformed
def transform_predictions(points, center, scale, output_size):
return transform_points(points, center, scale, output_size, inverse=True)
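# --- Usage sketch (illustrative, not part of the original module) ---
# A minimal example of how the helpers above are typically combined: map
# keypoints given in input-image pixel coordinates into the coordinates of a
# smaller output grid, then map them back with the inverse transform.  All
# concrete numbers below are assumptions chosen purely for illustration.
def _example_keypoint_roundtrip():
  center = np.array([256.0, 256.0], dtype=np.float32)  # crop centre (pixels)
  scale = 512.0             # side length of the square region around the centre
  output_size = (128, 128)  # (width, height) of the target grid
  keypoints = np.array([[100.0, 200.0], [300.0, 50.0]], dtype=np.float32)
  mapped = transform_points(keypoints, center, scale, output_size)
  restored = transform_points(mapped, center, scale, output_size, inverse=True)
  return mapped, restored  # `restored` should be close to `keypoints`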
| tensorflow/graphics | tensorflow_graphics/projects/points_to_3Dobjects/utils/image.py | Python | apache-2.0 | 2,459 | 0.011793 |
"""
registration tool for cobbler.
Copyright 2009 Red Hat, Inc.
Michael DeHaan <mdehaan@redhat.com>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301 USA
"""
import random
import os
import traceback
try:
from optparse import OptionParser
except:
from opt_parse import OptionParser # importing this for backwards compat with 2.2
import exceptions
try:
import subprocess as sub_process
except:
import sub_process
import time
import errno
import sys
import xmlrpclib
import glob
import socket
import utils
import string
import pprint
# usage: cobbler-register [--server=server] [--hostname=hostname] --profile=foo
def main():
"""
Command line stuff...
"""
p = OptionParser()
p.add_option("-s", "--server",
dest="server",
default=os.environ.get("COBBLER_SERVER",""),
help="attach to this cobbler server")
p.add_option("-f", "--fqdn",
dest="hostname",
default="",
help="override the discovered hostname")
p.add_option("-p", "--port",
dest="port",
default="80",
help="cobbler port (default 80)")
p.add_option("-P", "--profile",
dest="profile",
default="",
help="assign this profile to this system")
p.add_option("-b", "--batch",
dest="batch",
action="store_true",
help="indicates this is being run from a script")
(options, args) = p.parse_args()
#if not os.getuid() == 0:
# print "koan requires root access"
# return 3
try:
k = Register()
k.server = options.server
k.port = options.port
k.profile = options.profile
k.hostname = options.hostname
k.batch = options.batch
k.run()
except Exception, e:
(xa, xb, tb) = sys.exc_info()
try:
getattr(e,"from_koan")
print str(e)[1:-1] # nice exception, no traceback needed
except:
print xa
print xb
print string.join(traceback.format_list(traceback.extract_tb(tb)))
return 1
return 0
#=======================================================
class InfoException(exceptions.Exception):
"""
Custom exception for tracking of fatal errors.
"""
def __init__(self,value,**args):
self.value = value % args
self.from_koan = 1
def __str__(self):
return repr(self.value)
#=======================================================
class Register:
def __init__(self):
"""
Constructor. Arguments will be filled in by optparse...
"""
self.server = ""
self.port = ""
self.profile = ""
self.hostname = ""
self.batch = ""
#---------------------------------------------------
def run(self):
"""
Commence with the registration already.
"""
# not really required, but probably best that ordinary users don't try
# to run this not knowing what it does.
if os.getuid() != 0:
raise InfoException("root access is required to register")
print "- preparing to koan home"
self.conn = utils.connect_to_server(self.server, self.port)
reg_info = {}
print "- gathering network info"
netinfo = utils.get_network_info()
reg_info["interfaces"] = netinfo
print "- checking hostname"
sysname = ""
if self.hostname != "" and self.hostname != "*AUTO*":
hostname = self.hostname
sysname = self.hostname
else:
hostname = socket.getfqdn()
if hostname == "localhost.localdomain":
if self.hostname == '*AUTO*':
hostname = ""
sysname = str(time.time())
else:
raise InfoException("must specify --fqdn, could not discover")
if sysname == "":
sysname = hostname
if self.profile == "":
raise InfoException("must specify --profile")
# we'll do a profile check here just to avoid some log noise on the remote end.
# network duplication checks and profile checks also happen on the remote end.
avail_profiles = self.conn.get_profiles()
matched_profile = False
for x in avail_profiles:
if x.get("name","") == self.profile:
matched_profile=True
break
reg_info['name'] = sysname
reg_info['profile'] = self.profile
reg_info['hostname'] = hostname
if not matched_profile:
raise InfoException("no such remote profile, see 'koan --list-profiles'")
if not self.batch:
self.conn.register_new_system(reg_info)
print "- registration successful, new system name: %s" % sysname
else:
try:
self.conn.register_new_system(reg_info)
print "- registration successful, new system name: %s" % sysname
except:
traceback.print_exc()
print "- registration failed, ignoring because of --batch"
return
if __name__ == "__main__":
main()
| remotesyssupport/cobbler-1 | koan/register.py | Python | gpl-2.0 | 6,100 | 0.008033 |
"""
Common Django settings for the project.
See the local, test, and production settings modules for the values used
in each environment.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
from django.core.exceptions import ImproperlyConfigured
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
BASE_DIR = os.path.abspath(os.path.join(BASE_DIR, os.pardir))
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'iguod@&u&_(3f_d-z)r_2g)o=mjor_rbo)9)b&19$ih*txgnta'
ALLOWED_HOSTS = []
# Twilio API credentials
TWILIO_ACCOUNT_SID = os.environ.get('TWILIO_ACCOUNT_SID')
TWILIO_AUTH_TOKEN = os.environ.get('TWILIO_AUTH_TOKEN')
# Twilio number
TWILIO_NUMBER = os.environ.get('TWILIO_NUMBER')
# TwiML Application SID
TWIML_APPLICATION_SID = os.environ.get('TWIML_APPLICATION_SID')
if not (TWILIO_ACCOUNT_SID and TWILIO_AUTH_TOKEN and TWILIO_NUMBER and TWIML_APPLICATION_SID):
missing_config_values = \
"""
You must set the TWILIO_ACCOUNT_SID, TWILIO_AUTH_TOKEN, TWILIO_NUMBER, and TWIML_APPLICATION_SID environment variables to run this app.
Consult the README for instructions on how to find them.
"""
raise ImproperlyConfigured(missing_config_values)
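# A hedged illustration only (placeholder values, not real credentials): in
# local development these four variables are typically provided through the
# process environment before Django imports this module, e.g.
#   TWILIO_ACCOUNT_SID=ACxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
#   TWILIO_AUTH_TOKEN=your-auth-token
#   TWILIO_NUMBER=+15005550006
#   TWIML_APPLICATION_SID=APxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx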
# Application definition
DJANGO_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.humanize'
)
THIRD_PARTY_APPS = (
)
LOCAL_APPS = (
'dialer',
)
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'caller.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': ['templates/'],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'caller.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'caller',
'USER': 'calluser',
'PASSWORD': 'qq',
'HOST': '127.0.0.1',
'PORT': '5432',
},
#'default': {
#'ENGINE': 'django.db.backends.sqlite3',
#'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
#}
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [
os.path.join(BASE_DIR, "static")
]
print STATICFILES_DIRS, BASE_DIR
STATIC_ROOT = '' #BASE_DIR + '/static'
# Messages settings for Bootstrap 3
from django.contrib.messages import constants as messages
MESSAGE_TAGS = {
messages.ERROR: 'danger'
}
# Redirect login to /support/dashboard
LOGIN_REDIRECT_URL = '/support/dashboard'
| adjustive/caller | caller/settings/common.py | Python | gpl-3.0 | 4,021 | 0.002984 |
from browser import window, document as doc, console
import net
import sim
import json
CTX = None
"""
Bitwrap context
"""
CTL = None
"""
UI control interface
"""
def __onload(ctx):
""" use snap to begin creating an SVG """
global CTL
CTL = Editor()
window.jQuery('#net').on('click', CTL.on_insert)
window.jQuery('.select').on('click', CTL.select)
window.jQuery('.symbol').on('click', CTL.symbol)
window.jQuery('.tool').on('click', CTL.tool)
window.jQuery('.simulator').on('click', CTL.simulator)
global CTX
CTX = ctx
# TODO: control from UI
# maybe add a dropdown
#CTX.machine('counter', callback=CTL.load)
CTX.machine('octoe', callback=CTL.load)
def on_event(_, event):
""" receive event over jQuery binding"""
if 'action' in event:
return CTL.simulation.execute(event['action'])
class Controller(object):
""" Provide interface for UI actions """
def load(self, res):
""" store requested PNML and render as SVG """
pnet = json.loads(res.text)
net.SCHEMA = pnet['machine']['name']
net.NETS[net.SCHEMA] = pnet['machine']
self.reset(callback=self.render)
def reset(self, callback=None):
""" clear SVG and prepare markers """
if not net.PAPER:
net.PAPER = window.Snap('#net')
net.PAPER.clear()
net.on_load()
if callable(callback):
callback()
def render(self, callback=None):
""" development examples """
if not net.INSTANCE:
net.PNet(self)
net.INSTANCE.render()
self.json_view()
if callable(callback):
callback()
def json_view(self):
_info = json.dumps({
'places': net.INSTANCE.place_defs,
'transitions': net.INSTANCE.transition_defs,
'arcs': net.INSTANCE.arc_defs,
'place_names': net.INSTANCE.place_names,
'token_ledger': net.INSTANCE.token_ledger
})
window.jQuery('#json').JSONView(_info)
class EditorEvents(object):
""" Editor event callbacks """
def on_click(self, event):
""" handle mouse events """
self.callback(event)
def on_select(self, event):
""" callback to show attributes for selected element """
refid, symbol = self._selected(event)
if not refid:
return
console.log('on_select', refid, symbol)
# FIXME: should show info in editor
def on_insert(self, event):
""" insert a symbol into net """
if not self.selected_insert_symbol:
return
new_coords = [event.offsetX, event.offsetY]
# TODO: make call to insert new symbol in INSTANCE
if self.selected_insert_symbol == 'place':
net.INSTANCE.insert_place(new_coords)
else:
net.INSTANCE.insert_transition(new_coords)
self.reset(callback=self.render)
def on_delete(self, event):
""" callback when clicking elements when delete tool is active """
refid, symbol = self._selected(event)
if not refid:
return
if symbol == 'place':
net.INSTANCE.delete_place(refid)
elif symbol == 'transition':
net.INSTANCE.delete_transition(refid)
else: # FIXME implement arc handle
#net.INSTANCE.delete_arc(target_id)
console.log('delete arc', refid)
self.reset(callback=self.render)
def on_trigger(self, event):
""" callback when triggering a transition during a simulation """
action = CTL.simulation.trigger(event)
console.log(net.SCHEMA, CTL.simulation.oid, action)
CTX.dispatch(net.SCHEMA, self.simulation.oid, action)
def on_token_inc(self, event):
return self._token_changed(1, event)
def on_token_dec(self, event):
return self._token_changed(-1, event)
def _token_changed(self, change, event):
refid, symbol = self._selected(event)
if not symbol == 'place':
return
current = net.INSTANCE.token_ledger[refid]
new_token_count = current + change
if new_token_count >= 0:
net.INSTANCE.update_place_tokens(refid, new_token_count)
self.reset(callback=self.render)
def _selected(self, event):
target_id = str(event.target.id)
if not self.is_selectable(target_id):
return [None, None]
return target_id.split('-')
def on_arc_begin(self, event):
begin = self._selected(event)
if not begin:
return
self.callback = self.on_arc_end
self.selected_arc_endpoint = begin
def on_arc_end(self, event):
end = self._selected(event)
if not end:
return
self.callback = self.on_arc_begin
begin = self.selected_arc_endpoint
if begin[1] == end[1]:
return # cannot connect 2 symbols of same type
if begin[1] == 'transition':
txn = begin[0]
place = end[0]
direction = 'to'
diff = 1
else:
txn = end[0]
place = begin[0]
direction = 'from'
diff = -1
if txn not in net.INSTANCE.arc_defs:
net.INSTANCE.arc_defs[txn] = {'to': [], 'from': []}
net.INSTANCE.arc_defs[txn][direction].append(place)
offset = net.INSTANCE.place_defs[place]['offset']
net.INSTANCE.transition_defs[txn]['delta'][offset] = diff
self.selected_arc_endpoint = None # reset
self.reset(callback=self.render)
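# Note on the delta convention used in on_arc_end above: writing +1 at a
# place's offset in a transition's delta vector means firing the transition
# adds a token to that place, and -1 means it consumes one, i.e. the usual
# Petri-net output/input arc semantics.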
class Editor(Controller, EditorEvents):
""" Petri-Net editor controls """
def __init__(self):
self.callback = self.on_select
self.move_enabled = True
self.selected_insert_symbol = None
self.selected_arc_endpoint = None
self.simulation = None
def select(self, event):
""" enter select/move mode """
self.move_enabled = True
self.selected_insert_symbol = None
self.callback = self.on_select
def symbol(self, event):
""" enter insert symbol mode """
sym = str(event.target.id)
self.selected_insert_symbol = sym
def simulator(self, event):
""" control start/stop simulation mode """
target_id = event.target.text
if target_id == 'reset':
if self.simulation:
self.simulation.reset()
self.callback = self.on_select
self.move_enabled = True
doc['code'].value = '>>>'
else:
self.move_enabled = False
oid = window.Date.now()
self.simulation = sim.Simulation(oid, net.INSTANCE, self)
CTX.create(net.SCHEMA, oid)
CTX.subscribe(str(net.SCHEMA), str(oid))
console.log(net.SCHEMA, oid, 'NEW')
self.callback = self.on_trigger
def tool(self, event):
""" modify existing symbol on net """
self.move_enabled = False
self.selected_insert_symbol = None
self.selected_arc_endpoint = None
target_id = str(event.target.id)
if target_id == 'arc':
self.callback = self.on_arc_begin
elif target_id == 'delete':
self.callback = self.on_delete
elif target_id == 'dec_token':
self.callback = self.on_token_dec
elif target_id == 'inc_token':
self.callback = self.on_token_inc
def is_selectable(self, target_id):
""" determine if element allows user interaction """
# KLUDGE: relies on a naming convention
# 'primary' labels for symbols are assumed not to use the char '-'
# 'secondary' labels use IDs with the form <primary>-<secondary>
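# e.g. (illustrative) an element id such as "p1-place" passes this check and is
# split by _selected() into refid "p1" and symbol "place", while a bare id such
# as "p1" is treated as non-interactive.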
if '-' not in target_id:
return False
else:
return True
| stackdump/txbitwrap | txbitwrap/_brython/ctl.py | Python | mit | 7,943 | 0.001763 |
#!/usr/bin/env python
#
# Electrum - lightweight Bitcoin client
# Copyright (C) 2011 thomasv@gitorious
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import sys
import os
import hashlib
import ast
import threading
import random
import time
import math
import json
import copy
from operator import itemgetter
from util import print_msg, print_error, NotEnoughFunds
from util import profiler
from bitcoin import *
from account import *
from version import *
from transaction import Transaction
from plugins import run_hook
import bitcoin
from synchronizer import WalletSynchronizer
from mnemonic import Mnemonic
import paymentrequest
# internal ID for imported account
IMPORTED_ACCOUNT = '/x'
class WalletStorage(object):
def __init__(self, path):
self.lock = threading.RLock()
self.data = {}
self.path = path
self.file_exists = False
print_error( "wallet path", self.path )
if self.path:
self.read(self.path)
def read(self, path):
"""Read the contents of the wallet file."""
try:
with open(self.path, "r") as f:
data = f.read()
except IOError:
return
try:
self.data = json.loads(data)
except:
try:
d = ast.literal_eval(data) #parse raw data from reading wallet file
except Exception as e:
raise IOError("Cannot read wallet file '%s'" % self.path)
self.data = {}
# In old versions of Electrum labels were latin1 encoded, this fixes breakage.
labels = d.get('labels', {})
for i, label in labels.items():
try:
unicode(label)
except UnicodeDecodeError:
d['labels'][i] = unicode(label.decode('latin1'))
for key, value in d.items():
try:
json.dumps(key)
json.dumps(value)
except:
print_error('Failed to convert label to json format', key)
continue
self.data[key] = value
self.file_exists = True
def get(self, key, default=None):
with self.lock:
v = self.data.get(key)
if v is None:
v = default
else:
v = copy.deepcopy(v)
return v
def put(self, key, value, save = True):
try:
json.dumps(key)
json.dumps(value)
except:
print_error("json error: cannot save", key)
return
with self.lock:
if value is not None:
self.data[key] = copy.deepcopy(value)
elif key in self.data:
self.data.pop(key)
if save:
self.write()
def write(self):
assert not threading.currentThread().isDaemon()
temp_path = "%s.tmp.%s" % (self.path, os.getpid())
s = json.dumps(self.data, indent=4, sort_keys=True)
with open(temp_path, "w") as f:
f.write(s)
f.flush()
os.fsync(f.fileno())
# perform atomic write on POSIX systems
try:
os.rename(temp_path, self.path)
except:
os.remove(self.path)
os.rename(temp_path, self.path)
if 'ANDROID_DATA' not in os.environ:
import stat
os.chmod(self.path,stat.S_IREAD | stat.S_IWRITE)
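# Usage sketch (illustrative only; the path below is a made-up example):
# WalletStorage behaves like a JSON-backed persistent dict, with put() writing
# the whole file atomically via the temp-file-and-rename dance above.
#
#   storage = WalletStorage('/tmp/example_wallet')
#   storage.put('gap_limit', 5)      # persists immediately (save=True)
#   storage.get('gap_limit', 20)     # -> 5; the default is only a fallback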
class Abstract_Wallet(object):
"""
Wallet classes are created to handle various address generation methods.
Completion states (watching-only, single account, no seed, etc) are handled inside classes.
"""
def __init__(self, storage):
self.storage = storage
self.electrum_version = ELECTRUM_VERSION
self.gap_limit_for_change = 6 # constant
# saved fields
self.seed_version = storage.get('seed_version', NEW_SEED_VERSION)
self.use_change = storage.get('use_change',True)
self.use_encryption = storage.get('use_encryption', False)
self.seed = storage.get('seed', '') # encrypted
self.labels = storage.get('labels', {})
self.frozen_addresses = set(storage.get('frozen_addresses',[]))
self.stored_height = storage.get('stored_height', 0) # last known height (for offline mode)
self.history = storage.get('addr_history',{}) # address -> list(txid, height)
self.fee_per_kb = int(storage.get('fee_per_kb', RECOMMENDED_FEE))
# This attribute is set when wallet.start_threads is called.
self.synchronizer = None
# imported_keys is deprecated. The GUI should call convert_imported_keys
self.imported_keys = self.storage.get('imported_keys',{})
self.load_accounts()
self.load_transactions()
self.build_reverse_history()
# load requests
self.receive_requests = self.storage.get('payment_requests', {})
# spv
self.verifier = None
# Transactions pending verification. Each value is the transaction height. Access with self.lock.
self.unverified_tx = {}
# Verified transactions. Each value is a (height, timestamp, block_pos) tuple. Access with self.lock.
self.verified_tx = storage.get('verified_tx3',{})
# there is a difference between wallet.up_to_date and interface.is_up_to_date()
# interface.is_up_to_date() returns true when all requests have been answered and processed
# wallet.up_to_date is true when the wallet is synchronized (stronger requirement)
self.up_to_date = False
self.lock = threading.Lock()
self.transaction_lock = threading.Lock()
self.tx_event = threading.Event()
self.check_history()
# save wallet type the first time
if self.storage.get('wallet_type') is None:
self.storage.put('wallet_type', self.wallet_type, True)
@profiler
def load_transactions(self):
self.txi = self.storage.get('txi', {})
self.txo = self.storage.get('txo', {})
self.pruned_txo = self.storage.get('pruned_txo', {})
tx_list = self.storage.get('transactions', {})
self.transactions = {}
for tx_hash, raw in tx_list.items():
tx = Transaction(raw)
self.transactions[tx_hash] = tx
if self.txi.get(tx_hash) is None and self.txo.get(tx_hash) is None and (tx_hash not in self.pruned_txo.values()):
print_error("removing unreferenced tx", tx_hash)
self.transactions.pop(tx_hash)
@profiler
def save_transactions(self):
with self.transaction_lock:
tx = {}
for k,v in self.transactions.items():
tx[k] = str(v)
# Flush storage only with the last put
self.storage.put('transactions', tx, False)
self.storage.put('txi', self.txi, False)
self.storage.put('txo', self.txo, False)
self.storage.put('pruned_txo', self.pruned_txo, True)
def clear_history(self):
with self.transaction_lock:
self.txi = {}
self.txo = {}
self.pruned_txo = {}
self.save_transactions()
with self.lock:
self.history = {}
self.tx_addr_hist = {}
self.storage.put('addr_history', self.history, True)
@profiler
def build_reverse_history(self):
self.tx_addr_hist = {}
for addr, hist in self.history.items():
for tx_hash, h in hist:
s = self.tx_addr_hist.get(tx_hash, set())
s.add(addr)
self.tx_addr_hist[tx_hash] = s
@profiler
def check_history(self):
save = False
for addr, hist in self.history.items():
if not self.is_mine(addr):
self.history.pop(addr)
save = True
continue
for tx_hash, tx_height in hist:
if tx_hash in self.pruned_txo.values() or self.txi.get(tx_hash) or self.txo.get(tx_hash):
continue
tx = self.transactions.get(tx_hash)
if tx is not None:
tx.deserialize()
self.add_transaction(tx_hash, tx, tx_height)
if save:
self.storage.put('addr_history', self.history, True)
# wizard action
def get_action(self):
pass
def basename(self):
return os.path.basename(self.storage.path)
def convert_imported_keys(self, password):
for k, v in self.imported_keys.items():
sec = pw_decode(v, password)
pubkey = public_key_from_private_key(sec)
address = public_key_to_bc_address(pubkey.decode('hex'))
if address != k:
raise InvalidPassword()
self.import_key(sec, password)
self.imported_keys.pop(k)
self.storage.put('imported_keys', self.imported_keys)
def load_accounts(self):
self.accounts = {}
d = self.storage.get('accounts', {})
for k, v in d.items():
if self.wallet_type == 'old' and k in [0, '0']:
v['mpk'] = self.storage.get('master_public_key')
self.accounts['0'] = OldAccount(v)
elif v.get('imported'):
self.accounts[k] = ImportedAccount(v)
elif v.get('xpub'):
self.accounts[k] = BIP32_Account(v)
elif v.get('pending'):
try:
self.accounts[k] = PendingAccount(v)
except:
pass
else:
print_error("cannot load account", v)
def synchronize(self):
pass
def can_create_accounts(self):
return False
def set_up_to_date(self,b):
with self.lock: self.up_to_date = b
def is_up_to_date(self):
with self.lock: return self.up_to_date
def update(self):
self.up_to_date = False
while not self.is_up_to_date():
time.sleep(0.1)
def is_imported(self, addr):
account = self.accounts.get(IMPORTED_ACCOUNT)
if account:
return addr in account.get_addresses(0)
else:
return False
def has_imported_keys(self):
account = self.accounts.get(IMPORTED_ACCOUNT)
return account is not None
def import_key(self, sec, password):
assert self.can_import(), 'This wallet cannot import private keys'
try:
pubkey = public_key_from_private_key(sec)
address = public_key_to_bc_address(pubkey.decode('hex'))
except Exception:
raise Exception('Invalid private key')
if self.is_mine(address):
raise Exception('Address already in wallet')
if self.accounts.get(IMPORTED_ACCOUNT) is None:
self.accounts[IMPORTED_ACCOUNT] = ImportedAccount({'imported':{}})
self.accounts[IMPORTED_ACCOUNT].add(address, pubkey, sec, password)
self.save_accounts()
# force resynchronization, because we need to re-run add_transaction
if address in self.history:
self.history.pop(address)
if self.synchronizer:
self.synchronizer.add(address)
return address
def delete_imported_key(self, addr):
account = self.accounts[IMPORTED_ACCOUNT]
account.remove(addr)
if not account.get_addresses(0):
self.accounts.pop(IMPORTED_ACCOUNT)
self.save_accounts()
def set_label(self, name, text = None):
changed = False
old_text = self.labels.get(name)
if text:
if old_text != text:
self.labels[name] = text
changed = True
else:
if old_text:
self.labels.pop(name)
changed = True
if changed:
self.storage.put('labels', self.labels, True)
run_hook('set_label', name, text, changed)
return changed
def addresses(self, include_change = True):
return list(addr for acc in self.accounts for addr in self.get_account_addresses(acc, include_change))
def is_mine(self, address):
return address in self.addresses(True)
def is_change(self, address):
if not self.is_mine(address): return False
acct, s = self.get_address_index(address)
if s is None: return False
return s[0] == 1
def get_address_index(self, address):
for acc_id in self.accounts:
for for_change in [0,1]:
addresses = self.accounts[acc_id].get_addresses(for_change)
if address in addresses:
return acc_id, (for_change, addresses.index(address))
raise Exception("Address not found", address)
def get_private_key(self, address, password):
if self.is_watching_only():
return []
account_id, sequence = self.get_address_index(address)
return self.accounts[account_id].get_private_key(sequence, self, password)
def get_public_keys(self, address):
account_id, sequence = self.get_address_index(address)
return self.accounts[account_id].get_pubkeys(*sequence)
def sign_message(self, address, message, password):
keys = self.get_private_key(address, password)
assert len(keys) == 1
sec = keys[0]
key = regenerate_key(sec)
compressed = is_compressed(sec)
return key.sign_message(message, compressed, address)
def decrypt_message(self, pubkey, message, password):
address = public_key_to_bc_address(pubkey.decode('hex'))
keys = self.get_private_key(address, password)
secret = keys[0]
ec = regenerate_key(secret)
decrypted = ec.decrypt_message(message)
return decrypted
def add_unverified_tx(self, tx_hash, tx_height):
if tx_height > 0:
with self.lock:
self.unverified_tx[tx_hash] = tx_height
def add_verified_tx(self, tx_hash, info):
with self.lock:
self.verified_tx[tx_hash] = info # (tx_height, timestamp, pos)
self.storage.put('verified_tx3', self.verified_tx, True)
self.network.trigger_callback('updated')
def get_unverified_txs(self):
'''Returns a list of tuples (tx_hash, height) that are unverified and not beyond local height'''
txs = []
with self.lock:
for tx_hash, tx_height in self.unverified_tx.items():
# do not request merkle branch before headers are available
if tx_hash not in self.verified_tx and tx_height <= self.get_local_height():
txs.append((tx_hash, tx_height))
return txs
def undo_verifications(self, height):
'''Used by the verifier when a reorg has happened'''
txs = []
with self.lock:
for tx_hash, item in self.verified_tx:
tx_height, timestamp, pos = item
if tx_height >= height:
self.verified_tx.pop(tx_hash, None)
txs.append(tx_hash)
return txs
def get_local_height(self):
""" return last known height if we are offline """
return self.network.get_local_height() if self.network else self.stored_height
def get_confirmations(self, tx):
""" return the number of confirmations of a monitored transaction. """
with self.lock:
if tx in self.verified_tx:
height, timestamp, pos = self.verified_tx[tx]
conf = (self.get_local_height() - height + 1)
if conf <= 0: timestamp = None
elif tx in self.unverified_tx:
conf = -1
timestamp = None
else:
conf = 0
timestamp = None
return conf, timestamp
def get_txpos(self, tx_hash):
"return position, even if the tx is unverified"
with self.lock:
x = self.verified_tx.get(tx_hash)
y = self.unverified_tx.get(tx_hash)
if x:
height, timestamp, pos = x
return height, pos
elif y:
return y, 0
else:
return 1e12, 0
def is_found(self):
return self.history.values() != [[]] * len(self.history)
def get_num_tx(self, address):
""" return number of transactions where address is involved """
return len(self.history.get(address, []))
def get_tx_delta(self, tx_hash, address):
"effect of tx on address"
# pruned
if tx_hash in self.pruned_txo.values():
return None
delta = 0
# subtract the value of coins sent from address
d = self.txi.get(tx_hash, {}).get(address, [])
for n, v in d:
delta -= v
# add the value of the coins received at address
d = self.txo.get(tx_hash, {}).get(address, [])
for n, v, cb in d:
delta += v
return delta
def get_wallet_delta(self, tx):
""" effect of tx on wallet """
addresses = self.addresses(True)
is_relevant = False
is_send = False
is_pruned = False
is_partial = False
v_in = v_out = v_out_mine = 0
for item in tx.inputs:
addr = item.get('address')
if addr in addresses:
is_send = True
is_relevant = True
d = self.txo.get(item['prevout_hash'], {}).get(addr, [])
for n, v, cb in d:
if n == item['prevout_n']:
value = v
break
else:
value = None
if value is None:
is_pruned = True
else:
v_in += value
else:
is_partial = True
if not is_send:
is_partial = False
for addr, value in tx.get_outputs():
v_out += value
if addr in addresses:
v_out_mine += value
is_relevant = True
if is_pruned:
# some inputs are mine:
fee = None
if is_send:
v = v_out_mine - v_out
else:
# no input is mine
v = v_out_mine
else:
v = v_out_mine - v_in
if is_partial:
# some inputs are mine, but not all
fee = None
is_send = v < 0
else:
# all inputs are mine
fee = v_out - v_in
return is_relevant, is_send, v, fee
def get_addr_io(self, address):
h = self.history.get(address, [])
received = {}
sent = {}
for tx_hash, height in h:
l = self.txo.get(tx_hash, {}).get(address, [])
for n, v, is_cb in l:
received[tx_hash + ':%d'%n] = (height, v, is_cb)
for tx_hash, height in h:
l = self.txi.get(tx_hash, {}).get(address, [])
for txi, v in l:
sent[txi] = height
return received, sent
def get_addr_utxo(self, address):
coins, spent = self.get_addr_io(address)
for txi in spent:
coins.pop(txi)
return coins
# return the total amount ever received by an address
def get_addr_received(self, address):
received, sent = self.get_addr_io(address)
return sum([v for height, v, is_cb in received.values()])
# return the balance of a bitcoin address: confirmed and matured, unconfirmed, unmatured
def get_addr_balance(self, address):
received, sent = self.get_addr_io(address)
c = u = x = 0
for txo, (tx_height, v, is_cb) in received.items():
if is_cb and tx_height + COINBASE_MATURITY > self.get_local_height():
x += v
elif tx_height > 0:
c += v
else:
u += v
if txo in sent:
if sent[txo] > 0:
c -= v
else:
u -= v
return c, u, x
def get_spendable_coins(self, domain = None, exclude_frozen = True):
coins = []
if domain is None:
domain = self.addresses(True)
if exclude_frozen:
domain = set(domain) - self.frozen_addresses
for addr in domain:
c = self.get_addr_utxo(addr)
for txo, v in c.items():
tx_height, value, is_cb = v
if is_cb and tx_height + COINBASE_MATURITY > self.get_local_height():
continue
prevout_hash, prevout_n = txo.split(':')
output = {
'address':addr,
'value':value,
'prevout_n':int(prevout_n),
'prevout_hash':prevout_hash,
'height':tx_height,
'coinbase':is_cb
}
coins.append((tx_height, output))
continue
# sort by age
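# (unconfirmed coins have height 0 and would sort first; the rotation below
# moves them to the end so confirmed, older coins are preferred for spending)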
if coins:
coins = sorted(coins)
if coins[-1][0] != 0:
while coins[0][0] == 0:
coins = coins[1:] + [ coins[0] ]
return [value for height, value in coins]
def get_account_name(self, k):
return self.labels.get(k, self.accounts[k].get_name(k))
def get_account_names(self):
account_names = {}
for k in self.accounts.keys():
account_names[k] = self.get_account_name(k)
return account_names
def get_account_addresses(self, acc_id, include_change=True):
if acc_id is None:
addr_list = self.addresses(include_change)
elif acc_id in self.accounts:
acc = self.accounts[acc_id]
addr_list = acc.get_addresses(0)
if include_change:
addr_list += acc.get_addresses(1)
return addr_list
def get_account_from_address(self, addr):
"Returns the account that contains this address, or None"
for acc_id in self.accounts: # similar to get_address_index but simpler
if addr in self.get_account_addresses(acc_id):
return acc_id
return None
def get_account_balance(self, account):
return self.get_balance(self.get_account_addresses(account))
def get_frozen_balance(self):
return self.get_balance(self.frozen_addresses)
def get_balance(self, domain=None):
if domain is None:
domain = self.addresses(True)
cc = uu = xx = 0
for addr in domain:
c, u, x = self.get_addr_balance(addr)
cc += c
uu += u
xx += x
return cc, uu, xx
def set_fee(self, fee, save = True):
self.fee_per_kb = fee
self.storage.put('fee_per_kb', self.fee_per_kb, save)
def get_address_history(self, address):
with self.lock:
return self.history.get(address, [])
def get_status(self, h):
if not h:
return None
status = ''
for tx_hash, height in h:
status += tx_hash + ':%d:' % height
return hashlib.sha256( status ).digest().encode('hex')
def find_pay_to_pubkey_address(self, prevout_hash, prevout_n):
dd = self.txo.get(prevout_hash, {})
for addr, l in dd.items():
for n, v, is_cb in l:
if n == prevout_n:
print_error("found pay-to-pubkey address:", addr)
return addr
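# Bookkeeping maintained by add_transaction/remove_transaction below
# (summarised from the code, for orientation):
#   self.txi[tx_hash][addr] -> list of (prevout_ser, value) spent from addr,
#                              where prevout_ser is "prevhash:n"
#   self.txo[tx_hash][addr] -> list of (output_index, value, is_coinbase)
#                              received at addr
#   self.pruned_txo[prevout_ser] -> tx_hash of the spender, recorded when the
#                              input could not be matched to a known output yet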
def add_transaction(self, tx_hash, tx, tx_height):
is_coinbase = tx.inputs[0].get('is_coinbase') == True
with self.transaction_lock:
# add inputs
self.txi[tx_hash] = d = {}
for txi in tx.inputs:
addr = txi.get('address')
if not txi.get('is_coinbase'):
prevout_hash = txi['prevout_hash']
prevout_n = txi['prevout_n']
ser = prevout_hash + ':%d'%prevout_n
if addr == "(pubkey)":
addr = self.find_pay_to_pubkey_address(prevout_hash, prevout_n)
# find value from prev output
if addr and self.is_mine(addr):
dd = self.txo.get(prevout_hash, {})
for n, v, is_cb in dd.get(addr, []):
if n == prevout_n:
if d.get(addr) is None:
d[addr] = []
d[addr].append((ser, v))
break
else:
self.pruned_txo[ser] = tx_hash
# add outputs
self.txo[tx_hash] = d = {}
for n, txo in enumerate(tx.outputs):
ser = tx_hash + ':%d'%n
_type, x, v = txo
if _type == 'address':
addr = x
elif _type == 'pubkey':
addr = public_key_to_bc_address(x.decode('hex'))
else:
addr = None
if addr and self.is_mine(addr):
if d.get(addr) is None:
d[addr] = []
d[addr].append((n, v, is_coinbase))
# give v to txi that spends me
next_tx = self.pruned_txo.get(ser)
if next_tx is not None:
self.pruned_txo.pop(ser)
dd = self.txi.get(next_tx, {})
if dd.get(addr) is None:
dd[addr] = []
dd[addr].append((ser, v))
# save
self.transactions[tx_hash] = tx
def remove_transaction(self, tx_hash, tx_height):
with self.transaction_lock:
print_error("removing tx from history", tx_hash)
#tx = self.transactions.pop(tx_hash)
for ser, hh in self.pruned_txo.items():
if hh == tx_hash:
self.pruned_txo.pop(ser)
# add tx to pruned_txo, and undo the txi addition
for next_tx, dd in self.txi.items():
for addr, l in dd.items():
ll = l[:]
for item in ll:
ser, v = item
prev_hash, prev_n = ser.split(':')
if prev_hash == tx_hash:
l.remove(item)
self.pruned_txo[ser] = next_tx
if l == []:
dd.pop(addr)
else:
dd[addr] = l
self.txi.pop(tx_hash)
self.txo.pop(tx_hash)
def receive_tx_callback(self, tx_hash, tx, tx_height):
self.add_transaction(tx_hash, tx, tx_height)
#self.network.pending_transactions_for_notifications.append(tx)
self.add_unverified_tx(tx_hash, tx_height)
def receive_history_callback(self, addr, hist):
with self.lock:
old_hist = self.history.get(addr, [])
for tx_hash, height in old_hist:
if (tx_hash, height) not in hist:
# remove tx if it's not referenced in histories
self.tx_addr_hist[tx_hash].remove(addr)
if not self.tx_addr_hist[tx_hash]:
self.remove_transaction(tx_hash, height)
self.history[addr] = hist
self.storage.put('addr_history', self.history, True)
for tx_hash, tx_height in hist:
# add it in case it was previously unconfirmed
self.add_unverified_tx(tx_hash, tx_height)
# add reference in tx_addr_hist
s = self.tx_addr_hist.get(tx_hash, set())
s.add(addr)
self.tx_addr_hist[tx_hash] = s
# if addr is new, we have to recompute txi and txo
tx = self.transactions.get(tx_hash)
if tx is not None and self.txi.get(tx_hash, {}).get(addr) is None and self.txo.get(tx_hash, {}).get(addr) is None:
tx.deserialize()
self.add_transaction(tx_hash, tx, tx_height)
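# get_history() below returns, oldest first, one tuple per wallet transaction:
# (tx_hash, confirmations, delta, timestamp, balance), where delta is the net
# effect on the given domain and balance is the running balance after that tx.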
def get_history(self, domain=None):
from collections import defaultdict
# get domain
if domain is None:
domain = self.get_account_addresses(None)
# 1. Get the history of each address in the domain, maintain the
# delta of a tx as the sum of its deltas on domain addresses
tx_deltas = defaultdict(int)
for addr in domain:
h = self.get_address_history(addr)
for tx_hash, height in h:
delta = self.get_tx_delta(tx_hash, addr)
if delta is None or tx_deltas[tx_hash] is None:
tx_deltas[tx_hash] = None
else:
tx_deltas[tx_hash] += delta
# 2. create sorted history
history = []
for tx_hash, delta in tx_deltas.items():
conf, timestamp = self.get_confirmations(tx_hash)
history.append((tx_hash, conf, delta, timestamp))
history.sort(key = lambda x: self.get_txpos(x[0]))
history.reverse()
# 3. add balance
c, u, x = self.get_balance(domain)
balance = c + u + x
h2 = []
for item in history:
tx_hash, conf, delta, timestamp = item
h2.append((tx_hash, conf, delta, timestamp, balance))
if balance is None or delta is None:
balance = None
else:
balance -= delta
h2.reverse()
# fixme: this may happen if history is incomplete
if balance not in [None, 0]:
print_error("Error: history not synchronized")
return []
return h2
def get_label(self, tx_hash):
label = self.labels.get(tx_hash)
is_default = (label == '') or (label is None)
if is_default:
label = self.get_default_label(tx_hash)
return label, is_default
def get_default_label(self, tx_hash):
if self.txi.get(tx_hash) == {}:
d = self.txo.get(tx_hash, {})
labels = []
for addr in d.keys():
label = self.labels.get(addr)
if label:
labels.append(label)
return ', '.join(labels)
return ''
def get_tx_fee(self, tx):
# this method can be overloaded
return tx.get_fee()
def estimated_fee(self, tx):
estimated_size = len(tx.serialize(-1))/2
fee = int(self.fee_per_kb*estimated_size/1000.)
if fee < MIN_RELAY_TX_FEE: # and tx.requires_fee(self):
fee = MIN_RELAY_TX_FEE
return fee
def make_unsigned_transaction(self, coins, outputs, fixed_fee=None, change_addr=None):
# check outputs
for type, data, value in outputs:
if type == 'address':
assert is_address(data), "Address " + data + " is invalid!"
amount = sum(map(lambda x:x[2], outputs))
total = fee = 0
inputs = []
tx = Transaction.from_io(inputs, outputs)
# add old inputs first
for item in coins:
v = item.get('value')
total += v
self.add_input_info(item)
tx.add_input(item)
# no need to estimate fee until we have reached desired amount
if total < amount:
continue
fee = fixed_fee if fixed_fee is not None else self.estimated_fee(tx)
if total >= amount + fee:
break
else:
raise NotEnoughFunds()
# remove unneeded inputs
for item in sorted(tx.inputs, key=itemgetter('value')):
v = item.get('value')
if total - v >= amount + fee:
tx.inputs.remove(item)
total -= v
fee = fixed_fee if fixed_fee is not None else self.estimated_fee(tx)
else:
break
print_error("using %d inputs"%len(tx.inputs))
# change address
if not change_addr:
# send change to one of the accounts involved in the tx
address = inputs[0].get('address')
account, _ = self.get_address_index(address)
if self.use_change and self.accounts[account].has_change():
# New change addresses are created only after a few confirmations.
# Choose an unused change address if any, otherwise take one at random
change_addrs = self.accounts[account].get_addresses(1)[-self.gap_limit_for_change:]
for change_addr in change_addrs:
if self.get_num_tx(change_addr) == 0:
break
else:
change_addr = random.choice(change_addrs)
else:
change_addr = address
# if change is above dust threshold, add a change output.
change_amount = total - ( amount + fee )
if fixed_fee is not None and change_amount > 0:
tx.outputs.append(('address', change_addr, change_amount))
elif change_amount > DUST_THRESHOLD:
tx.outputs.append(('address', change_addr, change_amount))
# recompute fee including change output
fee = self.estimated_fee(tx)
# remove change output
tx.outputs.pop()
# if change is still above dust threshold, re-add change output.
change_amount = total - ( amount + fee )
if change_amount > DUST_THRESHOLD:
tx.outputs.append(('address', change_addr, change_amount))
print_error('change', change_amount)
else:
print_error('not keeping dust', change_amount)
else:
print_error('not keeping dust', change_amount)
# Sort the inputs and outputs deterministically
tx.BIP_LI01_sort()
run_hook('make_unsigned_transaction', tx)
return tx
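# Usage sketch (illustrative values only): `outputs` is a list of
# (type, data, amount) triples with amounts in the smallest coin unit, e.g.
#   outputs = [('address', recipient_addr, 100000)]
#   tx = wallet.mktx(outputs, password)   # selects coins, adds change, signs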
def mktx(self, outputs, password, fee=None, change_addr=None, domain=None):
coins = self.get_spendable_coins(domain)
tx = self.make_unsigned_transaction(coins, outputs, fee, change_addr)
self.sign_transaction(tx, password)
return tx
def add_input_info(self, txin):
address = txin['address']
account_id, sequence = self.get_address_index(address)
account = self.accounts[account_id]
redeemScript = account.redeem_script(*sequence)
pubkeys = account.get_pubkeys(*sequence)
x_pubkeys = account.get_xpubkeys(*sequence)
# sort pubkeys and x_pubkeys, using the order of pubkeys
pubkeys, x_pubkeys = zip( *sorted(zip(pubkeys, x_pubkeys)))
txin['pubkeys'] = list(pubkeys)
txin['x_pubkeys'] = list(x_pubkeys)
txin['signatures'] = [None] * len(pubkeys)
if redeemScript:
txin['redeemScript'] = redeemScript
txin['num_sig'] = account.m
else:
txin['redeemPubkey'] = account.get_pubkey(*sequence)
txin['num_sig'] = 1
def sign_transaction(self, tx, password):
if self.is_watching_only():
return
# check that the password is correct. This will raise if it's not.
self.check_password(password)
keypairs = {}
x_pubkeys = tx.inputs_to_sign()
for x in x_pubkeys:
sec = self.get_private_key_from_xpubkey(x, password)
if sec:
keypairs[ x ] = sec
if keypairs:
tx.sign(keypairs)
run_hook('sign_transaction', tx, password)
def sendtx(self, tx):
# synchronous
h = self.send_tx(tx)
self.tx_event.wait()
return self.receive_tx(h, tx)
def send_tx(self, tx):
# asynchronous
self.tx_event.clear()
self.network.send([('blockchain.transaction.broadcast', [str(tx)])], self.on_broadcast)
return tx.hash()
def on_broadcast(self, r):
self.tx_result = r.get('result')
self.tx_event.set()
def receive_tx(self, tx_hash, tx):
out = self.tx_result
if out != tx_hash:
return False, "error: " + out
run_hook('receive_tx', tx, self)
return True, out
def update_password(self, old_password, new_password):
if new_password == '':
new_password = None
if self.has_seed():
decoded = self.get_seed(old_password)
self.seed = pw_encode( decoded, new_password)
self.storage.put('seed', self.seed, True)
imported_account = self.accounts.get(IMPORTED_ACCOUNT)
if imported_account:
imported_account.update_password(old_password, new_password)
self.save_accounts()
if hasattr(self, 'master_private_keys'):
for k, v in self.master_private_keys.items():
b = pw_decode(v, old_password)
c = pw_encode(b, new_password)
self.master_private_keys[k] = c
self.storage.put('master_private_keys', self.master_private_keys, True)
self.use_encryption = (new_password != None)
self.storage.put('use_encryption', self.use_encryption,True)
def is_frozen(self, addr):
return addr in self.frozen_addresses
def set_frozen_state(self, addrs, freeze):
'''Set frozen state of the addresses to FREEZE, True or False'''
if all(self.is_mine(addr) for addr in addrs):
if freeze:
self.frozen_addresses |= set(addrs)
else:
self.frozen_addresses -= set(addrs)
self.storage.put('frozen_addresses', list(self.frozen_addresses), True)
return True
return False
def set_verifier(self, verifier):
self.verifier = verifier
# review transactions that are in the history
for addr, hist in self.history.items():
for tx_hash, tx_height in hist:
# add it in case it was previously unconfirmed
self.add_unverified_tx (tx_hash, tx_height)
# if we are on a pruning server, remove unverified transactions
with self.lock:
vr = self.verified_tx.keys() + self.unverified_tx.keys()
for tx_hash in self.transactions.keys():
if tx_hash not in vr:
print_error("removing transaction", tx_hash)
self.transactions.pop(tx_hash)
def start_threads(self, network):
from verifier import SPV
self.network = network
if self.network is not None:
self.verifier = SPV(self.network, self)
self.verifier.start()
self.set_verifier(self.verifier)
self.synchronizer = WalletSynchronizer(self, network)
network.jobs.append(self.synchronizer.main_loop)
else:
self.verifier = None
self.synchronizer = None
def stop_threads(self):
if self.network:
self.verifier.stop()
self.network.jobs.remove(self.synchronizer.main_loop)
self.synchronizer = None
self.storage.put('stored_height', self.get_local_height(), True)
def restore(self, cb):
pass
def get_accounts(self):
return self.accounts
def add_account(self, account_id, account):
self.accounts[account_id] = account
self.save_accounts()
def save_accounts(self):
d = {}
for k, v in self.accounts.items():
d[k] = v.dump()
self.storage.put('accounts', d, True)
def can_import(self):
return not self.is_watching_only()
def can_export(self):
return not self.is_watching_only()
def is_used(self, address):
h = self.history.get(address,[])
c, u, x = self.get_addr_balance(address)
return len(h), len(h) > 0 and c + u + x == 0
def is_empty(self, address):
c, u, x = self.get_addr_balance(address)
return c+u+x == 0
def address_is_old(self, address, age_limit=2):
age = -1
h = self.history.get(address, [])
for tx_hash, tx_height in h:
if tx_height == 0:
tx_age = 0
else:
tx_age = self.get_local_height() - tx_height + 1
if tx_age > age:
age = tx_age
return age > age_limit
def can_sign(self, tx):
if self.is_watching_only():
return False
if tx.is_complete():
return False
for x in tx.inputs_to_sign():
if self.can_sign_xpubkey(x):
return True
return False
def get_private_key_from_xpubkey(self, x_pubkey, password):
if x_pubkey[0:2] in ['02','03','04']:
addr = bitcoin.public_key_to_bc_address(x_pubkey.decode('hex'))
if self.is_mine(addr):
return self.get_private_key(addr, password)[0]
elif x_pubkey[0:2] == 'ff':
xpub, sequence = BIP32_Account.parse_xpubkey(x_pubkey)
for k, v in self.master_public_keys.items():
if v == xpub:
xprv = self.get_master_private_key(k, password)
if xprv:
_, _, _, c, k = deserialize_xkey(xprv)
return bip32_private_key(sequence, k, c)
elif x_pubkey[0:2] == 'fe':
xpub, sequence = OldAccount.parse_xpubkey(x_pubkey)
for k, account in self.accounts.items():
if xpub in account.get_master_pubkeys():
pk = account.get_private_key(sequence, self, password)
return pk[0]
elif x_pubkey[0:2] == 'fd':
addrtype = ord(x_pubkey[2:4].decode('hex'))
addr = hash_160_to_bc_address(x_pubkey[4:].decode('hex'), addrtype)
if self.is_mine(addr):
return self.get_private_key(addr, password)[0]
else:
raise BaseException("z")
def can_sign_xpubkey(self, x_pubkey):
if x_pubkey[0:2] in ['02','03','04']:
addr = bitcoin.public_key_to_bc_address(x_pubkey.decode('hex'))
return self.is_mine(addr)
elif x_pubkey[0:2] == 'ff':
if not isinstance(self, BIP32_Wallet): return False
xpub, sequence = BIP32_Account.parse_xpubkey(x_pubkey)
return xpub in [ self.master_public_keys[k] for k in self.master_private_keys.keys() ]
elif x_pubkey[0:2] == 'fe':
if not isinstance(self, OldWallet): return False
xpub, sequence = OldAccount.parse_xpubkey(x_pubkey)
return xpub == self.get_master_public_key()
elif x_pubkey[0:2] == 'fd':
addrtype = ord(x_pubkey[2:4].decode('hex'))
addr = hash_160_to_bc_address(x_pubkey[4:].decode('hex'), addrtype)
return self.is_mine(addr)
else:
raise BaseException("z")
def is_watching_only(self):
return False
def can_change_password(self):
return not self.is_watching_only()
def get_unused_address(self, account):
# fixme: use slots from expired requests
domain = self.get_account_addresses(account, include_change=False)
for addr in domain:
if not self.history.get(addr) and addr not in self.receive_requests.keys():
return addr
def get_payment_request(self, addr, config):
import util
r = self.receive_requests.get(addr)
if not r:
return
out = copy.copy(r)
out['URI'] = 'verge:' + addr + '?amount=' + util.format_satoshis(out.get('amount'))
out['status'] = self.get_request_status(addr)
# check if bip70 file exists
rdir = config.get('requests_dir')
if rdir:
key = out.get('id', addr)
path = os.path.join(rdir, key)
if os.path.exists(path):
baseurl = 'file://' + rdir
rewrite = config.get('url_rewrite')
if rewrite:
baseurl = baseurl.replace(*rewrite)
out['request_url'] = os.path.join(baseurl, key)
out['URI'] += '&r=' + out['request_url']
out['index_url'] = os.path.join(baseurl, 'index.html') + '?id=' + key
return out
def get_request_status(self, key):
from paymentrequest import PR_PAID, PR_UNPAID, PR_UNKNOWN, PR_EXPIRED
r = self.receive_requests[key]
address = r['address']
amount = r.get('amount')
timestamp = r.get('time', 0)
if timestamp and type(timestamp) != int:
timestamp = 0
expiration = r.get('exp')
if expiration and type(expiration) != int:
expiration = 0
if amount:
if self.up_to_date:
paid = amount <= self.get_addr_received(address)
status = PR_PAID if paid else PR_UNPAID
if status == PR_UNPAID and expiration is not None and time.time() > timestamp + expiration:
status = PR_EXPIRED
else:
status = PR_UNKNOWN
else:
status = PR_UNKNOWN
return status
def make_payment_request(self, addr, amount, message, expiration):
timestamp = int(time.time())
_id = Hash(addr + "%d"%timestamp).encode('hex')[0:10]
r = {'time':timestamp, 'amount':amount, 'exp':expiration, 'address':addr, 'memo':message, 'id':_id}
return r
def sign_payment_request(self, key, alias, alias_addr, password):
req = self.receive_requests.get(key)
alias_privkey = self.get_private_key(alias_addr, password)[0]
pr = paymentrequest.make_unsigned_request(req)
paymentrequest.sign_request_with_alias(pr, alias, alias_privkey)
req['name'] = pr.pki_data
req['sig'] = pr.signature.encode('hex')
self.receive_requests[key] = req
self.storage.put('payment_requests', self.receive_requests)
def add_payment_request(self, req, config):
import os
addr = req['address']
amount = req.get('amount')
message = req.get('memo')
self.receive_requests[addr] = req
self.storage.put('payment_requests', self.receive_requests)
self.set_label(addr, message) # should be a default label
rdir = config.get('requests_dir')
if rdir and amount is not None:
key = req.get('id', addr)
pr = paymentrequest.make_request(config, req)
path = os.path.join(rdir, key)
with open(path, 'w') as f:
f.write(pr.SerializeToString())
# reload
req = self.get_payment_request(addr, config)
with open(os.path.join(rdir, key + '.json'), 'w') as f:
f.write(json.dumps(req))
return req
def remove_payment_request(self, addr, config):
if addr not in self.receive_requests:
return False
r = self.receive_requests.pop(addr)
rdir = config.get('requests_dir')
if rdir:
key = r.get('id', addr)
for s in ['.json', '']:
n = os.path.join(rdir, key + s)
if os.path.exists(n):
os.unlink(n)
self.storage.put('payment_requests', self.receive_requests)
return True
def get_sorted_requests(self, config):
return sorted(map(lambda x: self.get_payment_request(x, config), self.receive_requests.keys()), key=lambda x: x.get('time', 0))
class Imported_Wallet(Abstract_Wallet):
wallet_type = 'imported'
def __init__(self, storage):
Abstract_Wallet.__init__(self, storage)
a = self.accounts.get(IMPORTED_ACCOUNT)
if not a:
self.accounts[IMPORTED_ACCOUNT] = ImportedAccount({'imported':{}})
def is_watching_only(self):
acc = self.accounts[IMPORTED_ACCOUNT]
n = acc.keypairs.values()
return len(n) > 0 and n == [[None, None]] * len(n)
def has_seed(self):
return False
def is_deterministic(self):
return False
def check_password(self, password):
self.accounts[IMPORTED_ACCOUNT].get_private_key((0,0), self, password)
def is_used(self, address):
h = self.history.get(address,[])
return len(h), False
def get_master_public_keys(self):
return {}
def is_beyond_limit(self, address, account, is_change):
return False
class Deterministic_Wallet(Abstract_Wallet):
def __init__(self, storage):
Abstract_Wallet.__init__(self, storage)
def has_seed(self):
return self.seed != ''
def is_deterministic(self):
return True
def is_watching_only(self):
return not self.has_seed()
def add_seed(self, seed, password):
if self.seed:
raise Exception("a seed exists")
self.seed_version, self.seed = self.format_seed(seed)
if password:
self.seed = pw_encode( self.seed, password)
self.use_encryption = True
else:
self.use_encryption = False
self.storage.put('seed', self.seed, False)
self.storage.put('seed_version', self.seed_version, False)
self.storage.put('use_encryption', self.use_encryption,True)
def get_seed(self, password):
return pw_decode(self.seed, password)
def get_mnemonic(self, password):
return self.get_seed(password)
def change_gap_limit(self, value):
assert isinstance(value, int), 'gap limit must be of type int, not of %s'%type(value)
if value >= self.gap_limit:
self.gap_limit = value
self.storage.put('gap_limit', self.gap_limit, True)
return True
elif value >= self.min_acceptable_gap():
for key, account in self.accounts.items():
addresses = account.get_addresses(False)
k = self.num_unused_trailing_addresses(addresses)
n = len(addresses) - k + value
account.receiving_pubkeys = account.receiving_pubkeys[0:n]
account.receiving_addresses = account.receiving_addresses[0:n]
self.gap_limit = value
self.storage.put('gap_limit', self.gap_limit, True)
self.save_accounts()
return True
else:
return False
def num_unused_trailing_addresses(self, addresses):
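        # Counts how many consecutive addresses at the end of the list have no history yet
        # (illustrative: with history only on the first 2 of 5 addresses, this returns 3).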
k = 0
for a in addresses[::-1]:
if self.history.get(a):break
k = k + 1
return k
def min_acceptable_gap(self):
# fixme: this assumes wallet is synchronized
n = 0
nmax = 0
for account in self.accounts.values():
addresses = account.get_addresses(0)
k = self.num_unused_trailing_addresses(addresses)
for a in addresses[0:-k]:
if self.history.get(a):
n = 0
else:
n += 1
if n > nmax: nmax = n
return nmax + 1
def default_account(self):
return self.accounts['0']
def create_new_address(self, account=None, for_change=0):
if account is None:
account = self.default_account()
address = account.create_new_address(for_change)
self.add_address(address)
return address
def add_address(self, address):
if address not in self.history:
self.history[address] = []
if self.synchronizer:
self.synchronizer.add(address)
self.save_accounts()
def synchronize(self):
with self.lock:
for account in self.accounts.values():
account.synchronize(self)
def restore(self, callback):
from i18n import _
def wait_for_wallet():
self.set_up_to_date(False)
while not self.is_up_to_date():
msg = "%s\n%s %d"%(
_("Please wait..."),
_("Addresses generated:"),
len(self.addresses(True)))
apply(callback, (msg,))
time.sleep(0.1)
def wait_for_network():
while not self.network.is_connected():
msg = "%s \n" % (_("Connecting..."))
apply(callback, (msg,))
time.sleep(0.1)
# wait until we are connected, because the user might have selected another server
if self.network:
wait_for_network()
wait_for_wallet()
else:
self.synchronize()
def is_beyond_limit(self, address, account, is_change):
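        # An address is considered "beyond the limit" only when it is preceded by at least
        # gap_limit (or gap_limit_for_change) addresses, none of which has any history.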
if type(account) == ImportedAccount:
return False
addr_list = account.get_addresses(is_change)
i = addr_list.index(address)
prev_addresses = addr_list[:max(0, i)]
limit = self.gap_limit_for_change if is_change else self.gap_limit
if len(prev_addresses) < limit:
return False
prev_addresses = prev_addresses[max(0, i - limit):]
for addr in prev_addresses:
if self.history.get(addr):
return False
return True
def get_action(self):
if not self.get_master_public_key():
return 'create_seed'
if not self.accounts:
return 'create_accounts'
def get_master_public_keys(self):
out = {}
for k, account in self.accounts.items():
if type(account) == ImportedAccount:
continue
name = self.get_account_name(k)
mpk_text = '\n\n'.join(account.get_master_pubkeys())
out[name] = mpk_text
return out
class BIP32_Wallet(Deterministic_Wallet):
# abstract class, bip32 logic
root_name = 'x/'
def __init__(self, storage):
Deterministic_Wallet.__init__(self, storage)
self.master_public_keys = storage.get('master_public_keys', {})
self.master_private_keys = storage.get('master_private_keys', {})
self.gap_limit = storage.get('gap_limit', 20)
def is_watching_only(self):
return not bool(self.master_private_keys)
def can_import(self):
return False
def get_master_public_key(self):
return self.master_public_keys.get(self.root_name)
def get_master_private_key(self, account, password):
k = self.master_private_keys.get(account)
if not k: return
xprv = pw_decode(k, password)
try:
deserialize_xkey(xprv)
except:
raise InvalidPassword()
return xprv
def check_password(self, password):
xpriv = self.get_master_private_key(self.root_name, password)
xpub = self.master_public_keys[self.root_name]
if deserialize_xkey(xpriv)[3] != deserialize_xkey(xpub)[3]:
raise InvalidPassword()
def add_master_public_key(self, name, xpub):
if xpub in self.master_public_keys.values():
raise BaseException('Duplicate master public key')
self.master_public_keys[name] = xpub
self.storage.put('master_public_keys', self.master_public_keys, True)
def add_master_private_key(self, name, xpriv, password):
self.master_private_keys[name] = pw_encode(xpriv, password)
self.storage.put('master_private_keys', self.master_private_keys, True)
def derive_xkeys(self, root, derivation, password):
x = self.master_private_keys[root]
root_xprv = pw_decode(x, password)
xprv, xpub = bip32_private_derivation(root_xprv, root, derivation)
return xpub, xprv
def create_master_keys(self, password):
seed = self.get_seed(password)
self.add_cosigner_seed(seed, self.root_name, password)
def add_cosigner_seed(self, seed, name, password, passphrase=''):
# we don't store the seed, only the master xpriv
xprv, xpub = bip32_root(self.mnemonic_to_seed(seed, passphrase))
xprv, xpub = bip32_private_derivation(xprv, "m/", self.root_derivation)
self.add_master_public_key(name, xpub)
self.add_master_private_key(name, xprv, password)
def add_cosigner_xpub(self, seed, name):
# store only master xpub
xprv, xpub = bip32_root(self.mnemonic_to_seed(seed,''))
xprv, xpub = bip32_private_derivation(xprv, "m/", self.root_derivation)
self.add_master_public_key(name, xpub)
def mnemonic_to_seed(self, seed, password):
return Mnemonic.mnemonic_to_seed(seed, password)
def make_seed(self, lang=None):
return Mnemonic(lang).make_seed()
def format_seed(self, seed):
return NEW_SEED_VERSION, ' '.join(seed.split())
class BIP32_Simple_Wallet(BIP32_Wallet):
# Wallet with a single BIP32 account, no seed
# gap limit 20
wallet_type = 'xpub'
def create_xprv_wallet(self, xprv, password):
xpub = bitcoin.xpub_from_xprv(xprv)
account = BIP32_Account({'xpub':xpub})
self.storage.put('seed_version', self.seed_version, True)
self.add_master_private_key(self.root_name, xprv, password)
self.add_master_public_key(self.root_name, xpub)
self.add_account('0', account)
self.use_encryption = (password != None)
self.storage.put('use_encryption', self.use_encryption,True)
def create_xpub_wallet(self, xpub):
account = BIP32_Account({'xpub':xpub})
self.storage.put('seed_version', self.seed_version, True)
self.add_master_public_key(self.root_name, xpub)
self.add_account('0', account)
class BIP32_HD_Wallet(BIP32_Wallet):
# wallet that can create accounts
def __init__(self, storage):
self.next_account = storage.get('next_account2', None)
BIP32_Wallet.__init__(self, storage)
def can_create_accounts(self):
return self.root_name in self.master_private_keys.keys()
def addresses(self, b=True):
l = BIP32_Wallet.addresses(self, b)
if self.next_account:
_, _, _, next_address = self.next_account
if next_address not in l:
l.append(next_address)
return l
def get_address_index(self, address):
if self.next_account:
next_id, next_xpub, next_pubkey, next_address = self.next_account
if address == next_address:
return next_id, (0,0)
return BIP32_Wallet.get_address_index(self, address)
def num_accounts(self):
keys = []
for k, v in self.accounts.items():
if type(v) != BIP32_Account:
continue
keys.append(k)
i = 0
while True:
account_id = '%d'%i
if account_id not in keys:
break
i += 1
return i
def get_next_account(self, password):
account_id = '%d'%self.num_accounts()
derivation = self.root_name + "%d'"%int(account_id)
xpub, xprv = self.derive_xkeys(self.root_name, derivation, password)
self.add_master_public_key(derivation, xpub)
if xprv:
self.add_master_private_key(derivation, xprv, password)
account = BIP32_Account({'xpub':xpub})
addr, pubkey = account.first_address()
self.add_address(addr)
return account_id, xpub, pubkey, addr
def create_main_account(self, password):
# First check the password is valid (this raises if it isn't).
self.check_password(password)
assert self.num_accounts() == 0
self.create_account('Main account', password)
def create_account(self, name, password):
account_id, xpub, _, _ = self.get_next_account(password)
account = BIP32_Account({'xpub':xpub})
self.add_account(account_id, account)
self.set_label(account_id, name)
# add address of the next account
self.next_account = self.get_next_account(password)
self.storage.put('next_account2', self.next_account)
def account_is_pending(self, k):
return type(self.accounts.get(k)) == PendingAccount
def delete_pending_account(self, k):
assert type(self.accounts.get(k)) == PendingAccount
self.accounts.pop(k)
self.save_accounts()
def create_pending_account(self, name, password):
if self.next_account is None:
self.next_account = self.get_next_account(password)
self.storage.put('next_account2', self.next_account)
next_id, next_xpub, next_pubkey, next_address = self.next_account
if name:
self.set_label(next_id, name)
self.accounts[next_id] = PendingAccount({'pending':True, 'address':next_address, 'pubkey':next_pubkey})
self.save_accounts()
def synchronize(self):
# synchronize existing accounts
BIP32_Wallet.synchronize(self)
if self.next_account is None and not self.use_encryption:
try:
self.next_account = self.get_next_account(None)
self.storage.put('next_account2', self.next_account)
except:
print_error('cannot get next account')
# check pending account
if self.next_account is not None:
next_id, next_xpub, next_pubkey, next_address = self.next_account
if self.address_is_old(next_address):
print_error("creating account", next_id)
self.add_account(next_id, BIP32_Account({'xpub':next_xpub}))
# here the user should get a notification
self.next_account = None
self.storage.put('next_account2', self.next_account)
elif self.history.get(next_address, []):
if next_id not in self.accounts:
print_error("create pending account", next_id)
self.accounts[next_id] = PendingAccount({'pending':True, 'address':next_address, 'pubkey':next_pubkey})
self.save_accounts()
class NewWallet(BIP32_Wallet, Mnemonic):
# Standard wallet
root_derivation = "m/"
wallet_type = 'standard'
def create_main_account(self, password):
xpub = self.master_public_keys.get("x/")
account = BIP32_Account({'xpub':xpub})
self.add_account('0', account)
class Multisig_Wallet(BIP32_Wallet, Mnemonic):
# generic m of n
root_name = "x1/"
root_derivation = "m/"
def __init__(self, storage):
BIP32_Wallet.__init__(self, storage)
self.wallet_type = storage.get('wallet_type')
m = re.match('(\d+)of(\d+)', self.wallet_type)
self.m = int(m.group(1))
self.n = int(m.group(2))
def load_accounts(self):
self.accounts = {}
d = self.storage.get('accounts', {})
v = d.get('0')
if v:
if v.get('xpub3'):
v['xpubs'] = [v['xpub'], v['xpub2'], v['xpub3']]
elif v.get('xpub2'):
v['xpubs'] = [v['xpub'], v['xpub2']]
self.accounts = {'0': Multisig_Account(v)}
def create_main_account(self, password):
account = Multisig_Account({'xpubs': self.master_public_keys.values(), 'm': self.m})
self.add_account('0', account)
def get_master_public_keys(self):
return self.master_public_keys
def get_action(self):
for i in range(self.n):
if self.master_public_keys.get("x%d/"%(i+1)) is None:
return 'create_seed' if i == 0 else 'add_cosigners'
if not self.accounts:
return 'create_accounts'
class OldWallet(Deterministic_Wallet):
wallet_type = 'old'
def __init__(self, storage):
Deterministic_Wallet.__init__(self, storage)
self.gap_limit = storage.get('gap_limit', 5)
def make_seed(self):
import old_mnemonic
seed = random_seed(128)
return ' '.join(old_mnemonic.mn_encode(seed))
def format_seed(self, seed):
import old_mnemonic
# see if seed was entered as hex
seed = seed.strip()
try:
assert seed
seed.decode('hex')
return OLD_SEED_VERSION, str(seed)
except Exception:
pass
words = seed.split()
seed = old_mnemonic.mn_decode(words)
if not seed:
raise Exception("Invalid seed")
return OLD_SEED_VERSION, seed
def create_master_keys(self, password):
seed = self.get_seed(password)
mpk = OldAccount.mpk_from_seed(seed)
self.storage.put('master_public_key', mpk, True)
def get_master_public_key(self):
return self.storage.get("master_public_key")
def get_master_public_keys(self):
return {'Main Account':self.get_master_public_key()}
def create_main_account(self, password):
mpk = self.storage.get("master_public_key")
self.create_account(mpk)
def create_account(self, mpk):
self.accounts['0'] = OldAccount({'mpk':mpk, 0:[], 1:[]})
self.save_accounts()
def create_watching_only_wallet(self, mpk):
self.seed_version = OLD_SEED_VERSION
self.storage.put('seed_version', self.seed_version, False)
self.storage.put('master_public_key', mpk, True)
self.create_account(mpk)
def get_seed(self, password):
seed = pw_decode(self.seed, password).encode('utf8')
return seed
def check_password(self, password):
seed = self.get_seed(password)
self.accounts['0'].check_seed(seed)
def get_mnemonic(self, password):
import old_mnemonic
s = self.get_seed(password)
return ' '.join(old_mnemonic.mn_encode(s))
wallet_types = [
# category type description constructor
('standard', 'old', ("Old wallet"), OldWallet),
('standard', 'xpub', ("BIP32 Import"), BIP32_Simple_Wallet),
('standard', 'standard', ("Standard wallet"), NewWallet),
('standard', 'imported', ("Imported wallet"), Imported_Wallet),
('multisig', '2of2', ("Multisig wallet (2 of 2)"), Multisig_Wallet),
('multisig', '2of3', ("Multisig wallet (2 of 3)"), Multisig_Wallet)
]
# former WalletFactory
class Wallet(object):
"""The main wallet "entry point".
This class is actually a factory that will return a wallet of the correct
type when passed a WalletStorage instance."""
def __new__(self, storage):
seed_version = storage.get('seed_version')
if not seed_version:
seed_version = OLD_SEED_VERSION if len(storage.get('master_public_key','')) == 128 else NEW_SEED_VERSION
if seed_version not in [OLD_SEED_VERSION, NEW_SEED_VERSION]:
msg = "Your wallet has an unsupported seed version."
msg += '\n\nWallet file: %s' % os.path.abspath(storage.path)
if seed_version in [5, 7, 8, 9, 10]:
msg += "\n\nTo open this wallet, try 'git checkout seed_v%d'"%seed_version
if seed_version == 6:
# version 1.9.8 created v6 wallets when an incorrect seed was entered in the restore dialog
msg += '\n\nThis file was created because of a bug in version 1.9.8.'
if storage.get('master_public_keys') is None and storage.get('master_private_keys') is None and storage.get('imported_keys') is None:
# pbkdf2 was not included with the binaries, and wallet creation aborted.
msg += "\nIt does not contain any keys, and can safely be removed."
else:
# creation was complete if electrum was run from source
msg += "\nPlease open this file with Electrum 1.9.8, and move your coins to a new wallet."
raise BaseException(msg)
wallet_type = storage.get('wallet_type')
if wallet_type:
for cat, t, name, c in wallet_types:
if t == wallet_type:
WalletClass = c
break
else:
if re.match('(\d+)of(\d+)', wallet_type):
WalletClass = Multisig_Wallet
else:
raise BaseException('unknown wallet type', wallet_type)
else:
if seed_version == OLD_SEED_VERSION:
WalletClass = OldWallet
else:
WalletClass = NewWallet
return WalletClass(storage)
@classmethod
def is_seed(self, seed):
if not seed:
return False
elif is_old_seed(seed):
return True
elif is_new_seed(seed):
return True
else:
return False
@classmethod
def is_old_mpk(self, mpk):
try:
int(mpk, 16)
assert len(mpk) == 128
return True
except:
return False
@classmethod
def is_xpub(self, text):
try:
assert text[0:4] == 'xpub'
deserialize_xkey(text)
return True
except:
return False
@classmethod
def is_xprv(self, text):
try:
assert text[0:4] == 'xprv'
deserialize_xkey(text)
return True
except:
return False
@classmethod
def is_address(self, text):
if not text:
return False
for x in text.split():
if not bitcoin.is_address(x):
return False
return True
@classmethod
def is_private_key(self, text):
if not text:
return False
for x in text.split():
if not bitcoin.is_private_key(x):
return False
return True
@classmethod
def from_seed(self, seed, password, storage):
if is_old_seed(seed):
klass = OldWallet
elif is_new_seed(seed):
klass = NewWallet
w = klass(storage)
w.add_seed(seed, password)
w.create_master_keys(password)
w.create_main_account(password)
return w
@classmethod
def from_address(self, text, storage):
w = Imported_Wallet(storage)
for x in text.split():
w.accounts[IMPORTED_ACCOUNT].add(x, None, None, None)
w.save_accounts()
return w
@classmethod
def from_private_key(self, text, password, storage):
w = Imported_Wallet(storage)
w.update_password(None, password)
for x in text.split():
w.import_key(x, password)
return w
@classmethod
def from_old_mpk(self, mpk, storage):
w = OldWallet(storage)
w.seed = ''
w.create_watching_only_wallet(mpk)
return w
@classmethod
def from_xpub(self, xpub, storage):
w = BIP32_Simple_Wallet(storage)
w.create_xpub_wallet(xpub)
return w
@classmethod
def from_xprv(self, xprv, password, storage):
w = BIP32_Simple_Wallet(storage)
w.create_xprv_wallet(xprv, password)
return w
@classmethod
def from_multisig(klass, key_list, password, storage, wallet_type):
storage.put('wallet_type', wallet_type, True)
self = Multisig_Wallet(storage)
key_list = sorted(key_list, key = lambda x: klass.is_xpub(x))
for i, text in enumerate(key_list):
assert klass.is_seed(text) or klass.is_xprv(text) or klass.is_xpub(text)
name = "x%d/"%(i+1)
if klass.is_xprv(text):
xpub = bitcoin.xpub_from_xprv(text)
self.add_master_public_key(name, xpub)
self.add_master_private_key(name, text, password)
elif klass.is_xpub(text):
self.add_master_public_key(name, text)
elif klass.is_seed(text):
if name == 'x1/':
self.add_seed(text, password)
self.create_master_keys(password)
else:
self.add_cosigner_seed(text, name, password)
self.use_encryption = (password != None)
self.storage.put('use_encryption', self.use_encryption, True)
self.create_main_account(password)
return self
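# Illustrative use of the factory above (hypothetical snippet; WalletStorage is assumed to be
# available from this module):
#
#   storage = WalletStorage(path)
#   wallet = Wallet(storage)                      # returns the appropriate Wallet subclass
#   # or, when restoring from user-supplied text:
#   if Wallet.is_seed(text):
#       wallet = Wallet.from_seed(text, password, storage)
#   elif Wallet.is_xpub(text):
#       wallet = Wallet.from_xpub(text, storage)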
| harwee/electrum-xvg-tor | lib/wallet.py | Python | gpl-3.0 | 73,225 | 0.003086 |
{
'name' : 'Signature templates for user emails',
'version' : '1.0.0',
'author' : 'IT-Projects LLC, Ivan Yelizariev',
'license': 'GPL-3',
'category' : 'Social Network',
'website' : 'https://yelizariev.github.io',
'depends' : ['base'],
'data':[
'res_users_signature_views.xml',
'security/res_users_signature_security.xml',
'security/ir.model.access.csv',
],
'installable': True
}
| Antiun/yelizariev-addons | res_users_signature/__openerp__.py | Python | lgpl-3.0 | 443 | 0.015801 |
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Mon Jul 22 17:01:36 2019
@author: raf
"""
# IMPORT STUFF
from pdb import set_trace as stop
import copy
import numpy as np
from collections import OrderedDict
import string as st
import os
import pandas as pd
from vison.datamodel import cdp
from vison.support import files
from vison.fpa import fpa as fpamod
from vison.metatests.metacal import MetaCal
from vison.plot import plots_fpa as plfpa
from vison.support import vcal, utils
from vison.datamodel import core as vcore
from vison.ogse import ogse
from vison.inject import lib as ilib
import matplotlib.cm as cm
from matplotlib import pyplot as plt
plt.switch_backend('TkAgg')
from matplotlib.colors import Normalize
# END IMPORT
cols2keep = [
'test',
'sn_ccd1',
'sn_ccd2',
'sn_ccd3',
'sn_roe',
'sn_rpsu',
'exptime',
'vstart',
'vend',
'rdmode',
'flushes',
'siflsh',
'siflsh_p',
'swellw',
'swelldly',
'inisweep',
'cdpu_clk',
'chinj',
'chinj_on',
'chinj_of',
'id_wid',
'id_dly',
'chin_dly',
'v_tpump',
's_tpump',
'v_tp_mod',
's_tp_mod',
'v_tp_cnt',
's_tp_cnt',
'dwell_v',
'dwell_s',
'toi_fl',
'toi_tp',
'toi_ro',
'toi_ch',
'motr',
'motr_cnt',
'motr_siz',
'source',
'wave',
'mirr_on',
'mirr_pos',
'R1C1_TT',
'R1C1_TB',
'R1C2_TT',
'R1C2_TB',
'R1C3_TT',
'R1C3_TB',
'IDL',
'IDH',
'IG1_1_T',
'IG1_2_T',
'IG1_3_T',
'IG1_1_B',
'IG1_2_B',
'IG1_3_B',
'IG2_T',
'IG2_B',
'OD_1_T',
'OD_2_T',
'OD_3_T',
'OD_1_B',
'OD_2_B',
'OD_3_B',
'RD_T',
'RD_B',
'time',
'HK_CCD1_TEMP_T',
'HK_CCD2_TEMP_T',
'HK_CCD3_TEMP_T',
'HK_CCD1_TEMP_B',
'HK_CCD2_TEMP_B',
'HK_CCD3_TEMP_B',
'HK_CCD1_OD_T',
'HK_CCD2_OD_T',
'HK_CCD3_OD_T',
'HK_CCD1_OD_B',
'HK_CCD2_OD_B',
'HK_CCD3_OD_B',
'HK_COMM_RD_T',
'HK_COMM_RD_B',
'HK_CCD1_IG1_T',
'HK_CCD2_IG1_T',
'HK_CCD3_IG1_T',
'HK_CCD1_IG1_B',
'HK_CCD2_IG1_B',
'HK_CCD3_IG1_B',
'HK_COMM_IG2_T',
'HK_COMM_IG2_B',
'HK_FPGA_BIAS_ID2',
'HK_VID_PCB_TEMP_T',
'HK_VID_PCB_TEMP_B',
'HK_RPSU_TEMP1',
'HK_FPGA_PCB_TEMP_T',
'HK_FPGA_PCB_TEMP_B',
'HK_RPSU_TEMP_2',
'HK_RPSU_28V_PRI_I',
'chk_NPIXOFF',
'chk_NPIXSAT',
'offset_pre',
'offset_ove',
'std_pre',
'std_ove']
class MetaChinj01(MetaCal):
""" """
def __init__(self, **kwargs):
""" """
super(MetaChinj01, self).__init__(**kwargs)
self.testnames = ['CHINJ01']
self.incols = cols2keep
self.ParsedTable = OrderedDict()
allgains = files.cPickleRead(kwargs['cdps']['gain'])
self.cdps['GAIN'] = OrderedDict()
for block in self.blocks:
self.cdps['GAIN'][block] = allgains[block]['PTC01'].copy()
self.products['METAFIT'] = OrderedDict()
self.products['VERPROFILES'] = OrderedDict()
self.products['HORPROFILES'] = OrderedDict()
self.init_fignames()
self.init_outcdpnames()
def parse_single_test(self, jrep, block, testname, inventoryitem):
""" """
NCCDs = len(self.CCDs)
NQuads = len(self.Quads)
session = inventoryitem['session']
CCDkeys = ['CCD%i' % CCD for CCD in self.CCDs]
IndexS = vcore.vMultiIndex([vcore.vIndex('ix', vals=[0])])
IndexCQ = vcore.vMultiIndex([vcore.vIndex('ix', vals=[0]),
vcore.vIndex('CCD', vals=self.CCDs),
vcore.vIndex('Quad', vals=self.Quads)])
#idd = copy.deepcopy(inventoryitem['dd'])
sidd = self.parse_single_test_gen(jrep, block, testname, inventoryitem)
        # TEST SPECIFIC
# TO BE ADDED:
# OFFSETS: pre, img, ove
# RON: pre, img, ove
# REFERENCES TO PROFILES
CHAMBER = sidd.meta['inputs']['CHAMBER']
CHAMBER_key = CHAMBER[0]
chamber_v = np.array([CHAMBER_key])
sidd.addColumn(chamber_v, 'CHAMBERKEY', IndexS, ix=0)
block_v = np.array([block])
sidd.addColumn(block_v, 'BLOCK', IndexS, ix=0)
test_v = np.array([jrep + 1])
sidd.addColumn(test_v, 'REP', IndexS, ix=0)
test_v = np.array([session])
sidd.addColumn(test_v, 'SESSION', IndexS, ix=0)
test_v = np.array([testname])
sidd.addColumn(test_v, 'TEST', IndexS, ix=0)
productspath = os.path.join(inventoryitem['resroot'], 'products')
metafitcdp_pick = os.path.join(productspath,
os.path.split(sidd.products['METAFIT_CDP'])[-1])
metafitcdp = files.cPickleRead(metafitcdp_pick)
metafit = copy.deepcopy(metafitcdp['data']['ANALYSIS'])
metafitkey = '%s_%s_%s_%i' % (testname, block, session, jrep + 1)
self.products['METAFIT'][metafitkey] = copy.deepcopy(metafit)
metafitkey_v = np.array([metafitkey])
sidd.addColumn(metafitkey_v, 'METAFIT', IndexS, ix=0)
metacdp_pick = os.path.join(productspath, os.path.split(
sidd.products['META_CDP'])[-1]) # change to META_CDP
metacdp = files.cPickleRead(metacdp_pick)
meta = metacdp['data']['ANALYSIS'] # this is a pandas DataFrame
tmp_v_CQ = np.zeros((1, NCCDs, NQuads))
bgd_adu_v = tmp_v_CQ.copy()
ig1_thresh_v = tmp_v_CQ.copy()
ig1_notch_v = tmp_v_CQ.copy()
slope_v = tmp_v_CQ.copy()
n_adu_v = tmp_v_CQ.copy()
for iCCD, CCDk in enumerate(CCDkeys):
for kQ, Q in enumerate(self.Quads):
ixloc = np.where((meta['CCD'] == iCCD + 1) & (meta['Q'] == kQ + 1))
bgd_adu_v[0, iCCD, kQ] = meta['BGD_ADU'][ixloc[0][0]]
ig1_thresh_v[0, iCCD, kQ] = meta['IG1_THRESH'][ixloc[0][0]]
ig1_notch_v[0, iCCD, kQ] = meta['IG1_NOTCH'][ixloc[0][0]]
slope_v[0, iCCD, kQ] = meta['S'][ixloc[0][0]]
n_adu_v[0, iCCD, kQ] = meta['N_ADU'][ixloc[0][0]]
sidd.addColumn(bgd_adu_v, 'FIT_BGD_ADU', IndexCQ)
sidd.addColumn(ig1_thresh_v, 'FIT_IG1_THRESH', IndexCQ)
sidd.addColumn(ig1_notch_v, 'FIT_IG1_NOTCH', IndexCQ)
sidd.addColumn(slope_v, 'FIT_SLOPE', IndexCQ)
sidd.addColumn(n_adu_v, 'FIT_N_ADU', IndexCQ)
# charge injection profiles
verprofspick = os.path.join(productspath,
os.path.split(sidd.products['PROFS_ALCOL'])[-1])
verprofs = files.cPickleRead(verprofspick)
vprofkey = '%s_%s_%s_%i' % (testname, block, session, jrep + 1)
self.products['VERPROFILES'][vprofkey] = verprofs.copy()
vprofskeys_v = np.zeros((1),dtype='U50')
vprofskeys_v[0] = vprofkey
sidd.addColumn(vprofskeys_v, 'VERPROFS_KEY', IndexS)
horprofspick = os.path.join(productspath,
os.path.split(sidd.products['PROFS_ALROW'])[-1])
horprofs = files.cPickleRead(horprofspick)
hprofkey = '%s_%s_%s_%i' % (testname, block, session, jrep + 1)
self.products['HORPROFILES'][hprofkey] = horprofs.copy()
hprofskeys_v = np.zeros((1),dtype='U50')
hprofskeys_v[0] = hprofkey
sidd.addColumn(hprofskeys_v, 'HORPROFS_KEY', IndexS)
# flatten sidd to table
sit = sidd.flattentoTable()
return sit
def _get_extractor_NOTCH_fromPT(self, units):
""" """
def _extract_NOTCH_fromPT(PT, block, CCDk, Q):
ixblock = self.get_ixblock(PT, block)
column = 'FIT_N_ADU_%s_Quad%s' % (CCDk, Q)
if units == 'ADU':
unitsConvFactor = 1
elif units == 'E':
unitsConvFactor = self.cdps['GAIN'][block][CCDk][Q][0]
Notch = np.nanmedian(PT[column][ixblock]) * unitsConvFactor
return Notch
return _extract_NOTCH_fromPT
def _get_injcurve(self, _chfitdf, ixCCD, ixQ, IG1raw, gain):
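        # Evaluates the fitted charge-injection curve of one CCD quadrant over the IG1raw grid.
        # Assumption: ilib.f_Inj_vs_IG1_ReLU implements the model quoted later in ic_meta['model'],
        # I = b + 1/(1+exp(-K(IG1-XT))) * (-A*(IG1-XN)[IG1<XN] + N), returning injection as a
        # fraction of 2**16 ADU (hence the scaling), which is then converted to kilo-electrons.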
""" """
ixsel = np.where((_chfitdf['CCD'] == ixCCD) & (_chfitdf['Q'] == ixQ))
pars = ['BGD', 'K', 'XT', 'XN', 'A', 'N']
trans = dict(BGD='b', K='k', XT='xt', XN='xN', A='a', N='N')
parsdict = dict()
for par in pars:
parsdict[trans[par]] = _chfitdf[par].values[ixsel][0]
parsdict['IG1'] = IG1raw.copy()
inj = ilib.f_Inj_vs_IG1_ReLU(**parsdict) * 2.**16 # ADU
inj_kel = inj * gain / 1.E3
return inj_kel
def _get_CHIG1_MAP_from_PT(self, kind='CAL'):
""" """
CHIG1MAP = OrderedDict()
CHIG1MAP['labelkeys'] = self.Quads
PT = self.ParsedTable['CHINJ01']
column = 'METAFIT'
IG1s = [2.5, 6.75]
dIG1 = 0.05
NIG1 = (IG1s[1] - IG1s[0]) / dIG1 + 1
IG1raw = np.arange(NIG1) * dIG1 + IG1s[0]
for jY in range(self.NSLICES_FPA):
for iX in range(self.NCOLS_FPA):
Ckey = 'C_%i%i' % (jY + 1, iX + 1)
CHIG1MAP[Ckey] = OrderedDict()
locator = self.fpa.FPA_MAP[Ckey]
block = locator[0]
CCDk = locator[1]
jCCD = int(CCDk[-1])
ixblock = np.where(PT['BLOCK'] == block)
if len(ixblock[0]) == 0:
CHIG1MAP[Ckey] = OrderedDict(x=OrderedDict(),
y=OrderedDict())
for Q in self.Quads:
CHIG1MAP[Ckey]['x'][Q] = []
CHIG1MAP[Ckey]['y'][Q] = []
continue
_chkey = PT[column][ixblock][0]
_chfitdf = self.products['METAFIT'][_chkey]
_ccd_chfitdict = OrderedDict(x=OrderedDict(),
y=OrderedDict())
for kQ, Q in enumerate(self.Quads):
roeVCal = self.roeVCals[block]
IG1cal = roeVCal.fcal_HK(IG1raw, 'IG1', jCCD, Q)
gain = self.cdps['GAIN'][block][CCDk][Q][0]
inj_kel = self._get_injcurve(_chfitdf, jCCD, kQ + 1, IG1raw, gain)
if kind == 'CAL':
_IG1 = IG1cal.copy()
elif kind == 'RAW':
_IG1 = IG1raw.copy()
_ccd_chfitdict['x'][Q] = _IG1.copy()
_ccd_chfitdict['y'][Q] = inj_kel.copy()
CHIG1MAP[Ckey] = _ccd_chfitdict.copy()
return CHIG1MAP
def _get_XYdict_INJ(self, kind='CAL'):
x = dict()
y = dict()
PT = self.ParsedTable['CHINJ01']
column = 'METAFIT'
IG1s = [2.5, 6.75]
dIG1 = 0.05
NIG1 = (IG1s[1] - IG1s[0]) / dIG1 + 1
IG1raw = np.arange(NIG1) * dIG1 + IG1s[0]
labelkeys = []
for block in self.flight_blocks:
ixblock = np.where(PT['BLOCK'] == block)
ch_key = PT[column][ixblock][0]
chfitdf = self.products['METAFIT'][ch_key]
for iCCD, CCD in enumerate(self.CCDs):
CCDk = 'CCD%i' % CCD
for kQ, Q in enumerate(self.Quads):
roeVCal = self.roeVCals[block]
IG1cal = roeVCal.fcal_HK(IG1raw, 'IG1', iCCD + 1, Q)
gain = self.cdps['GAIN'][block][CCDk][Q][0]
if kind == 'CAL':
_IG1 = IG1cal.copy()
elif kind == 'RAW':
_IG1 = IG1raw.copy()
pkey = '%s_%s_%s' % (block, CCDk, Q)
inj_kel = self._get_injcurve(chfitdf, iCCD + 1, kQ + 1, IG1raw, gain)
x[pkey] = _IG1.copy()
y[pkey] = inj_kel.copy()
labelkeys.append(pkey)
CHdict = dict(x=x, y=y, labelkeys=labelkeys)
return CHdict
def _extract_INJCURVES_PAR_fromPT(self,PT,block,CCDk,Q):
""" """
ixblock = self.get_ixblock(PT,block)
column = 'METAFIT'
ch_key = PT[column][ixblock][0]
chfitdf = self.products['METAFIT'][ch_key]
ixCCD = ['CCD1','CCD2','CCD3'].index(CCDk)+1
ixQ = ['E','F','G','H'].index(Q)+1
ixsel = np.where((chfitdf['CCD'] == ixCCD) & (chfitdf['Q'] == ixQ))
pars = ['BGD', 'K', 'XT', 'XN', 'A', 'N']
trans = dict(BGD='b', K='k', XT='xt', XN='xN', A='a', N='N')
parsdict = dict()
for par in pars:
parsdict[trans[par]] = '%.3e' % chfitdf[par].values[ixsel][0]
return parsdict
def _get_XYdict_PROFS(self,proftype, IG1=4.5, Quads=None, doNorm=False, xrangeNorm=None):
""" """
if Quads is None:
Quads = self.Quads
x = dict()
y = dict()
labelkeys = []
PT = self.ParsedTable['CHINJ01']
profcol = '%sPROFS_KEY' % proftype.upper()
prodkey = '%sPROFILES' % proftype.upper()
for block in self.flight_blocks:
ixsel = np.where(PT['BLOCK'] == block)
prof_key = PT[profcol][ixsel][0]
i_Prof = self.products[prodkey][prof_key].copy()
IG1key = 'IG1_%.2fV' % IG1
for iCCD, CCD in enumerate(self.CCDs):
CCDk = 'CCD%i' % CCD
for kQ, Q in enumerate(Quads):
pkey = '%s_%s_%s' % (block, CCDk, Q)
_pcq = i_Prof['data'][CCDk][Q].copy()
_x = _pcq['x'][IG1key].copy()
_y = _pcq['y'][IG1key].copy()
x[pkey] = _x
if doNorm:
if xrangeNorm is not None:
norm = np.nanmedian(_y[xrangeNorm[0]:xrangeNorm[1]])
else:
norm = np.nanmedian(_y)
y[pkey] = _y / norm
labelkeys.append(pkey)
Pdict = dict(x=x,y=y,labelkeys=labelkeys)
return Pdict
def init_fignames(self):
""" """
if not os.path.exists(self.figspath):
os.system('mkdir %s' % self.figspath)
self.figs['NOTCH_ADU_MAP'] = os.path.join(self.figspath,
'NOTCH_ADU_MAP.png')
self.figs['NOTCH_ELE_MAP'] = os.path.join(self.figspath,
'NOTCH_ELE_MAP.png')
self.figs['CHINJ01_curves_IG1_RAW'] = os.path.join(self.figspath,
'CHINJ01_CURVES_IG1_RAW.png')
self.figs['CHINJ01_curves_IG1_CAL'] = os.path.join(self.figspath,
'CHINJ01_CURVES_IG1_CAL.png')
self.figs['CHINJ01_curves_MAP_IG1_CAL'] = os.path.join(self.figspath,
'CHINJ01_CURVES_MAP_IG1_CAL.png')
for proftype in ['ver','hor']:
for ccdhalf in ['top','bot']:
figkey = 'PROFS_%s_%s' % (proftype.upper(),ccdhalf.upper())
self.figs[figkey] = os.path.join(self.figspath,
'CHINJ01_%s_%s_PROFILES.png' % \
(proftype.upper(),ccdhalf.upper()))
for ccdhalf in ['top','bot']:
figkey = 'PROFS_ver_%s_ZOOM' % (ccdhalf.upper(),)
self.figs[figkey] = os.path.join(self.figspath,
'CHINJ01_ver_%s_ZOOM_PROFILES.png' % \
(ccdhalf.upper()),)
def init_outcdpnames(self):
if not os.path.exists(self.cdpspath):
os.system('mkdir %s' % self.cdpspath)
self.outcdps['INJCURVES'] = 'CHINJ01_INJCURVES_PAR.json'
self.outcdps['INJPROF_XLSX_HOR'] = 'CHINJ01_INJPROFILES_HOR.xlsx'
self.outcdps['INJPROF_XLSX_VER'] = 'CHINJ01_INJPROFILES_VER.xlsx'
self.outcdps['INJPROF_FITS_HOR'] = 'CHINJ01_INJPROFILES_HOR.fits'
self.outcdps['INJPROF_FITS_VER'] = 'CHINJ01_INJPROFILES_VER.fits'
def _extract_NUNHOR_fromPT(self, PT, block, CCDk, Q):
""" """
IG1 = 4.5
ixblock = self.get_ixblock(PT, block)
profcol = 'HORPROFS_KEY'
prodkey = 'HORPROFILES'
prof_key = PT[profcol][ixblock][0]
i_Prof = self.products[prodkey][prof_key].copy()
IG1key = 'IG1_%.2fV' % IG1
_pcq = i_Prof['data'][CCDk][Q].copy()
_y = _pcq['y'][IG1key].copy()
return np.nanstd(_y)/np.nanmean(_y)*100.
def _get_injprof_dfdict(self, direction, pandice=False):
""" """
injprofs = OrderedDict()
Quads = self.Quads
PT = self.ParsedTable['CHINJ01']
profcol = '{}PROFS_KEY'.format(direction.upper())
prodkey = '{}PROFILES'.format(direction.upper())
for ib, block in enumerate(self.flight_blocks):
injprofs[block] = OrderedDict()
ixsel = np.where(PT['BLOCK'] == block)
prof_key = PT[profcol][ixsel][0]
i_Prof = self.products[prodkey][prof_key].copy()
if ib==0:
rawIG1keys = list(i_Prof['data']['CCD1']['E']['x'].keys())
IG1values = [float(item.replace('IG1_','').replace('V','')) for item in rawIG1keys]
_order = np.argsort(IG1values)
IG1keys = np.array(rawIG1keys)[_order].tolist()
IG1values = np.array(IG1values)[_order].tolist()
for IG1key in IG1keys:
for iCCD, CCD in enumerate(self.CCDs):
CCDk = 'CCD%i' % CCD
Ckey = self.fpa.get_Ckey_from_BlockCCD(block, CCD)
for kQ, Q in enumerate(Quads):
_pcq = i_Prof['data'][CCDk][Q].copy()
_x = _pcq['x'][IG1key].copy()
_y = _pcq['y'][IG1key].copy()
#_y /= np.nanmedian(_y)
if iCCD==0 and kQ==0:
injprofs[block]['pixel'] = _x.copy()
injprofs[block]['%s_%s_%s' % (Ckey,Q,IG1key)] = _y.copy()
if pandice:
for block in self.flight_blocks:
injprofs[block] = pd.DataFrame.from_dict(injprofs[block])
return injprofs, IG1values
def get_injprof_xlsx_cdp(self, direction, inCDP_header=None):
""" """
CDP_header = OrderedDict()
        if inCDP_header is not None:
CDP_header.update(inCDP_header)
cdpname = self.outcdps['INJPROF_XLSX_%s' % direction.upper()]
path = self.cdpspath
injprof_cdp = cdp.Tables_CDP()
injprof_cdp.rootname = os.path.splitext(cdpname)[0]
injprof_cdp.path = path
injprofs_meta = OrderedDict()
injprofs, IG1values = self._get_injprof_dfdict(direction, pandice=True)
injprofs_meta['IG1'] = IG1values.__repr__()
#injprofs_meta['norm'] = 'median'
injprof_cdp.ingest_inputs(data=injprofs.copy(),
meta=injprofs_meta.copy(),
header=CDP_header.copy())
injprof_cdp.init_wb_and_fillAll(
header_title='CHINJ01: INJPROFS-%s' % direction.upper())
return injprof_cdp
def get_injprof_fits_cdp(self, direction, inCDP_header=None):
""" """
CDP_header = OrderedDict()
if inCDP_header is not None:
CDP_header.update(inCDP_header)
cdpname = self.outcdps['INJPROF_FITS_%s' % direction.upper()]
path = self.cdpspath
injprof_cdp = cdp.FitsTables_CDP()
injprof_cdp.rootname = os.path.splitext(cdpname)[0]
injprof_cdp.path = path
injprofs_meta = OrderedDict()
injprofs, IG1values = self._get_injprof_dfdict(direction, pandice=False)
injprofs_meta['IG1'] = IG1values.__repr__()
#injprofs_meta['norm'] = 'median'
CDP_header = self.FITSify_CDP_header(CDP_header)
injprof_cdp.ingest_inputs(data=injprofs.copy(),
meta=injprofs_meta.copy(),
header=CDP_header.copy())
injprof_cdp.init_HL_and_fillAll()
injprof_cdp.hdulist[0].header.insert(list(CDP_header.keys())[0],
('title', 'CHINJ01: INJPROFS-%s' % direction.upper()))
return injprof_cdp
def dump_aggregated_results(self):
""" """
if self.report is not None:
self.report.add_Section(keyword='dump',
Title='Aggregated Results', level=0)
self.add_DataAlbaran2Report()
function, module = utils.get_function_module()
CDP_header = self.CDP_header.copy()
CDP_header.update(dict(function=function, module=module))
CDP_header['DATE'] = self.get_time_tag()
# Histogram of Slopes [ADU/electrons]
# Histogram of Notch [ADU/electrons]
# Histogram of IG1_THRESH
# Injection level vs. Calibrated IG1, MAP
CURVES_IG1CAL_MAP = self._get_CHIG1_MAP_from_PT(kind='CAL')
figkey1 = 'CHINJ01_curves_MAP_IG1_CAL'
figname1 = self.figs[figkey1]
self.plot_XYMAP(CURVES_IG1CAL_MAP, **dict(
suptitle='Charge Injection Curves - Calibrated IG1',
doLegend=True,
ylabel='Inj [kel]',
xlabel='IG1 [V]',
corekwargs=dict(E=dict(linestyle='-', marker='', color='r'),
F=dict(linestyle='-', marker='', color='g'),
G=dict(linestyle='-', marker='', color='b'),
H=dict(linestyle='-', marker='', color='m')),
figname=figname1
))
if self.report is not None:
self.addFigure2Report(figname1,
figkey=figkey1,
caption='CHINJ01: Charge injection level [ke-] as a function of '+\
'calibrated IG1 voltage.',
texfraction=0.7)
# saving charge injection parameters to a json CDP
ICURVES_PAR_MAP = self.get_FPAMAP_from_PT(
self.ParsedTable['CHINJ01'],
extractor=self._extract_INJCURVES_PAR_fromPT)
ic_header = OrderedDict()
ic_header['title'] = 'Injection Curves Parameters'
ic_header['test'] = 'CHINJ01'
ic_header.update(CDP_header)
ic_meta = OrderedDict()
        ic_meta['units'] = '/2^16 ADU'
ic_meta['model'] = 'I=b+1/(1+exp(-K(IG1-XT))) * (-A*(IG1-XN)[IG1<XN] + N)'
ic_meta['structure'] = ''
ic_cdp = cdp.Json_CDP(rootname=self.outcdps['INJCURVES'],
path=self.cdpspath)
ic_cdp.ingest_inputs(data=ICURVES_PAR_MAP,
header = ic_header,
meta=ic_meta)
ic_cdp.savehardcopy()
# Injection level vs. Calibrated IG1, single plot
IG1CAL_Singledict = self._get_XYdict_INJ(kind='CAL')
figkey2 = 'CHINJ01_curves_IG1_CAL'
figname2 = self.figs[figkey2]
IG1CAL_kwargs = dict(
title='Charge Injection Curves - Calibrated IG1',
doLegend=False,
xlabel='IG1 (Calibrated) [V]',
ylabel='Injection [kel]',
figname=figname2)
corekwargs = dict()
for block in self.flight_blocks:
for iCCD in self.CCDs:
corekwargs['%s_CCD%i_E' % (block, iCCD)] = dict(linestyle='-',
marker='', color='#FF4600') # red
corekwargs['%s_CCD%i_F' % (block, iCCD)] = dict(linestyle='-',
marker='', color='#61FF00') # green
corekwargs['%s_CCD%i_G' % (block, iCCD)] = dict(linestyle='-',
marker='', color='#00FFE0') # cyan
corekwargs['%s_CCD%i_H' % (block, iCCD)] = dict(linestyle='-',
marker='', color='#1700FF') # blue
IG1CAL_kwargs['corekwargs'] = corekwargs.copy()
self.plot_XY(IG1CAL_Singledict, **IG1CAL_kwargs)
if self.report is not None:
self.addFigure2Report(figname2,
figkey=figkey2,
caption='CHINJ01: Charge injection level [ke-] as a function of '+\
'calibrated IG1 voltage.',
texfraction=0.7)
# Injection level vs. Non-Calibrated IG1, single plot
IG1RAW_Singledict = self._get_XYdict_INJ(kind='RAW')
figkey3 = 'CHINJ01_curves_IG1_RAW'
figname3 = self.figs[figkey3]
IG1RAW_kwargs = dict(
title='Charge Injection Curves - RAW IG1',
doLegend=False,
xlabel='IG1 (RAW) [V]',
ylabel='Injection [kel]',
figname=figname3)
corekwargs = dict()
for block in self.flight_blocks:
for iCCD in self.CCDs:
corekwargs['%s_CCD%i_E' % (block, iCCD)] = dict(linestyle='-',
marker='', color='#FF4600') # red
corekwargs['%s_CCD%i_F' % (block, iCCD)] = dict(linestyle='-',
marker='', color='#61FF00') # green
corekwargs['%s_CCD%i_G' % (block, iCCD)] = dict(linestyle='-',
marker='', color='#00FFE0') # cyan
corekwargs['%s_CCD%i_H' % (block, iCCD)] = dict(linestyle='-',
marker='', color='#1700FF') # blue
IG1RAW_kwargs['corekwargs'] = corekwargs.copy()
self.plot_XY(IG1RAW_Singledict, **IG1RAW_kwargs)
if self.report is not None:
self.addFigure2Report(figname3,
figkey=figkey3,
caption='CHINJ01: Charge injection level [ke-] as a function of '+\
'Non-calibrated IG1 voltage.',
texfraction=0.7)
# Notch level vs. calibrated IG2
# Notch level vs. calibrated IDL
# Notch level vs. calibrated OD
# Notch injection map, ADUs
NOTCHADUMAP = self.get_FPAMAP_from_PT(
self.ParsedTable['CHINJ01'],
extractor=self._get_extractor_NOTCH_fromPT(
units='ADU'))
figkey4 = 'NOTCH_ADU_MAP'
figname4 = self.figs[figkey4]
self.plot_SimpleMAP(NOTCHADUMAP, **dict(
suptitle='CHINJ01: NOTCH INJECTION [ADU]',
ColorbarText='ADU',
figname=figname4))
if self.report is not None:
self.addFigure2Report(figname4,
figkey=figkey4,
caption='CHINJ01: notch injection level, in ADU.',
texfraction=0.7)
# Notch injection map, ELECTRONs
NOTCHEMAP = self.get_FPAMAP_from_PT(self.ParsedTable['CHINJ01'],
extractor=self._get_extractor_NOTCH_fromPT(units='E'))
figkey5 = 'NOTCH_ELE_MAP'
figname5 = self.figs[figkey5]
self.plot_SimpleMAP(NOTCHEMAP, **dict(
suptitle='CHINJ01: NOTCH INJECTION [ELECTRONS]',
ColorbarText='electrons',
figname=figname5))
if self.report is not None:
self.addFigure2Report(figname5,
figkey=figkey5,
caption='CHINJ01: notch injection level, in electrons.',
texfraction=0.7)
# Average injection profiles
IG1profs = 4.5
xlabels_profs = dict(hor='column [pix]',
ver='row [pix]')
ylabels_profs = dict(hor='Injection level [Normalized]',
ver='Injection level [ADU]',)
proftypes = ['hor','ver']
ccdhalves = ['top','bot']
BLOCKcolors = cm.rainbow(np.linspace(0, 1, len(self.flight_blocks)))
pointcorekwargs = dict()
for jblock, block in enumerate(self.flight_blocks):
jcolor = BLOCKcolors[jblock]
for iCCD in self.CCDs:
for kQ in self.Quads:
pointcorekwargs['%s_CCD%i_%s' % (block, iCCD, kQ)] = dict(
linestyle='', marker='.', color=jcolor, ms=2.0)
for ccdhalf in ccdhalves:
if ccdhalf == 'top':
_Quads = ['G','H']
elif ccdhalf == 'bot':
_Quads = ['E','F']
for proftype in proftypes:
if proftype == 'hor':
xrangeNorm = None
elif proftype == 'ver':
xrangeNorm = [10,20]
XY_profs = self._get_XYdict_PROFS(proftype=proftype,
IG1=IG1profs,Quads=_Quads, doNorm=True,
xrangeNorm=xrangeNorm)
figkey6 = 'PROFS_%s_%s' % (proftype.upper(),ccdhalf.upper())
figname6 = self.figs[figkey6]
                title = 'CHINJ01: Direction: %s, CCDHalf: %s' % \
                    (proftype.upper(),ccdhalf.upper())
if proftype == 'ver':
xlim=[0,50]
ylim=None
elif proftype == 'hor':
xlim=None
ylim=[0.5,1.5]
profkwargs = dict(
title=title,
doLegend=False,
xlabel=xlabels_profs[proftype],
xlim=xlim,
ylim=ylim,
ylabel=ylabels_profs[proftype],
figname=figname6,
corekwargs=pointcorekwargs)
self.plot_XY(XY_profs, **profkwargs)
if proftype == 'ver':
captemp = 'CHINJ01: Average (normalized) injection profiles in vertical direction (along CCD columns) '+\
'for IG1=%.2fV. Only the 2 channels in the CCD %s-half are shown '+\
'(%s, %s). Each colour corresponds to a '+\
'different block (2x3 quadrant-channels in each colour).'
elif proftype == 'hor':
captemp = 'CHINJ01: Average injection profiles in horizontal direction (along CCD rows) '+\
'for IG1=%.2fV. The profiles have been normalized by the median injection level. '+\
'Only the 2 channels in the CCD %s-half are shown (%s, %s). Each colour corresponds to a '+\
'different block (2x3 quadrant-channels in each colour).'
if self.report is not None:
self.addFigure2Report(figname6,
figkey=figkey6,
caption= captemp % (IG1profs, ccdhalf, _Quads[0],_Quads[1]),
texfraction=0.7)
# Average injection vertical profiles, zoomed in to highlight
# non-perfect charge injection shut-down.
pointcorekwargs = dict()
for jblock, block in enumerate(self.flight_blocks):
jcolor = BLOCKcolors[jblock]
for iCCD in self.CCDs:
for kQ in self.Quads:
pointcorekwargs['%s_CCD%i_%s' % (block, iCCD, kQ)] = dict(
linestyle='', marker='.', color=jcolor, ms=2.0)
for ccdhalf in ccdhalves:
if ccdhalf == 'top':
_Quads = ['G','H']
elif ccdhalf == 'bot':
_Quads = ['E','F']
XY_profs = self._get_XYdict_PROFS(proftype='ver',
IG1=IG1profs,Quads=_Quads, doNorm=True,
xrangeNorm=[10,20])
figkey7 = 'PROFS_ver_%s_ZOOM' % (ccdhalf.upper(),)
figname7 = self.figs[figkey7]
            title = 'CHINJ01: Direction: ver, CCDHalf: %s, ZOOM-in' % \
                (ccdhalf.upper(),)
xlim=[25,50]
ylim=[0,4.e-3]
profkwargs = dict(
title=title,
doLegend=False,
xlabel=xlabels_profs[proftype],
xlim=xlim,
ylim=ylim,
ylabel=ylabels_profs[proftype],
figname=figname7,
corekwargs=pointcorekwargs)
self.plot_XY(XY_profs, **profkwargs)
captemp = 'CHINJ01: Average injection profiles in vertical direction (along CCD columns) '+\
'for IG1=%.2fV. Only the 2 channels in the CCD %s-half are shown '+\
'(%s, %s). Each colour corresponds to a '+\
'different block (2x3 quadrant-channels in each colour). Zoomed in '+\
'to highlight injection shutdown profile.'
if self.report is not None:
self.addFigure2Report(figname7,
figkey=figkey7,
caption= captemp % (IG1profs, ccdhalf, _Quads[0],_Quads[1]),
texfraction=0.7)
# creating and saving INJ PROFILES CDPs.
for direction in ['hor','ver']:
_injprof_xlsx_cdp = self.get_injprof_xlsx_cdp(direction=direction,
inCDP_header=CDP_header)
_injprof_xlsx_cdp.savehardcopy()
_injprof_fits_cdp = self.get_injprof_fits_cdp(direction=direction,
inCDP_header=CDP_header)
_injprof_fits_cdp.savehardcopy()
# reporting non-uniformity of injection lines to report
if self.report is not None:
NUN_HOR = self.get_FPAMAP_from_PT(self.ParsedTable['CHINJ01'],
extractor=self._extract_NUNHOR_fromPT)
nun_cdpdict = dict(
caption='CHINJ01: Non-Uniformity of the injection lines, rms, as percentage.',
valformat='%.2f')
ignore = self.add_StdQuadsTable2Report(
Matrix = NUN_HOR,
cdpdict = nun_cdpdict)
| ruymanengithub/vison | vison/metatests/chinj01.py | Python | gpl-3.0 | 34,380 | 0.006603 |
# -*- coding: utf-8 -*-
# ------------------------------------------------------------
# streamondemand 5
# Copyright 2015 tvalacarta@gmail.com
# http://www.mimediacenter.info/foro/viewforum.php?f=36
#
# Distributed under the terms of GNU General Public License v3 (GPLv3)
# http://www.gnu.org/licenses/gpl-3.0.html
# ------------------------------------------------------------
# This file is part of streamondemand 5.
#
# streamondemand 5 is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# streamondemand 5 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with streamondemand 5. If not, see <http://www.gnu.org/licenses/>.
# --------------------------------------------------------------------------------
# Scraper tools for reading and processing web elements
# --------------------------------------------------------------------------------
import re
import socket
import time
import logger
from core import httptools
def cache_page(url,post=None,headers=None,modo_cache=None, timeout=None):
return cachePage(url,post,headers,modo_cache,timeout=timeout)
def cachePage(url,post=None,headers=None,modoCache=None, timeout=None):
data = downloadpage(url,post=post,headers=headers, timeout=timeout)
return data
def downloadpage(url,post=None,headers=None, follow_redirects=True, timeout=None, header_to_get=None):
response = httptools.downloadpage(url, post=post, headers=headers, follow_redirects = follow_redirects, timeout=timeout)
if header_to_get:
return response.headers.get(header_to_get)
else:
return response.data
def downloadpageWithResult(url,post=None,headers=None,follow_redirects=True, timeout=None, header_to_get=None):
response = httptools.downloadpage(url, post=post, headers=headers, follow_redirects = follow_redirects, timeout=timeout)
if header_to_get:
return response.headers.get(header_to_get)
else:
return response.data, response.code
def downloadpageWithoutCookies(url):
response = httptools.downloadpage(url, cookies=False)
return response.data
def downloadpageGzip(url):
response = httptools.downloadpage(url, add_referer=True)
return response.data
def getLocationHeaderFromResponse(url):
response = httptools.downloadpage(url, only_headers=True, follow_redirects=False)
return response.headers.get("location")
def get_header_from_response(url, header_to_get="", post=None, headers=None, follow_redirects=False):
header_to_get = header_to_get.lower()
response = httptools.downloadpage(url, post=post, headers=headers, only_headers=True,
follow_redirects=follow_redirects)
return response.headers.get(header_to_get)
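# Illustrative usage: get_header_from_response(url, header_to_get="location") returns the
# redirect target of url (header names are lower-cased), or None if the header is absent.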
def get_headers_from_response(url, post=None, headers=None, follow_redirects=False):
response = httptools.downloadpage(url, post=post, headers=headers, only_headers=True,
follow_redirects=follow_redirects)
return response.headers.items()
def read_body_and_headers(url, post=None, headers=None, follow_redirects=False, timeout=None):
response = httptools.downloadpage(url, post=post, headers=headers, follow_redirects=follow_redirects, timeout=timeout)
return response.data, response.headers
def anti_cloudflare(url, headers=None, post=None):
    # anti-cloudflare handling is already integrated into httptools by default
response = httptools.downloadpage(url, post=post, headers=headers)
return response.data
def printMatches(matches):
i = 0
for match in matches:
logger.info("streamondemand-pureita-master.core.scrapertools %d %s" % (i , match))
i = i + 1
def get_match(data,patron,index=0):
matches = re.findall( patron , data , flags=re.DOTALL )
return matches[index]
def find_single_match(data,patron,index=0):
try:
matches = re.findall( patron , data , flags=re.DOTALL )
return matches[index]
except:
return ""
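# Illustrative usage: find_single_match('<a href="clip.mp4">', 'href="([^"]+)"') returns 'clip.mp4',
# and returns "" (instead of raising) when the pattern does not match.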
# Parse string and extracts multiple matches using regular expressions
def find_multiple_matches(text,pattern):
return re.findall(pattern,text,re.DOTALL)
def entityunescape(cadena):
return unescape(cadena)
def unescape(text):
"""Removes HTML or XML character references
       and entities from a text string, replacing them
       with their UTF-8 characters.
    Adapted from Fredrik Lundh's recipe:
http://effbot.org/zone/re-sub.htm#unescape-html
"""
def fixup(m):
text = m.group(0)
if text[:2] == "&#":
# character reference
try:
if text[:3] == "&#x":
return unichr(int(text[3:-1], 16)).encode("utf-8")
else:
return unichr(int(text[2:-1])).encode("utf-8")
except ValueError:
logger.info("error de valor")
pass
else:
# named entity
try:
'''
if text[1:-1] == "amp":
text = "&amp;"
elif text[1:-1] == "gt":
text = "&gt;"
elif text[1:-1] == "lt":
text = "&lt;"
else:
print text[1:-1]
text = unichr(htmlentitydefs.name2codepoint[text[1:-1]]).encode("utf-8")
'''
import htmlentitydefs
text = unichr(htmlentitydefs.name2codepoint[text[1:-1]]).encode("utf-8")
except KeyError:
logger.info("keyerror")
pass
except:
pass
return text # leave as is
return re.sub("&#?\w+;", fixup, text)
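# Illustrative usage: unescape("a&#241;o") and unescape("a&ntilde;o") both return the UTF-8 encoded "año".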
# Converts HTML entity codes such as "&ntilde;" into the corresponding UTF-8 unicode character ("ñ")
def decodeHtmlentities(string):
string = entitiesfix(string)
entity_re = re.compile("&(#?)(\d{1,5}|\w{1,8});")
def substitute_entity(match):
from htmlentitydefs import name2codepoint as n2cp
ent = match.group(2)
if match.group(1) == "#":
return unichr(int(ent)).encode('utf-8')
else:
cp = n2cp.get(ent)
if cp:
return unichr(cp).encode('utf-8')
else:
return match.group()
return entity_re.subn(substitute_entity, string)[0]
def entitiesfix(string):
    # Entities always start with the "&" symbol and end with a semicolon (";").
    string = string.replace("&aacute;","á")
    string = string.replace("&eacute;","é")
    string = string.replace("&iacute;","í")
    string = string.replace("&oacute;","ó")
    string = string.replace("&uacute;","ú")
    string = string.replace("&Aacute;","Á")
    string = string.replace("&Eacute;","É")
    string = string.replace("&Iacute;","Í")
    string = string.replace("&Oacute;","Ó")
    string = string.replace("&Uacute;","Ú")
    string = string.replace("&uuml;" ,"ü")
    string = string.replace("&Uuml;" ,"Ü")
    string = string.replace("&ntilde;","ñ")
    string = string.replace("&iquest;" ,"¿")
    string = string.replace("&iexcl;" ,"¡")
string = string.replace(";;" ,";")
return string
def htmlclean(cadena):
cadena = re.compile("<!--.*?-->",re.DOTALL).sub("",cadena)
cadena = cadena.replace("<center>","")
cadena = cadena.replace("</center>","")
cadena = cadena.replace("<cite>","")
cadena = cadena.replace("</cite>","")
cadena = cadena.replace("<em>","")
cadena = cadena.replace("</em>","")
cadena = cadena.replace("<u>","")
cadena = cadena.replace("</u>","")
cadena = cadena.replace("<li>","")
cadena = cadena.replace("</li>","")
cadena = cadena.replace("<turl>","")
cadena = cadena.replace("</tbody>","")
cadena = cadena.replace("<tr>","")
cadena = cadena.replace("</tr>","")
cadena = cadena.replace("<![CDATA[","")
cadena = cadena.replace("<Br />"," ")
cadena = cadena.replace("<BR />"," ")
cadena = cadena.replace("<Br>"," ")
cadena = re.compile("<br[^>]*>",re.DOTALL).sub(" ",cadena)
cadena = re.compile("<script.*?</script>",re.DOTALL).sub("",cadena)
cadena = re.compile("<option[^>]*>",re.DOTALL).sub("",cadena)
cadena = cadena.replace("</option>","")
cadena = re.compile("<button[^>]*>",re.DOTALL).sub("",cadena)
cadena = cadena.replace("</button>","")
cadena = re.compile("<i[^>]*>",re.DOTALL).sub("",cadena)
cadena = cadena.replace("</iframe>","")
cadena = cadena.replace("</i>","")
cadena = re.compile("<table[^>]*>",re.DOTALL).sub("",cadena)
cadena = cadena.replace("</table>","")
cadena = re.compile("<td[^>]*>",re.DOTALL).sub("",cadena)
cadena = cadena.replace("</td>","")
cadena = re.compile("<div[^>]*>",re.DOTALL).sub("",cadena)
cadena = cadena.replace("</div>","")
cadena = re.compile("<dd[^>]*>",re.DOTALL).sub("",cadena)
cadena = cadena.replace("</dd>","")
cadena = re.compile("<b[^>]*>",re.DOTALL).sub("",cadena)
cadena = cadena.replace("</b>","")
cadena = re.compile("<font[^>]*>",re.DOTALL).sub("",cadena)
cadena = cadena.replace("</font>","")
cadena = re.compile("<strong[^>]*>",re.DOTALL).sub("",cadena)
cadena = cadena.replace("</strong>","")
cadena = re.compile("<small[^>]*>",re.DOTALL).sub("",cadena)
cadena = cadena.replace("</small>","")
cadena = re.compile("<span[^>]*>",re.DOTALL).sub("",cadena)
cadena = cadena.replace("</span>","")
cadena = re.compile("<a[^>]*>",re.DOTALL).sub("",cadena)
cadena = cadena.replace("</a>","")
cadena = re.compile("<p[^>]*>",re.DOTALL).sub("",cadena)
cadena = cadena.replace("</p>","")
cadena = re.compile("<ul[^>]*>",re.DOTALL).sub("",cadena)
cadena = cadena.replace("</ul>","")
cadena = re.compile("<h1[^>]*>",re.DOTALL).sub("",cadena)
cadena = cadena.replace("</h1>","")
cadena = re.compile("<h2[^>]*>",re.DOTALL).sub("",cadena)
cadena = cadena.replace("</h2>","")
cadena = re.compile("<h3[^>]*>",re.DOTALL).sub("",cadena)
cadena = cadena.replace("</h3>","")
cadena = re.compile("<h4[^>]*>",re.DOTALL).sub("",cadena)
cadena = cadena.replace("</h4>","")
cadena = re.compile("<!--[^-]+-->",re.DOTALL).sub("",cadena)
cadena = re.compile("<img[^>]*>",re.DOTALL).sub("",cadena)
cadena = re.compile("<object[^>]*>",re.DOTALL).sub("",cadena)
cadena = cadena.replace("</object>","")
cadena = re.compile("<param[^>]*>",re.DOTALL).sub("",cadena)
cadena = cadena.replace("</param>","")
cadena = re.compile("<embed[^>]*>",re.DOTALL).sub("",cadena)
cadena = cadena.replace("</embed>","")
cadena = re.compile("<title[^>]*>",re.DOTALL).sub("",cadena)
cadena = cadena.replace("</title>","")
cadena = re.compile("<link[^>]*>",re.DOTALL).sub("",cadena)
cadena = cadena.replace("\t","")
cadena = entityunescape(cadena)
return cadena
def slugify(title):
#print title
    # Replace accented vowels and "ñ" characters
title = title.replace("Á","a")
title = title.replace("É","e")
title = title.replace("Í","i")
title = title.replace("Ó","o")
title = title.replace("Ú","u")
title = title.replace("á","a")
title = title.replace("é","e")
title = title.replace("í","i")
title = title.replace("ó","o")
title = title.replace("ú","u")
title = title.replace("À","a")
title = title.replace("È","e")
title = title.replace("Ì","i")
title = title.replace("Ò","o")
title = title.replace("Ù","u")
title = title.replace("à","a")
title = title.replace("è","e")
title = title.replace("ì","i")
title = title.replace("ò","o")
title = title.replace("ù","u")
title = title.replace("ç","c")
title = title.replace("Ç","C")
title = title.replace("Ñ","n")
title = title.replace("ñ","n")
title = title.replace("/","-")
    title = title.replace("&amp;","&")
    # Convert to lowercase
title = title.lower().strip()
    # Remove invalid characters
validchars = "abcdefghijklmnopqrstuvwxyz1234567890- "
title = ''.join(c for c in title if c in validchars)
    # Collapse duplicated whitespace and line breaks
title = re.compile("\s+",re.DOTALL).sub(" ",title)
    # Replace whitespace with hyphens
title = re.compile("\s",re.DOTALL).sub("-",title.strip())
    # Collapse duplicated hyphens
title = re.compile("\-+",re.DOTALL).sub("-",title)
    # Handle special cases
if title.startswith("-"):
title = title [1:]
if title=="":
title = "-"+str(time.time())
return title
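# Illustrative usage: slugify("Árbol / Niño") returns "arbol-nino".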
def remove_htmltags(string):
return re.sub('<[^<]+?>', '', string)
def remove_show_from_title(title,show):
#print slugify(title)+" == "+slugify(show)
    # Strip the show name from the title
if slugify(title).startswith(slugify(show)):
        # Convert to unicode first, otherwise the encoding gets lost
title = unicode(title,"utf-8","replace")
show = unicode(show,"utf-8","replace")
title = title[ len(show) : ].strip()
if title.startswith("-"):
title = title[ 1: ].strip()
if title=="":
title = str( time.time() )
        # Back to utf-8
title = title.encode("utf-8","ignore")
show = show.encode("utf-8","ignore")
return title
def getRandom(str):
return get_md5(str)
def unseo(cadena):
if cadena.upper().startswith("VER GRATIS LA PELICULA "):
cadena = cadena[23:]
elif cadena.upper().startswith("VER GRATIS PELICULA "):
cadena = cadena[20:]
elif cadena.upper().startswith("VER ONLINE LA PELICULA "):
cadena = cadena[23:]
elif cadena.upper().startswith("VER GRATIS "):
cadena = cadena[11:]
elif cadena.upper().startswith("VER ONLINE "):
cadena = cadena[11:]
elif cadena.upper().startswith("DESCARGA DIRECTA "):
cadena = cadena[17:]
return cadena
#scrapertools.get_filename_from_url(media_url)[-4:]
def get_filename_from_url(url):
import urlparse
parsed_url = urlparse.urlparse(url)
try:
filename = parsed_url.path
except:
        # If this fails, the parsed_url implementation does not expose attributes such as "path"
if len(parsed_url)>=4:
filename = parsed_url[2]
else:
filename = ""
if "/" in filename:
filename = filename.split("/")[-1]
return filename
def get_domain_from_url(url):
import urlparse
parsed_url = urlparse.urlparse(url)
try:
filename = parsed_url.netloc
except:
        # If this fails, the parsed_url implementation does not expose attributes such as "path"
if len(parsed_url)>=4:
filename = parsed_url[1]
else:
filename = ""
return filename
def get_season_and_episode(title):
"""
    Returns the season and episode number in "1x01" format, extracted from the title of an episode
    Examples of different values for title and the value returned:
        "serie 101x1.strm", "s101e1.avi", "t101e1.avi" -> '101x01'
        "Name TvShow 1x6.avi" -> '1x06'
        "Temp 3 episodio 2.avi" -> '3x02'
        "Alcantara season 13 episodie 12.avi" -> '13x12'
        "Temp1 capitulo 14" -> '1x14'
        "Temporada 1: El origen Episodio 9" -> '' (no other text may appear between the season number and the episode)
        "Episodio 25: titulo episodio" -> '' (the season number is missing)
        "Serie X Temporada 1" -> '' (the episode number is missing)
    @type title: str
    @param title: title of an episode of a TV series
    @rtype: str
    @return: Season and episode number in "1x01" format, or an empty string if they were not found
"""
filename = ""
patrons = ["(\d+)x(\d+)", "(?:s|t)(\d+)e(\d+)",
"(?:season|stag\w*)\s*(\d+)\s*(?:capitolo|epi\w*)\s*(\d+)"]
for patron in patrons:
try:
matches = re.compile(patron, re.I).search(title)
if matches:
filename = matches.group(1) + "x" + matches.group(2).zfill(2)
break
except:
pass
logger.info("'" + title + "' -> '" + filename + "'")
return filename
def get_sha1(cadena):
try:
import hashlib
devuelve = hashlib.sha1(cadena).hexdigest()
except:
import sha
import binascii
devuelve = binascii.hexlify(sha.new(cadena).digest())
return devuelve
def get_md5(cadena):
try:
import hashlib
devuelve = hashlib.md5(cadena).hexdigest()
except:
import md5
import binascii
devuelve = binascii.hexlify(md5.new(cadena).digest())
return devuelve
def internet(host="8.8.8.8", port=53, timeout=3):
"""
Host: 8.8.8.8 (google-public-dns-a.google.com)
OpenPort: 53/tcp
Service: domain (DNS/TCP)
"""
try:
deftimeout = socket.getdefaulttimeout()
socket.setdefaulttimeout(timeout)
socket.socket(socket.AF_INET, socket.SOCK_STREAM).connect((host, port))
socket.setdefaulttimeout(deftimeout)
return True
except:
pass
return False
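# Illustrative sketch, not part of the original module: how internet() is typically
# used to guard network access before making requests.
def _example_check_connectivity():
    if internet(timeout=1):
        print("Network reachable, continuing")
    else:
        print("No connectivity, will retry later")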
def wait_for_internet(wait=30, retry=5):
import xbmc
monitor = xbmc.Monitor()
count = 0
while True:
if internet():
return True
count += 1
if count >= retry or monitor.abortRequested():
return False
monitor.waitForAbort(wait)
| orione7/plugin.video.streamondemand-pureita | core/scrapertools.py | Python | gpl-3.0 | 17,985 | 0.015934 |
"""
These are the Connection classes, relatively high level classes that handle
incoming or outgoing network connections.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
##############################################################################
LICENSE = """\
This file is part of pagekite.py.
Copyright 2010-2020, the Beanstalks Project ehf. and Bjarni Runar Einarsson
This program is free software: you can redistribute it and/or modify it under
the terms of the GNU Affero General Public License as published by the Free
Software Foundation, either version 3 of the License, or (at your option) any
later version.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see: <http://www.gnu.org/licenses/>
"""
##############################################################################
import six
import socket
import sys
import threading
import time
from pagekite.compat import *
from pagekite.common import *
import pagekite.common as common
import pagekite.logging as logging
from .filters import HttpSecurityFilter
from .selectables import *
from .parsers import *
from .proto import *
SMTP_PORTS = (25, 465, 587, 2525)
class Tunnel(ChunkParser):
"""A Selectable representing a PageKite tunnel."""
S_NAME = 0
S_PORTS = 1
S_RAW_PORTS = 2
S_PROTOS = 3
S_ADD_KITES = 4
S_IS_MOBILE = 5
S_VERSION = 6
S_WEBSOCKET = 7
def __init__(self, conns):
ChunkParser.__init__(self, ui=conns.config.ui)
if conns.config.websocket_chunks:
self.PrepareWebsockets()
self.server_info = ['x.x.x.x:x', [], [], [], False, False, None, False]
self.Init(conns)
def Init(self, conns):
self.conns = conns
self.users = {}
self.remote_ssl = {}
self.zhistory = {}
self.backends = {}
self.last_ping = 0
self.weighted_rtt = -1
self.using_tls = False
self.filters = []
self.ip_limits = None
self.maxread = int(common.MAX_READ_BYTES * common.MAX_READ_TUNNEL_X)
def Cleanup(self, close=True):
if self.users:
for sid in list(six.iterkeys(self.users)):
self.CloseStream(sid)
ChunkParser.Cleanup(self, close=close)
self.Init(None)
def __html__(self):
return ('<b>Server name</b>: %s<br>'
'%s') % (self.server_info[self.S_NAME], ChunkParser.__html__(self))
def LogTrafficStatus(self, final=False):
if self.ui:
if final:
message = 'Disconnected from: %s' % self.server_info[self.S_NAME]
self.ui.Status('down', color=self.ui.GREY, message=message)
else:
self.ui.Status('traffic')
def GetKiteRequests(self, parse):
requests = []
for prefix in ('X-Beanstalk', 'X-PageKite'):
for bs in parse.Header(prefix):
        # X-PageKite: proto:my.domain.com:srand:token:signature
proto, domain, srand, token, sign = bs.split(':')
requests.append((proto.lower(), domain.lower(),
srand, token, sign, prefix))
return requests
def RejectTraffic(self, client_conn, address, host):
# This function allows the tunnel to reject an incoming connection
# based on the remote address and the requested host. For now we
# only know how to discriminate by remote IP.
return self.RejectRemoteIP(client_conn, str(address[0]), host) or False
def RejectRemoteIP(self, client_conn, ip, host):
if not self.ip_limits:
return False
if len(self.ip_limits) == 1:
whitelist = self.ip_limits[0]
delta = maxips = seen = None
else:
whitelist = None
delta, maxips, seen = self.ip_limits
# Do we have a whitelist-only policy for this tunnel?
if whitelist:
for prefix in whitelist:
if ip.startswith(prefix):
return False
self.LogError('Rejecting connection from unrecognized IP')
return 'not_whitelisted'
# Do we have a delta/maxips policy?
if delta and maxips:
# Since IP addresses are often shared, we try to differentiate browsers
      # based on a few of the request headers as well. We don't track cookies
# since they're mutated by the site itself, which would lead to false
# positives here.
client = ip
log_info = []
if hasattr(client_conn, 'parser'):
if hasattr(client_conn.parser, 'Header'):
client = sha1hex('/'.join([ip] +
(client_conn.parser.Header('User-Agent') or []) +
(client_conn.parser.Header('Accept-Language') or [])))
if hasattr(client_conn.parser, 'method'):
log_info.append(
(str(client_conn.parser.method), str(client_conn.parser.path)))
now = time.time()
if client in seen:
seen[client] = now
return False
for seen_ip in list(six.iterkeys(seen)):
if seen[seen_ip] < now - delta:
del seen[seen_ip]
if len(seen) >= maxips:
self.LogError('Rejecting connection from new client',
[('client', client[:12]),
('ips_per_sec', '%d/%ds' % (maxips, delta)),
('domain', host)] + log_info)
return 'ips_per_sec'
else:
seen[client] = now
return False
# All else is allowed
return False
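  # Illustrative sketch (values are assumptions, not defaults): self.ip_limits
  # takes one of two shapes.
  #   Whitelist only:  self.ip_limits = [['10.', '192.168.']]
  #     -> only client IPs starting with one of these prefixes are accepted.
  #   Rate limited:    self.ip_limits = [seconds, max_ips, seen]
  #     e.g. [60, 5, {}] -> at most 5 distinct clients per 60 seconds, where
  #     `seen` maps a client fingerprint (IP plus a few request headers) to the
  #     time it was last seen.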
def ProcessPageKiteHeaders(self, parser):
for prefix in ('X-Beanstalk', 'X-PageKite'):
for feature in parser.Header(prefix+'-Features'):
if feature == 'ZChunks':
if not self.conns.config.disable_zchunks:
self.EnableZChunks(level=1)
elif feature == 'AddKites':
self.server_info[self.S_ADD_KITES] = True
elif feature == 'Mobile':
self.server_info[self.S_IS_MOBILE] = True
# Track which versions we see in the wild.
version = 'old'
for v in parser.Header(prefix+'-Version'):
version = v
if common.gYamon:
common.gYamon.vadd('version-%s' % version, 1, wrap=10000000)
self.server_info[self.S_VERSION] = version
for replace in parser.Header(prefix+'-Replace'):
if replace in self.conns.conns_by_id:
repl = self.conns.conns_by_id[replace]
self.LogInfo('Disconnecting old tunnel: %s' % repl)
repl.Die(discard_buffer=True)
def _FrontEnd(conn, body, conns):
"""This is what the front-end does when a back-end requests a new tunnel."""
self = Tunnel(conns)
try:
if 'websocket' in conn.parser.Header('Upgrade'):
self.server_info[self.S_ADD_KITES] = True
self.server_info[self.S_WEBSOCKET] = (
''.join(conn.parser.Header('Sec-WebSocket-Key')) or True)
self.ProcessPageKiteHeaders(conn.parser)
requests = self.GetKiteRequests(conn.parser)
except Exception as err:
self.LogError('Discarding connection: %s' % err)
self.Cleanup()
return None
except socket.error as err:
self.LogInfo('Discarding connection: %s' % err)
self.Cleanup()
return None
try:
ips, seconds = conns.config.GetDefaultIPsPerSecond()
self.UpdateIP_Limits(ips, seconds)
except ValueError:
pass
self.last_activity = time.time()
self.CountAs('backends_live')
self.SetConn(conn)
if requests:
conns.auth().check(requests[:], conn,
lambda r, l: self.AuthCallback(conn, r, l))
elif self.server_info[self.S_WEBSOCKET]:
self.AuthCallback(conn, [], [])
return self
def RecheckQuota(self, conns, when=None):
if when is None: when = time.time()
if (self.quota and
self.quota[0] is not None and
self.quota[1] and
(self.quota[2] < when-900)):
self.quota[2] = when
self.LogDebug('Rechecking: %s' % (self.quota, ))
conns.auth().check(self.quota[1], self,
lambda r, l: self.QuotaCallback(conns, r, l))
def ProcessAuthResults(self, results, duplicates_ok=False, add_tunnels=True):
ok = []
bad = []
if not self.conns:
# This can be delayed until the connecting client gives up, which
# means we may have already called Die(). In that case, just abort.
return True
ok_results = ['X-PageKite-OK']
bad_results = ['X-PageKite-Invalid']
if duplicates_ok is True:
ok_results.extend(['X-PageKite-Duplicate'])
elif duplicates_ok is False:
bad_results.extend(['X-PageKite-Duplicate'])
for r in results:
if r[0] in ok_results:
ok.append(r[1])
elif r[0] in bad_results:
bad.append(r[1])
elif r[0] == 'X-PageKite-SessionID':
self.conns.SetAltId(self, r[1])
logi = []
if self.server_info[self.S_IS_MOBILE]:
logi.append(('mobile', 'True'))
if self.server_info[self.S_ADD_KITES]:
logi.append(('add_kites', 'True'))
if self.server_info[self.S_WEBSOCKET]:
logi.append(('websocket', 'True'))
if self.server_info[self.S_VERSION]:
logi.append(('version', self.server_info[self.S_VERSION]))
if bad:
for backend in bad:
if backend in self.backends:
del self.backends[backend]
proto, domain, srand = backend.split(':')
self.Log([('BE', 'Dead'), ('proto', proto), ('domain', domain)] + logi,
level=logging.LOG_LEVEL_MACH)
self.conns.CloseTunnel(proto, domain, self)
# Update IP rate limits, if necessary
first = True
for r in results:
if r[0] in ('X-PageKite-IPsPerSec',):
ips, seconds = [int(x) for x in r[1].split('/')]
self.UpdateIP_Limits(ips, seconds, force=first)
first = False
if first:
for backend in ok:
try:
proto, domain, srand = backend.split(':')
ips, seconds = self.conns.config.GetDefaultIPsPerSecond(domain)
self.UpdateIP_Limits(ips, seconds)
except ValueError:
pass
if add_tunnels:
if self.ip_limits and len(self.ip_limits) > 2:
logi.append(('ips_per_sec',
'%d/%ds' % (self.ip_limits[1], self.ip_limits[0])))
for backend in ok:
if backend not in self.backends:
self.backends[backend] = 1
proto, domain, srand = backend.split(':')
self.Log([('BE', 'Live'),
('proto', proto),
('domain', domain)] + logi,
level=logging.LOG_LEVEL_MACH)
self.conns.Tunnel(proto, domain, self)
if not ok:
if self.server_info[self.S_ADD_KITES] and not bad:
self.LogDebug('No tunnels configured, idling...')
self.conns.SetIdle(self, 60)
else:
self.LogWarning('No tunnels configured, closing connection.')
self.Die()
return True
def QuotaCallback(self, conns, results, log_info):
# Report new values to the back-end... unless they are mobile.
if self.quota and (self.quota[0] >= 0):
if not self.server_info[self.S_IS_MOBILE]:
self.SendQuota()
self.ProcessAuthResults(results, duplicates_ok=True, add_tunnels=False)
for r in results:
if r[0] in ('X-PageKite-OK', 'X-PageKite-Duplicate'):
return self
# Nothing is OK anymore, give up and shut down the tunnel.
self.Log(log_info)
self.LogWarning('Ran out of quota or account deleted, closing tunnel.')
self.Die()
return self
def AuthCallback(self, conn, results, log_info):
if log_info:
logging.Log(log_info)
if self.server_info[self.S_WEBSOCKET]:
output = [HTTP_WebsocketResponse(self.server_info[self.S_WEBSOCKET])]
extras = []
else:
output = [HTTP_ResponseHeader(200, 'OK'),
HTTP_Header('Transfer-Encoding', 'chunked')]
extras = output
if not self.conns.config.disable_zchunks:
output.append(HTTP_Header('X-PageKite-Features', 'ZChunks'))
extras.extend([
HTTP_Header('X-PageKite-Features', 'WebSockets'),
HTTP_Header('X-PageKite-Features', 'AddKites'),
HTTP_Header('X-PageKite-Protos', ', '.join(['%s' % p
for p in self.conns.config.server_protos])),
HTTP_Header('X-PageKite-Ports', ', '.join(
['%s' % self.conns.config.server_portalias.get(p, p)
for p in self.conns.config.server_ports]))])
if self.conns.config.server_raw_ports:
extras.append(
HTTP_Header('X-PageKite-Raw-Ports',
', '.join(['%s' % p for p
in self.conns.config.server_raw_ports])))
for r in results:
extras.append('%s: %s\r\n' % r)
output.append(HTTP_StartBody())
if not self.Send(output, activity=False):
conn.LogDebug('No tunnels configured, closing connection (send failed).')
self.Die(discard_buffer=True)
return self
if conn.quota and conn.quota[0]:
self.quota = conn.quota
self.Log([('BE-Quota', self.quota[0])])
if self.server_info[self.S_WEBSOCKET]:
self.EnableWebsockets()
self.SendChunked('NOOP: 1\r\n%s\r\n!' % ''.join(extras))
self.conns.Add(self)
elif self.ProcessAuthResults(results):
self.conns.Add(self)
else:
self.Die()
return self
def ChunkAuthCallback(self, results, log_info):
if log_info:
logging.Log(log_info, level=logging.LOG_LEVEL_MACH)
if self.ProcessAuthResults(results):
output = ['NOOP: 1\r\n']
for r in results:
output.append('%s: %s\r\n' % r)
output.append('\r\n!')
self.SendChunked(''.join(output), compress=False, just_buffer=True)
def _RecvHttpHeaders(self, fd=None):
data = ''
fd = fd or self.fd
while not data.endswith('\r\n\r\n') and not data.endswith('\n\n'):
try:
buf = s(fd.recv(1))
except:
# This is sloppy, but the back-end will just connect somewhere else
# instead, so laziness here should be fine.
buf = None
if buf is None or buf == '':
self.LogDebug('Remote end closed connection.')
return None
data += buf
self.read_bytes += len(buf)
if logging.DEBUG_IO:
print('<== IN (headers) =[%s]==(\n%s)==' % (self, data))
return data
def _Connect(self, server, conns, tokens=None):
if self.fd:
self.fd.close()
sspec = server.rsplit(':', 1)
if len(sspec) < 2:
sspec = (sspec[0], 443)
# Use chained SocksiPy to secure our communication.
socks.DEBUG = (logging.DEBUG_IO or socks.DEBUG) and logging.LogDebug
sock = socks.socksocket()
if socks.HAVE_SSL:
pp = socks.parseproxy
chain = [pp('default')]
if self.conns.config.fe_nocertcheck:
chain.append([socks.PROXY_TYPE_SSL_WEAK, sspec[0], int(sspec[1])])
elif self.conns.config.fe_certname:
chain.append(pp('http!%s!%s' % (sspec[0], sspec[1])))
chain.append(pp('ssl!%s!443' % ','.join(self.conns.config.fe_certname)))
for hop in chain:
sock.addproxy(*hop)
self.SetFD(sock)
try:
# Note: This value is a magic number which should correlate with
# bounds on auth thread queue length, set in AuthThread._run().
self.fd.settimeout(30.0) # Missing in Python 2.2
except:
self.fd.setblocking(1)
self.LogDebug('Connecting to %s:%s' % (sspec[0], sspec[1]))
self.fd.connect((sspec[0], int(sspec[1])))
replace_sessionid = self.conns.config.servers_sessionids.get(server, None)
if (not self.Send(HTTP_PageKiteRequest(server,
conns.config.backends,
tokens,
nozchunks=conns.config.disable_zchunks,
replace=replace_sessionid,
websocket_key=self.websocket_key),
activity=False, try_flush=True, allow_blocking=False)
or not self.Flush(wait=True, allow_blocking=False)):
self.LogError('Failed to send kite request, closing.')
raise IOError('Failed to send kite request, closing.')
data = self._RecvHttpHeaders()
if not data:
self.LogError('Failed to parse kite response, closing.')
raise IOError('Failed to parse kite response, closing.')
self.fd.setblocking(0)
parse = HttpLineParser(lines=data.splitlines(),
state=HttpLineParser.IN_RESPONSE)
return data, parse
def CheckForTokens(self, parse):
tcount = 0
tokens = {}
if parse:
for request in parse.Header('X-PageKite-SignThis'):
proto, domain, srand, token = request.split(':')
tokens['%s:%s' % (proto, domain)] = token
tcount += 1
return tcount, tokens
def ParsePageKiteCapabilities(self, parse):
for portlist in parse.Header('X-PageKite-Ports'):
self.server_info[self.S_PORTS].extend(portlist.split(', '))
for portlist in parse.Header('X-PageKite-Raw-Ports'):
self.server_info[self.S_RAW_PORTS].extend(portlist.split(', '))
for protolist in parse.Header('X-PageKite-Protos'):
self.server_info[self.S_PROTOS].extend(protolist.split(', '))
if not self.conns.config.disable_zchunks:
for feature in parse.Header('X-PageKite-Features'):
if feature == 'ZChunks':
self.EnableZChunks(level=9)
elif feature == 'AddKites':
self.server_info[self.S_ADD_KITES] = True
elif feature == 'Mobile':
self.server_info[self.S_IS_MOBILE] = True
def UpdateIP_Limits(self, ips, seconds, force=False):
if self.ip_limits and len(self.ip_limits) > 2 and not force:
new_rate = float(ips)/(seconds or 1) # Float division
old_rate = float(self.ip_limits[1] or 9999)/(self.ip_limits[0] or 1) # Float division
if new_rate < old_rate:
self.ip_limits[0] = seconds
self.ip_limits[1] = ips
else:
self.ip_limits = [(seconds or 1), ips, {}]
def HandlePageKiteResponse(self, parse):
config = self.conns.config
have_kites = 0
have_kite_info = None
sname = self.server_info[self.S_NAME]
config.ui.NotifyServer(self, self.server_info)
logged = 0
for misc in parse.Header('X-PageKite-Misc'):
args = parse_qs(misc)
logdata = [('FE', sname)]
for arg in args:
logdata.append((arg, args[arg][0]))
logging.Log(logdata, level=logging.LOG_LEVEL_MACH)
if 'motd' in args and args['motd'][0]:
config.ui.NotifyMOTD(sname, args['motd'][0])
logged += 1
# FIXME: Really, we should keep track of quota dimensions for
# each kite. At the moment that isn't even reported...
quota_log = []
for quota in parse.Header('X-PageKite-Quota'):
self.quota = [float(quota), None, None]
quota_log.append(('quota_bw', quota))
for quota in parse.Header('X-PageKite-QConns'):
self.q_conns = float(quota)
quota_log.append(('quota_conns', quota))
for quota in parse.Header('X-PageKite-QDays'):
self.q_days = float(quota)
quota_log.append(('quota_days', quota))
for quota in parse.Header('X-PageKite-IPsPerSec'):
quota_log.append(('ips_per_sec', quota))
try:
config.ui.NotifyIPsPerSec(*[int(i) for i in quota.split('/')])
except ValueError:
pass
if quota_log:
self.Log([('FE', sname)] + quota_log)
logged += 1
invalid_reasons = {}
for request in parse.Header('X-PageKite-Invalid-Why'):
# This is future-compatible, in that we can add more fields later.
details = request.split(';')
invalid_reasons[details[0]] = details[1]
logged += 1
for request in parse.Header('X-PageKite-Invalid'):
have_kite_info = True
proto, domain, srand = request.split(':')
reason = invalid_reasons.get(request, 'unknown')
self.Log([('FE', sname),
('err', 'Rejected'),
('proto', proto),
('reason', reason),
('domain', domain)],
level=logging.LOG_LEVEL_WARN)
config.ui.NotifyKiteRejected(proto, domain, reason, crit=True)
config.SetBackendStatus(domain, proto, add=BE_STATUS_ERR_TUNNEL)
logged += 1
for request in parse.Header('X-PageKite-Duplicate'):
have_kite_info = True
proto, domain, srand = request.split(':')
self.Log([('FE', self.server_info[self.S_NAME]),
('err', 'Duplicate'),
('proto', proto),
('domain', domain)],
level=logging.LOG_LEVEL_WARN)
config.ui.NotifyKiteRejected(proto, domain, 'duplicate')
config.SetBackendStatus(domain, proto, add=BE_STATUS_ERR_TUNNEL)
logged += 1
ssl_available = {}
for request in parse.Header('X-PageKite-SSL-OK'):
ssl_available[request] = True
logged += 1
for request in parse.Header('X-PageKite-OK'):
have_kite_info = True
have_kites += 1
proto, domain, srand = request.split(':')
self.conns.Tunnel(proto, domain, self)
status = BE_STATUS_OK
if request in ssl_available:
status |= BE_STATUS_REMOTE_SSL
self.remote_ssl[(proto, domain)] = True
self.Log([('FE', sname),
('proto', proto),
('domain', domain),
('ssl', (request in ssl_available))],
level=logging.LOG_LEVEL_INFO)
config.SetBackendStatus(domain, proto, add=status)
logged += 1
if logged:
if self.quota and self.quota[0] is not None:
config.ui.NotifyQuota(self.quota[0], self.q_days, self.q_conns)
# Also log the server capabilities
logging.Log([
('FE', sname),
('ports', ','.join(self.server_info[self.S_PORTS])),
('protocols', ','.join(self.server_info[self.S_PROTOS])),
('raw_ports', ','.join(self.server_info[self.S_RAW_PORTS] or []))])
return have_kite_info and have_kites
def _BackEnd(server, backends, require_all, conns):
"""This is the back-end end of a tunnel."""
self = Tunnel(conns)
if conns and not conns.config.isfrontend:
self.ExtendSSLRetryDelays()
self.backends = backends
self.require_all = require_all
self.server_info[self.S_NAME] = server
abort = True
try:
try:
data, parse = self._Connect(server, conns)
except:
logging.LogError('Error in connect: %s' % format_exc())
raise
if data and parse:
# Collect info about front-end capabilities, for interactive config
self.ParsePageKiteCapabilities(parse)
for sessionid in parse.Header('X-PageKite-SessionID'):
conns.SetAltId(self, sessionid)
conns.config.servers_sessionids[server] = sessionid
for upgrade in parse.Header('Upgrade'):
if upgrade.lower() == 'websocket':
self.EnableWebsockets()
abort = data = parse = False
tryagain, tokens = self.CheckForTokens(parse)
if tryagain:
if self.server_info[self.S_ADD_KITES]:
request = PageKiteRequestHeaders(server, conns.config.backends,
tokens)
abort = not self.SendChunked(('NOOP: 1\r\n%s\r\n\r\n!'
) % ''.join(request),
compress=False, just_buffer=True)
data = parse = None
else:
try:
data, parse = self._Connect(server, conns, tokens)
except:
logging.LogError('Error in connect: %s' % format_exc())
raise
if data and parse:
kites = self.HandlePageKiteResponse(parse)
abort = (kites is None) or (kites < 1)
except socket.error:
self.Cleanup()
return None
except Exception as e:
self.LogError('Connect failed: %s' % e)
self.Cleanup()
return None
if abort:
return False
conns.Add(self)
self.CountAs('frontends_live')
self.last_activity = time.time()
return self
FrontEnd = staticmethod(_FrontEnd)
BackEnd = staticmethod(_BackEnd)
def Send(self, data, try_flush=False, activity=False, just_buffer=False,
allow_blocking=True):
try:
if TUNNEL_SOCKET_BLOCKS and allow_blocking and not just_buffer:
if self.fd is not None:
self.fd.setblocking(1)
return ChunkParser.Send(self, data, try_flush=try_flush,
activity=activity,
just_buffer=just_buffer,
allow_blocking=allow_blocking)
finally:
if TUNNEL_SOCKET_BLOCKS and allow_blocking and not just_buffer:
if self.fd is not None:
self.fd.setblocking(0)
def SendData(self, conn, data, sid=None, host=None, proto=None, port=None,
chunk_headers=None):
sid = int(sid or conn.sid)
if conn: self.users[sid] = conn
if not sid in self.zhistory: self.zhistory[sid] = [0, 0]
# Pass outgoing data through any defined filters
for f in self.filters:
if 'data_out' in f.FILTERS:
try:
data = f.filter_data_out(self, sid, data)
except:
logging.LogWarning(('Ignoring error in filter_out %s: %s'
) % (f, format_exc()))
sending = ['SID: %s\r\n' % sid]
if proto: sending.append('Proto: %s\r\n' % proto)
if host: sending.append('Host: %s\r\n' % host)
if port:
porti = int(port)
if self.conns and (porti in self.conns.config.server_portalias):
sending.append('Port: %s\r\n' % self.conns.config.server_portalias[porti])
else:
sending.append('Port: %s\r\n' % port)
if chunk_headers:
for ch in chunk_headers: sending.append('%s: %s\r\n' % ch)
sending.append('\r\n')
# Small amounts of data we just send...
if len(data) <= 1024:
sending.append(data)
return self.SendChunked(sending, zhistory=self.zhistory.get(sid))
    # Larger amounts we break into fragments at the FE, to work around bugs
    # in some of our small-buffered embedded clients. We aim for roughly
    # one fragment per packet, assuming an MTU of 1500 bytes. We use
    # much larger fragments at the back-end, as relays can be assumed to
    # be up-to-date and larger chunks save CPU and improve throughput.
frag_size = self.conns.config.isfrontend and 1024 or (self.maxread+1024)
sending.append('')
frag_size = max(frag_size, 1400-len(''.join(sending)))
first = True
while data or first:
sending[-1] = data[:frag_size]
if not self.SendChunked(sending, zhistory=self.zhistory.get(sid)):
return False
data = data[frag_size:]
if first:
sending = ['SID: %s\r\n' % sid, '\r\n', '']
frag_size = max(frag_size, 1400-len(''.join(sending)))
first = False
return True
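  # Illustrative sketch (assumed values): a data chunk written to the tunnel is a
  # header block, a blank line, then the payload, e.g.:
  #   SID: 17\r\nProto: http\r\nHost: somekite.example.com\r\nPort: 80\r\n\r\n<payload>
  # Follow-up fragments of the same stream repeat only 'SID: 17\r\n\r\n' before the
  # payload; front-ends cut payloads into roughly MTU-sized (~1400 byte) fragments,
  # back-ends into larger (self.maxread+1024 byte) ones.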
def SendStreamEof(self, sid, write_eof=False, read_eof=False):
return self.SendChunked('SID: %s\r\nEOF: 1%s%s\r\n\r\nBye!' % (sid,
(write_eof or not read_eof) and 'W' or '',
(read_eof or not write_eof) and 'R' or ''),
compress=False)
def EofStream(self, sid, eof_type='WR'):
if sid in self.users and self.users[sid] is not None:
write_eof = (-1 != eof_type.find('W'))
read_eof = (-1 != eof_type.find('R'))
self.users[sid].ProcessTunnelEof(read_eof=(read_eof or not write_eof),
write_eof=(write_eof or not read_eof))
def CloseStream(self, sid, stream_closed=False):
if sid in self.users:
stream = self.users[sid]
del self.users[sid]
if not stream_closed and stream is not None:
stream.CloseTunnel(tunnel_closed=True)
if sid in self.zhistory:
del self.zhistory[sid]
def ResetRemoteZChunks(self):
return self.SendChunked('NOOP: 1\r\nZRST: 1\r\n\r\n!',
compress=False, just_buffer=True)
def TriggerPing(self):
when = time.time() - PING_GRACE_MIN - PING_INTERVAL_MAX
self.last_ping = self.last_activity = when
def SendPing(self):
now = time.time()
self.last_ping = int(now)
self.Log([
('FE', self.server_info[self.S_NAME]),
('pinged_tunnel', '@%.4f' % now)],
level=logging.LOG_LEVEL_DEBUG)
return self.SendChunked('NOOP: 1\r\nPING: %.4f\r\n\r\n!' % now,
compress=False, just_buffer=True)
def ProcessPong(self, pong):
try:
rtt = int(1000*(time.time()-float(pong)))
if self.weighted_rtt < 0:
self.weighted_rtt = rtt
else:
self.weighted_rtt = int(0.9 * self.weighted_rtt + 0.1 * rtt)
sname = self.server_info[self.S_NAME]
log_info = [('FE', sname),
('tunnel_ping_ms', '%d' % rtt),
('tunnel_ping_wrtt', '%d' % self.weighted_rtt)]
if self.weighted_rtt > 2500: # Magic number: 2.5 seconds is a long time!
if not self.conns.config.isfrontend:
# If the weighted RTT is this high, then we've had poor connectivity
# for quite some time. Set things in motion to try another relay.
self.conns.config.servers_errored[sname] = time.time()
self.conns.config.last_frontend_choice = 0
# Avoid re-triggering again right away
self.weighted_rtt = 0
log_info.append(('flagged', 'Flagged relay as broken'))
self.Log(log_info, level=(
logging.LOG_LEVEL_WARN if ('flagged' in log_info) else
logging.LOG_LEVEL_INFO))
if common.gYamon:
common.gYamon.ladd('tunnel_rtt', rtt)
common.gYamon.ladd('tunnel_wrtt', self.weighted_rtt)
except ValueError:
pass
def SendPong(self, data):
if (self.conns.config.isfrontend and
self.quota and (self.quota[0] >= 0)):
# May as well make ourselves useful!
return self.SendQuota(pong=data[:64])
else:
return self.SendChunked('NOOP: 1\r\nPONG: %s\r\n\r\n!' % data[:64],
compress=False, just_buffer=True)
def SendQuota(self, pong=''):
if pong:
pong = 'PONG: %s\r\n' % pong
if self.q_days is not None:
return self.SendChunked(('NOOP: 1\r\n%sQuota: %s\r\nQDays: %s\r\nQConns: %s\r\n\r\n!'
) % (pong, self.quota[0], self.q_days, self.q_conns),
compress=False, just_buffer=True)
else:
return self.SendChunked(('NOOP: 1\r\n%sQuota: %s\r\n\r\n!'
) % (pong, self.quota[0]),
compress=False, just_buffer=True)
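  # Illustrative sketch (assumed numbers) of the quota chunk sent above:
  #   NOOP: 1\r\nQuota: 5000\r\nQDays: 30\r\nQConns: 25\r\n\r\n!
  # The back-end parses these fields in ProcessChunkQuotaInfo and reports the
  # numbers to the UI via NotifyQuota.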
def SendProgress(self, sid, conn):
msg = ('NOOP: 1\r\n'
'SID: %s\r\n'
'SKB: %d\r\n\r\n') % (sid, (conn.all_out + conn.wrote_bytes)/1024)
return self.SendChunked(msg, compress=False, just_buffer=True)
def ProcessCorruptChunk(self, data):
self.ResetRemoteZChunks()
return True
def Probe(self, host):
for bid in self.conns.config.backends:
be = self.conns.config.backends[bid]
if be[BE_DOMAIN] == host:
bhost, bport = (be[BE_BHOST], be[BE_BPORT])
# FIXME: Should vary probe by backend type
if self.conns.config.Ping(bhost, int(bport)) > 2:
return False
return True
def ProgressTo(self, parse):
try:
sid = int(parse.Header('SID')[0])
skb = int((parse.Header('SKB') or [-1])[0])
if sid in self.users:
self.users[sid].RecordProgress(skb)
except:
logging.LogError(('Tunnel::ProgressTo: That made no sense! %s'
) % format_exc())
return True
# If a tunnel goes down, we just go down hard and kill all our connections.
def ProcessEofRead(self):
self.Die()
return False
def ProcessEofWrite(self):
return self.ProcessEofRead()
def ProcessChunkQuotaInfo(self, parse):
new_quota = 0
if parse.Header('QDays'):
self.q_days = new_quota = int(parse.Header('QDays'))
if parse.Header('QConns'):
self.q_conns = new_quota = int(parse.Header('QConns'))
if parse.Header('Quota'):
new_quota = 1
if self.quota:
self.quota[0] = int(parse.Header('Quota')[0])
else:
self.quota = [int(parse.Header('Quota')[0]), None, None]
if new_quota:
self.conns.config.ui.NotifyQuota(self.quota[0],
self.q_days, self.q_conns)
def ProcessChunkDirectives(self, parse):
if parse.Header('PONG'):
self.ProcessPong(parse.Header('PONG')[0])
if parse.Header('PING'):
return self.SendPong(parse.Header('PING')[0])
if parse.Header('ZRST') and not self.ResetZChunks():
return False
if parse.Header('SPD') or parse.Header('SKB'):
if not self.ProgressTo(parse):
return False
if parse.Header('NOOP'):
return True
return None
def FilterIncoming(self, sid, data=None, info=None, connecting=False):
"""Pass incoming data through filters, if we have any."""
for f in self.filters:
if 'data_in' in f.FILTERS or (connecting and 'connected' in f.FILTERS):
try:
if sid and info:
f.filter_set_sid(sid, info)
if connecting and 'connected' in f.FILTERS:
data = f.filter_connected(self, sid, data)
if data is not None:
data = f.filter_data_in(self, sid, data)
except:
logging.LogWarning(('Ignoring error in filter_in %s: %s'
) % (f, format_exc()))
return data
def GetChunkDestination(self, parse):
return ((parse.Header('Proto') or [''])[0].lower(),
(parse.Header('Port') or [''])[0].lower(),
(parse.Header('Host') or [''])[0].lower(),
(parse.Header('RIP') or [''])[0].lower(),
(parse.Header('RPort') or [''])[0].lower(),
(parse.Header('RTLS') or [''])[0].lower())
def ReplyToProbe(self, proto, sid, host):
if self.conns.config.no_probes:
what, reply = 'rejected', HTTP_NoFeConnection(proto)
elif self.Probe(host):
what, reply = 'good', HTTP_GoodBeConnection(proto)
else:
what, reply = 'back-end down', HTTP_NoBeConnection(proto)
self.LogDebug('Responding to probe for %s: %s' % (host, what))
return self.SendChunked('SID: %s\r\n\r\n%s' % (sid, reply))
def ConnectBE(self, sid, proto, port, host, rIp, rPort, rTLS, data):
conn = UserConn.BackEnd(proto, host, sid, self, port,
remote_ip=rIp, remote_port=rPort, data=data)
if self.filters:
if conn:
rewritehost = conn.config.get('rewritehost')
if rewritehost is True:
rewritehost = conn.backend[BE_BHOST]
else:
rewritehost = False
data = self.FilterIncoming(sid, data, info={
'proto': proto,
'port': port,
'host': host,
'remote_ip': rIp,
'remote_port': rPort,
'using_tls': rTLS,
'be_host': conn and conn.backend[BE_BHOST],
'be_port': conn and conn.backend[BE_BPORT],
'trusted': conn and (conn.security or
conn.config.get('insecure', False)),
'rawheaders': conn and conn.config.get('rawheaders', False),
'proxyproto': conn and conn.config.get('proxyproto', False),
'rewritehost': rewritehost
}, connecting=True)
if proto in ('http', 'http2', 'http3', 'websocket'):
if conn and data.startswith(HttpSecurityFilter.REJECT):
# Pretend we need authentication for dangerous URLs
conn.Die()
conn, data, code = False, '', 500
else:
code = (conn is None) and 503 or 401
if not conn:
# conn is None means we have no back-end.
# conn is False means authentication is required.
if not self.SendChunked('SID: %s\r\n\r\n%s' % (sid,
self.HTTP_Unavail(
self.conns.config, 'be', proto, host,
code=code
)), just_buffer=True):
return False, False
else:
conn = None
elif not conn and proto == 'https':
if not self.SendChunked('SID: %s\r\n\r\n%s' % (sid,
TLS_Unavailable(unavailable=True)),
just_buffer=True):
return False, False
if conn:
self.users[sid] = conn
return conn, data
def ProcessKiteUpdates(self, parse):
# Look for requests for new tunnels
if self.conns.config.isfrontend:
self.ProcessPageKiteHeaders(parse)
requests = self.GetKiteRequests(parse)
if requests:
self.conns.auth().check(requests[:], self,
lambda r, l: self.ChunkAuthCallback(r, l))
else:
self.ParsePageKiteCapabilities(parse)
# Look for responses to requests for new tunnels
tryagain, tokens = self.CheckForTokens(parse)
if tryagain:
server = self.server_info[self.S_NAME]
backends = { }
for bid in tokens:
backends[bid] = self.conns.config.backends[bid]
request = ''.join(PageKiteRequestHeaders(server, backends, tokens))
self.SendChunked('NOOP: 1\r\n%s\r\n\r\n!' % request,
compress=False, just_buffer=True)
kites = self.HandlePageKiteResponse(parse)
if (kites is not None) and (kites < 1):
self.Die()
def ProcessChunk(self, data):
# First, we process the chunk headers.
try:
headers, data = data.split('\r\n\r\n', 1)
parse = HttpLineParser(lines=headers.splitlines(),
state=HttpLineParser.IN_HEADERS)
# Process PING/NOOP/etc: may result in a short-circuit.
rv = self.ProcessChunkDirectives(parse)
if rv is not None:
# Update quota and kite information if necessary: this data is
# always sent along with a NOOP, so checking for it here is safe.
self.ProcessChunkQuotaInfo(parse)
self.ProcessKiteUpdates(parse)
return rv
sid = int(parse.Header('SID')[0])
eof = parse.Header('EOF')
except:
logging.LogError(('Tunnel::ProcessChunk: Corrupt chunk: %s'
) % format_exc())
return False
# EOF stream?
if eof:
self.EofStream(sid, eof[0])
return True
# Headers done, not EOF: let's get the other end of this connection.
if sid in self.users:
# Either from pre-existing connections...
conn = self.users[sid]
if self.filters:
data = self.FilterIncoming(sid, data)
else:
# ... or we connect to a back-end.
proto, port, host, rIp, rPort, rTLS = self.GetChunkDestination(parse)
if proto and host:
# Probe requests are handled differently (short circuit)
if proto.startswith('probe'):
return self.ReplyToProbe(proto, sid, host)
conn, data = self.ConnectBE(sid, proto, port, host,
rIp, rPort, rTLS, data)
if conn is False:
return False
else:
conn = None
# Send the data or shut down.
if conn:
if data and not conn.Send(data, try_flush=True):
# If that failed something is wrong, but we'll let the outer
# select/epoll loop catch and handle it.
pass
else:
# No connection? Close this stream.
self.CloseStream(sid)
return self.SendStreamEof(sid) and self.Flush()
return True
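  # Illustrative sketch (assumed values) of control chunks handled above:
  #   NOOP: 1\r\nPING: 1588600000.0000\r\n\r\n!   (keep-alive ping)
  #   NOOP: 1\r\nPONG: 1588600000.0000\r\n\r\n!   (reply, used to measure tunnel RTT)
  #   SID: 17\r\nEOF: 1WR\r\n\r\nBye!             (write/read EOF for stream 17)
  # Anything else must carry a SID header and is routed to the matching UserConn,
  # or opens a new back-end connection when the SID is not yet known.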
class LoopbackTunnel(Tunnel):
"""A Tunnel which just loops back to this process."""
def __init__(self, conns, which, backends):
Tunnel.__init__(self, conns)
if self.fd:
self.fd = None
self.weighted_rtt = -1000
self.backends = backends
self.require_all = True
self.server_info[self.S_NAME] = LOOPBACK[which]
self.other_end = None
self.which = which
self.buffer_count = 0
self.CountAs('loopbacks_live')
if which == 'FE':
for d in list(six.iterkeys(backends)):
if backends[d][BE_BHOST]:
proto, domain = d.split(':')
self.conns.Tunnel(proto, domain, self)
self.Log([('FE', self.server_info[self.S_NAME]),
('proto', proto),
('domain', domain)])
def __str__(self):
return '%s %s' % (Tunnel.__str__(self), self.which)
def Cleanup(self, close=True):
Tunnel.Cleanup(self, close=close)
other = self.other_end
self.other_end = None
if other and other.other_end:
other.Cleanup(close=close)
def Linkup(self, other):
"""Links two LoopbackTunnels together."""
self.other_end = other
other.other_end = self
return other
def _Loop(conns, backends):
"""Creates a loop, returning the back-end tunnel object."""
return LoopbackTunnel(conns, 'FE', backends
).Linkup(LoopbackTunnel(conns, 'BE', backends))
Loop = staticmethod(_Loop)
# FIXME: This is a zero-length tunnel, but the code relies in some places
# on the tunnel having a length. We really need a pipe here, or
# things will go horribly wrong now and then. For now we hack this by
# separating Write and Flush and looping back only on Flush.
def Send(self, data, try_flush=False, activity=False, just_buffer=True,
allow_blocking=True):
if self.write_blocked:
data = [self.write_blocked] + data
self.write_blocked = ''
joined_data = ''.join(data)
if try_flush or (len(joined_data) > 10240) or (self.buffer_count >= 100):
if logging.DEBUG_IO:
print('|%s| %s \n|%s| %s' % (self.which, self, self.which, data))
self.buffer_count = 0
return self.other_end.ProcessData(joined_data)
else:
self.buffer_count += 1
self.write_blocked = joined_data
return True
class UserConn(Selectable):
"""A Selectable representing a user's connection."""
def __init__(self, address, ui=None):
Selectable.__init__(self, address=address, ui=ui)
self.Reset()
def Reset(self):
self.tunnel = None
self.conns = None
self.backend = BE_NONE[:]
self.config = {}
self.security = None
def Cleanup(self, close=True):
if close:
self.CloseTunnel()
Selectable.Cleanup(self, close=close)
self.Reset()
def ConnType(self):
if self.backend[BE_BHOST]:
return 'BE=%s:%s' % (self.backend[BE_BHOST], self.backend[BE_BPORT])
else:
return 'FE'
def __str__(self):
return '%s %s' % (Selectable.__str__(self), self.ConnType())
def __html__(self):
return ('<b>Tunnel</b>: <a href="/conn/%s">%s</a><br>'
'%s') % (self.tunnel and self.tunnel.sid or '',
escape_html('%s' % (self.tunnel or ''), quote=False) if PY3 else escape_html('%s' % (self.tunnel or '')),
Selectable.__html__(self))
def IsReadable(self, now):
if self.tunnel and self.tunnel.IsBlocked():
return False
return Selectable.IsReadable(self, now)
def CloseTunnel(self, tunnel_closed=False):
tunnel, self.tunnel = self.tunnel, None
if tunnel and not tunnel_closed:
tunnel.SendStreamEof(self.sid, write_eof=True, read_eof=True)
tunnel.CloseStream(self.sid, stream_closed=True)
self.ProcessTunnelEof(read_eof=True, write_eof=True)
def _FrontEnd(conn, address, proto, host, on_port, body, conns):
# This is when an external user connects to a server and requests a
# web-page. We have to give it to them!
try:
self = UserConn(address, ui=conns.config.ui)
except (ValueError, IOError, OSError):
conn.LogError('Unable to create new connection object!')
return None
self.conns = conns
self.SetConn(conn)
if ':' in host: host, port = host.split(':', 1)
self.proto = oproto = proto
self.host = StripEncodedIP(host)
# If the listening port is an alias for another...
if int(on_port) in conns.config.server_portalias:
on_port = conns.config.server_portalias[int(on_port)]
# Try and find the right tunnel. We prefer proto/port specifications first,
    # then just the proto. If the protocol is WebSocket and no tunnel is
# found, look for a plain HTTP tunnel.
if proto.startswith('probe'):
protos = ['http', 'https', 'websocket', 'raw', 'irc', 'xmpp']
ports = conns.config.server_ports[:]
ports.extend(conns.config.server_aliasport.keys())
ports.extend([x for x in conns.config.server_raw_ports if x != VIRTUAL_PN])
else:
protos = [proto]
ports = [on_port]
if proto == 'websocket': protos.extend(['http', 'http2', 'http3'])
elif proto == 'http': protos.extend(['http2', 'http3'])
tunnels = []
for p in protos:
for prt in ports:
if not tunnels:
tunnels = conns.Tunnel('%s-%s' % (p, prt), host)
if tunnels: self.proto = proto = p
if not tunnels:
tunnels = conns.Tunnel(p, host)
if tunnels: self.proto = proto = p
if not tunnels:
tunnels = conns.Tunnel(protos[0], CATCHALL_HN)
if tunnels: self.proto = proto = protos[0]
if self.address:
chunk_headers = [('RIP', self.address[0]), ('RPort', self.address[1])]
if conn.my_tls: chunk_headers.append(('RTLS', 1))
if len(tunnels) > 1:
tunnels.sort(key=lambda t: t.weighted_rtt)
for tun in tunnels:
rejection = tun.RejectTraffic(conn, address, host)
if rejection and hasattr(conn, 'error_details'):
conn.error_details['rejected'] = rejection
else:
self.tunnel = tun
break
if (self.tunnel and self.tunnel.SendData(self, ''.join(body), host=host,
proto=proto, port=on_port,
chunk_headers=chunk_headers)
and self.conns):
log_info = [('domain', self.host), ('on_port', on_port),
('proto', self.proto), ('is', 'FE')]
if oproto != proto:
log_info.append(('sniffed_proto', proto))
self.Log(log_info)
self.conns.Add(self)
if proto in ('http', 'http2', 'http3', 'websocket'):
self.conns.TrackIP(address[0], host)
# FIXME: Use the tracked data to detect & mitigate abuse?
return self
self.LogDebug('No back-end', [('on_port', on_port), ('proto', self.proto),
('domain', self.host), ('is', 'FE')])
self.Cleanup(close=False)
return None
def _BackEnd(proto, host, sid, tunnel, on_port,
remote_ip=None, remote_port=None, data=None):
# This is when we open a backend connection, because a user asked for it.
try:
self = UserConn(None, ui=tunnel.conns.config.ui)
except (ValueError, IOError, OSError):
tunnel.LogDebug('Unable to create new connection object!')
return None
self.sid = sid
self.proto = proto
self.host = host
self.conns = tunnel.conns
self.tunnel = tunnel
failure = None
# Try and find the right back-end. We prefer proto/port specifications
    # first, then just the proto. If the protocol is WebSocket and no
# tunnel is found, look for a plain HTTP tunnel. Fallback hosts can
# be registered using the http2/3/4 protocols.
backend = None
if proto == 'http':
protos = [proto, 'http2', 'http3']
elif proto.startswith('probe'):
protos = ['http', 'http2', 'http3']
elif proto == 'websocket':
protos = [proto, 'http', 'http2', 'http3']
else:
protos = [proto]
for p in protos:
if not backend:
p_p = '%s-%s' % (p, on_port)
backend, be = self.conns.config.GetBackendServer(p_p, host)
if not backend:
backend, be = self.conns.config.GetBackendServer(p, host)
if not backend:
backend, be = self.conns.config.GetBackendServer(p, CATCHALL_HN)
if backend:
break
logInfo = [
('on_port', on_port),
('proto', proto),
('domain', host),
('is', 'BE')]
# Strip off useless IPv6 prefix, if this is an IPv4 address.
if remote_ip.startswith('::ffff:') and ':' not in remote_ip[7:]:
remote_ip = remote_ip[7:]
if remote_ip:
logInfo.append(('remote_ip', remote_ip))
if not backend or not backend[0]:
self.ui.Notify(('%s - %s://%s:%s (FAIL: no server)'
) % (remote_ip or 'unknown', proto, host, on_port),
prefix='?', color=self.ui.YELLOW)
else:
http_host = '%s/%s' % (be[BE_DOMAIN], be[BE_PORT] or '80')
self.backend = be
self.config = host_config = self.conns.config.be_config.get(http_host, {})
# Access control interception: check remote IP addresses first.
ip_keys = [k for k in host_config if k.startswith('ip/')]
if ip_keys:
k1 = 'ip/%s' % remote_ip
k2 = '.'.join(k1.split('.')[:-1])
if not (k1 in host_config or k2 in host_config):
self.ui.Notify(('%s - %s://%s:%s (IP ACCESS DENIED)'
) % (remote_ip or 'unknown', proto, host, on_port),
prefix='!', color=self.ui.YELLOW)
logInfo.append(('forbidden-ip', '%s' % remote_ip))
backend = None
else:
self.security = 'ip'
# Parse things!
if proto in ('websocket', 'http', 'http2', 'http3'):
http_parse = HttpLineParser(lines=data.splitlines())
logInfo[0:0] = [(http_parse.method, http_parse.path)]
else:
http_parse = None
# Access control interception: check for HTTP Basic authentication.
user_keys = [k for k in host_config if k.startswith('password/')]
if user_keys:
user, pwd, fail = None, None, True
if http_parse:
auth = http_parse.Header('Authorization')
try:
(how, ab64) = auth[0].strip().split()
if how.lower() == 'basic':
user, pwd = base64.decodestring(ab64).split(':')
except:
user = auth
user_key = 'password/%s' % user
if user and user_key in host_config:
if host_config[user_key] == pwd:
fail = False
if fail:
if logging.DEBUG_IO:
print('=== REQUEST\n%s\n===' % data)
self.ui.Notify(('%s - %s://%s:%s (USER ACCESS DENIED)'
) % (remote_ip or 'unknown', proto, host, on_port),
prefix='!', color=self.ui.YELLOW)
logInfo.append(('forbidden-user', '%s' % user))
backend = None
failure = ''
else:
self.security = 'password'
if not backend:
logInfo.append(('err', 'No back-end'))
self.Log(logInfo, level=logging.LOG_LEVEL_ERR)
self.Cleanup(close=False)
return failure
try:
self.SetFD(rawsocket(socket.AF_INET, socket.SOCK_STREAM))
try:
self.fd.settimeout(2.0) # Missing in Python 2.2
except:
self.fd.setblocking(1)
sspec = list(backend)
if len(sspec) == 1: sspec.append(80)
self.fd.connect(tuple(sspec))
self.fd.setblocking(0)
except socket.error as err:
logInfo.append(('socket_error', '%s' % err))
self.ui.Notify(('%s - %s://%s:%s (FAIL: %s:%s is down)'
) % (remote_ip or 'unknown', proto, host, on_port,
sspec[0], sspec[1]),
prefix='!', color=self.ui.YELLOW)
self.Log(logInfo, level=logging.LOG_LEVEL_ERR)
self.Cleanup(close=False)
return None
sspec = (sspec[0], sspec[1])
be_name = (sspec == self.conns.config.ui_sspec) and 'builtin' or ('%s:%s' % sspec)
self.ui.Status('serving')
self.ui.Notify(('%s < %s://%s:%s (%s)'
) % (remote_ip or 'unknown', proto, host, on_port, be_name))
self.Log(logInfo)
self.conns.Add(self)
return self
FrontEnd = staticmethod(_FrontEnd)
BackEnd = staticmethod(_BackEnd)
def Shutdown(self, direction):
try:
if self.fd:
if 'sock_shutdown' in dir(self.fd):
# This is a pyOpenSSL socket, which has incompatible shutdown.
if direction == socket.SHUT_RD:
self.fd.shutdown()
else:
self.fd.sock_shutdown(direction)
else:
self.fd.shutdown(direction)
except Exception as e:
pass
def ProcessTunnelEof(self, read_eof=False, write_eof=False):
rv = True
if write_eof and not self.read_eof:
rv = self.ProcessEofRead(tell_tunnel=False) and rv
if read_eof and not self.write_eof:
rv = self.ProcessEofWrite(tell_tunnel=False) and rv
return rv
def ProcessEofRead(self, tell_tunnel=True):
self.read_eof = True
self.Shutdown(socket.SHUT_RD)
if tell_tunnel and self.tunnel:
self.tunnel.SendStreamEof(self.sid, read_eof=True)
return self.ProcessEof()
def ProcessEofWrite(self, tell_tunnel=True):
self.write_eof = True
if not self.write_blocked:
self.Shutdown(socket.SHUT_WR)
if tell_tunnel and self.tunnel:
self.tunnel.SendStreamEof(self.sid, write_eof=True)
if (self.conns and
self.ConnType() == 'FE' and
(not self.read_eof)):
self.conns.SetIdle(self, 120)
return self.ProcessEof()
def Send(self, data, try_flush=False, activity=True, just_buffer=False,
allow_blocking=True):
rv = Selectable.Send(self, data, try_flush=try_flush, activity=activity,
just_buffer=just_buffer,
allow_blocking=allow_blocking)
if self.write_eof and not self.write_blocked:
self.Shutdown(socket.SHUT_WR)
elif try_flush or not self.write_blocked:
if self.tunnel:
self.tunnel.SendProgress(self.sid, self)
return rv
def ProcessData(self, data):
if not self.tunnel:
self.LogError('No tunnel! %s' % self)
return False
if not self.tunnel.SendData(self, data):
self.LogDebug('Send to tunnel failed')
return False
if self.read_eof:
return self.ProcessEofRead()
return True
class UnknownConn(MagicProtocolParser):
"""This class is a connection which we're not sure what is yet."""
def __init__(self, fd, address, on_port, conns):
MagicProtocolParser.__init__(self, fd, address, on_port, ui=conns.config.ui)
self.peeking = True
self.sid = -1
self.host = None
self.proto = None
self.said_hello = False
self.bad_loops = 0
self.error_details = {}
# Set up our parser chain.
self.parsers = [HttpLineParser]
if IrcLineParser.PROTO in conns.config.server_protos:
self.parsers.append(IrcLineParser)
self.parser = MagicLineParser(parsers=self.parsers)
self.conns = conns
self.conns.Add(self)
self.conns.SetIdle(self, 10)
def Cleanup(self, close=True):
MagicProtocolParser.Cleanup(self, close=close)
self.conns = self.parser = None
def SayHello(self):
if self.said_hello:
return False
else:
self.said_hello = True
if self.on_port in SMTP_PORTS:
self.Send(['220 ready ESMTP PageKite Magic Proxy\n'], try_flush=True)
return True
def __str__(self):
return '%s (%s/%s:%s)' % (MagicProtocolParser.__str__(self),
(self.proto or '?'),
(self.on_port or '?'),
(self.host or '?'))
# Any sort of EOF just means give up: if we haven't figured out what
  # kind of connection this is yet, we won't without more data.
def ProcessEofRead(self):
self.Die(discard_buffer=True)
return self.ProcessEof()
def ProcessEofWrite(self):
self.Die(discard_buffer=True)
return self.ProcessEof()
def ProcessLine(self, line, lines):
if not self.parser: return True
if self.parser.Parse(line) is False: return False
if not self.parser.ParsedOK(): return True
self.parser = self.parser.last_parser
if self.parser.protocol == HttpLineParser.PROTO:
# HTTP has special cases, including CONNECT etc.
return self.ProcessParsedHttp(line, lines)
else:
return self.ProcessParsedMagic(self.parser.PROTOS, line, lines)
def ProcessParsedMagic(self, protos, line, lines):
if (self.conns and
self.conns.config.CheckTunnelAcls(self.address, conn=self)):
for proto in protos:
if UserConn.FrontEnd(self, self.address,
proto, self.parser.domain, self.on_port,
self.parser.lines + lines, self.conns) is not None:
self.Cleanup(close=False)
return True
self.Send([self.parser.ErrorReply(port=self.on_port)], try_flush=True)
self.Cleanup()
return False
def ProcessParsedHttp(self, line, lines):
done = False
if self.parser.method == 'PING':
self.Send('PONG %s\r\n\r\n' % self.parser.path)
self.read_eof = self.write_eof = done = True
self.fd.close()
elif self.parser.method == 'CONNECT':
if self.parser.path.lower().startswith('pagekite:'):
if not self.conns.config.CheckTunnelAcls(self.address, conn=self):
self.Send(HTTP_ConnectBad(code=403, status='Forbidden'),
try_flush=True)
return False
if Tunnel.FrontEnd(self, lines, self.conns) is None:
self.Send(HTTP_ConnectBad(), try_flush=True)
return False
done = True
else:
try:
connect_parser = self.parser
chost, cport = connect_parser.path.split(':', 1)
cport = int(cport)
chost = StripEncodedIP(chost.lower())
sid1 = ':%s' % chost
sid2 = '-%s:%s' % (cport, chost)
tunnels = self.conns.tunnels
if not self.conns.config.CheckClientAcls(self.address, conn=self):
self.Send(self.HTTP_Unavail(
self.conns.config, 'fe', 'raw', chost,
code=403, status='Forbidden',
other_details=self.error_details),
try_flush=True)
return False
# These allow explicit CONNECTs to direct http(s) or raw backends.
# If no match is found, we throw an error.
if cport in (80, 8080):
if (('http'+sid1) in tunnels) or (
('http'+sid2) in tunnels) or (
('http2'+sid1) in tunnels) or (
('http2'+sid2) in tunnels) or (
('http3'+sid1) in tunnels) or (
('http3'+sid2) in tunnels):
(self.on_port, self.host) = (cport, chost)
self.parser = HttpLineParser()
self.Send(HTTP_ConnectOK(), try_flush=True)
return True
whost = chost
if '.' in whost:
whost = '*.' + '.'.join(whost.split('.')[1:])
if cport == 443:
if (('https'+sid1) in tunnels) or (
('https'+sid2) in tunnels) or (
chost in self.conns.config.tls_endpoints) or (
whost in self.conns.config.tls_endpoints):
(self.on_port, self.host) = (cport, chost)
self.parser = HttpLineParser()
self.Send(HTTP_ConnectOK(), try_flush=True)
return self.ProcessTls(''.join(lines), chost)
if (cport in self.conns.config.server_raw_ports or
VIRTUAL_PN in self.conns.config.server_raw_ports):
for raw in ('raw',):
if ((raw+sid1) in tunnels) or ((raw+sid2) in tunnels):
(self.on_port, self.host) = (cport, chost)
self.parser = HttpLineParser()
self.Send(HTTP_ConnectOK(), try_flush=True)
return self.ProcessProto(''.join(lines), raw, self.host)
self.Send(HTTP_ConnectBad(), try_flush=True)
return False
except ValueError:
pass
if (not done and self.parser.method == 'GET'
and self.parser.path in MAGIC_PATHS
and 'v1.pagekite.org' in self.parser.Header('Sec-WebSocket-Protocol')
and 'websocket' in self.parser.Header('Upgrade')):
if not self.conns.config.CheckTunnelAcls(self.address, conn=self):
self.Send(HTTP_ConnectBad(code=403, status='Forbidden'),
try_flush=True)
return False
if Tunnel.FrontEnd(self, lines, self.conns) is None:
self.Send(HTTP_ConnectBad(), try_flush=True)
return False
done = True
if not done:
if not self.host:
hosts = self.parser.Header('Host')
if hosts:
self.host = StripEncodedIP(hosts[0].lower())
else:
self.Send(HTTP_Response(400, 'Bad request',
['<html><body><h1>400 Bad request</h1>',
'<p>Invalid request, no Host: found.</p>',
'</body></html>\n'],
trackable=True,
overloaded=self.conns.config.Overloaded()))
return False
if self.parser.path.startswith(MAGIC_PREFIX):
try:
self.host = StripEncodedIP(self.parser.path.split('/')[2])
if self.parser.path.endswith('.json'):
self.proto = 'probe.json'
else:
self.proto = 'probe'
except ValueError:
pass
if self.proto is None:
self.proto = 'http'
upgrade = self.parser.Header('Upgrade')
if 'websocket' in self.conns.config.server_protos:
if upgrade and upgrade[0].lower() == 'websocket':
self.proto = 'websocket'
if not self.conns.config.CheckClientAcls(self.address, conn=self):
self.Send(self.HTTP_Unavail(
self.conns.config, 'fe', self.proto, self.host,
code=403, status='Forbidden',
other_details=self.error_details),
try_flush=True)
self.Cleanup(close=True)
return False
address = self.address
if int(self.on_port) in self.conns.config.server_portalias:
xfwdf = self.parser.Header('X-Forwarded-For')
if xfwdf and address[0] == '127.0.0.1':
address = (xfwdf[0], address[1])
done = True
if UserConn.FrontEnd(self, address,
self.proto, self.host, self.on_port,
self.parser.lines + lines, self.conns) is None:
if self.proto.startswith('probe'):
self.Send(HTTP_NoFeConnection(self.proto),
try_flush=True)
else:
self.Send(self.HTTP_Unavail(
self.conns.config, 'fe', self.proto, self.host,
overloaded=self.conns.config.Overloaded(),
other_details=self.error_details
), try_flush=True)
self.Cleanup(close=True)
return False
# We are done!
self.Cleanup(close=False)
return True
def ProcessTls(self, data, domain=None):
if (not self.conns or
not self.conns.config.CheckClientAcls(self.address, conn=self)):
self.Send(TLS_Unavailable(forbidden=True), try_flush=True)
return False
if domain:
domains = [domain]
else:
try:
domains = self.GetSni(data)
if not domains:
domains = [self.conns.config.tls_default]
if domains[0]:
self.LogDebug('No SNI - trying: %s' % domains[0])
else:
domains = None
except:
        # Probably insufficient data: just return True and assume we'll have
# better luck on the next round... but with a timeout.
self.bad_loops += 1
if self.bad_loops < 25:
self.LogDebug('Error in ProcessTLS, will time out in 120 seconds.')
self.conns.SetIdle(self, 120)
return True
else:
self.LogDebug('Persistent error in ProcessTLS, aborting.')
self.Send(TLS_Unavailable(unavailable=True), try_flush=True)
return False
if domains and domains[0] is not None:
if UserConn.FrontEnd(self, self.address,
'https', domains[0], self.on_port,
[data], self.conns) is not None:
# We are done!
self.EatPeeked()
self.Cleanup(close=False)
return True
else:
# If we know how to terminate the TLS/SSL, do so!
ctx = self.conns.config.GetTlsEndpointCtx(domains[0])
if ctx:
self.fd = socks.SSL_Connect(ctx, self.fd,
accepted=True, server_side=True)
self.peeking = False
self.is_tls = False
self.my_tls = True
self.conns.SetIdle(self, 120)
return True
else:
self.Send(TLS_Unavailable(unavailable=True), try_flush=True)
return False
self.Send(TLS_Unavailable(unavailable=True), try_flush=True)
return False
def ProcessProto(self, data, proto, domain):
if (not self.conns or
not self.conns.config.CheckClientAcls(self.address, conn=self)):
return False
if UserConn.FrontEnd(self, self.address,
proto, domain, self.on_port,
[data], self.conns) is None:
return False
# We are done!
self.Cleanup(close=False)
return True
class UiConn(LineParser):
STATE_PASSWORD = 0
STATE_LIVE = 1
def __init__(self, fd, address, on_port, conns):
LineParser.__init__(self, fd=fd, address=address, on_port=on_port)
self.state = self.STATE_PASSWORD
self.conns = conns
self.conns.Add(self)
self.lines = []
self.qc = threading.Condition()
self.challenge = sha1hex('%s%8.8x' % (globalSecret(),
random.randint(0, 0x7FFFFFFD)+1))
self.expect = signToken(token=self.challenge,
secret=self.conns.config.ConfigSecret(),
payload=self.challenge,
length=1000)
self.LogDebug('Expecting: %s' % self.expect)
self.Send('PageKite? %s\r\n' % self.challenge)
def readline(self):
with self.qc:
while not self.lines:
self.qc.wait()
line = self.lines.pop(0)
return line
def write(self, data):
self.conns.config.ui_wfile.write(data)
self.Send(data)
def Cleanup(self):
self.conns.config.ui.wfile = self.conns.config.ui_wfile
self.conns.config.ui.rfile = self.conns.config.ui_rfile
self.lines = self.conns.config.ui_conn = None
self.conns = None
LineParser.Cleanup(self)
def Disconnect(self):
self.Send('Goodbye')
self.Cleanup()
def ProcessLine(self, line, lines):
if self.state == self.STATE_LIVE:
with self.qc:
self.lines.append(line)
self.qc.notify()
return True
elif self.state == self.STATE_PASSWORD:
if line.strip() == self.expect:
if self.conns.config.ui_conn: self.conns.config.ui_conn.Disconnect()
self.conns.config.ui_conn = self
self.conns.config.ui.wfile = self
self.conns.config.ui.rfile = self
self.state = self.STATE_LIVE
self.Send('OK!\r\n')
return True
else:
self.Send('Sorry.\r\n')
return False
else:
return False
class RawConn(Selectable):
"""This class is a raw/timed connection."""
def __init__(self, fd, address, on_port, conns):
Selectable.__init__(self, fd, address, on_port)
self.my_tls = False
self.is_tls = False
domain = conns.LastIpDomain(address[0])
if domain and UserConn.FrontEnd(self, address, 'raw', domain, on_port,
[], conns):
self.Cleanup(close=False)
else:
self.Cleanup()
class FastPingHelper(threading.Thread):
def __init__(self, conns):
threading.Thread.__init__(self)
self.daemon = True
self.lock = threading.Lock()
self.conns = conns
self.config = conns.config
self.clients = []
self.rejection = None
self.overloaded = False
self.waiting = True
self.sleeptime = 0.03
self.fast_pinged = []
self.next_pinglog = time.time() + 1
self.wq = Queue()
self.up_rejection()
def up_rejection(self):
self.overloaded = self.config.Overloaded()
self.rejection = HTTP_Unavailable('fe', 'http', 'ping.pagekite',
overloaded=self.overloaded,
advertise=False,
dns_hints=self.config.DNS_Hints())
def add_client(self, client, addr, handler):
client.setblocking(0)
with self.lock:
self.clients.append((time.time(), client, addr, handler))
if self.waiting:
self.wq.put(1)
def run_once(self):
now = time.time()
with self.lock:
_clients, self.clients = self.clients, []
for ts, client, addr, handler in _clients:
try:
data = s(client.recv(64, socket.MSG_PEEK))
except:
data = None
try:
if data:
if '\nHost: ping.pagekite' in data:
client.send(b(self.rejection))
client.close()
self.fast_pinged.append(obfuIp(addr[0]))
else:
handler(client, addr)
elif ts > (now-5):
with self.lock:
self.clients.append((ts, client, addr, handler))
else:
logging.LogDebug('Timeout, dropping ' + obfuIp(addr[0]))
client.close()
except IOError:
logging.LogDebug('IOError, dropping ' + obfuIp(addr[0]))
# No action: just let the client get garbage collected
except:
logging.LogDebug('Error in FastPing: ' + format_exc())
if now > self.next_pinglog:
logging.LogDebug('Fast ping %s %d clients: %s' % (
'discouraged' if self.overloaded else 'welcomed',
len(self.fast_pinged),
', '.join(self.fast_pinged)))
self.fast_pinged = []
self.up_rejection()
self.next_pinglog = now + 1
self.sleeptime = max(0, (now + 0.015) - time.time())
def run_until(self, deadline):
try:
while (time.time() + self.sleeptime) < deadline and self.clients:
with self.lock:
self.waiting = True
while not self.wq.empty():
self.wq.get()
self.waiting = False
time.sleep(self.sleeptime)
self.run_once()
except:
logging.LogError('FastPingHelper crashed: ' + format_exc())
def run(self):
while True:
try:
while True:
with self.lock:
self.waiting = True
while not self.clients or not self.wq.empty():
self.wq.get()
self.waiting = False
time.sleep(self.sleeptime)
self.run_once()
except:
logging.LogError('FastPingHelper crashed: ' + format_exc())
time.sleep(1)
class Listener(Selectable):
"""This class listens for incoming connections and accepts them."""
def __init__(self, host, port, conns, backlog=100,
connclass=UnknownConn, quiet=False, acl=None):
Selectable.__init__(self, bind=(host, port), backlog=backlog)
self.Log([('listen', '%s:%s' % (host, port))])
if not quiet:
conns.config.ui.Notify(' - Listening on %s:%s' % (host or '*', port))
self.acl = acl
self.acl_match = None
self.connclass = connclass
self.port = port
self.conns = conns
self.conns.Add(self)
self.CountAs('listeners_live')
def __str__(self):
return '%s port=%s' % (Selectable.__str__(self), self.port)
def __html__(self):
return '<p>Listening on port %s for %s</p>' % (self.port, self.connclass)
def check_acl(self, ipaddr, default=True):
if self.acl and os.path.exists(self.acl):
try:
ipaddr = '%s' % ipaddr
lc = 0
with open(self.acl, 'r') as fd:
for line in fd:
line = line.lower().strip()
lc += 1
if line.startswith('#') or not line:
continue
try:
words = line.split()
pattern, rule = words[:2]
reason = ' '.join(words[2:])
if ipaddr == pattern:
self.acl_match = (lc, pattern, rule, reason)
return bool('allow' in rule)
elif re.compile(pattern).match(ipaddr):
self.acl_match = (lc, pattern, rule, reason)
return bool('allow' in rule)
except IndexError:
self.LogDebug('Invalid line %d in ACL %s' % (lc, self.acl))
except:
self.LogDebug(
'Failed to read/parse %s: %s' % (self.acl, format_exc()))
self.acl_match = (0, '.*', default and 'allow' or 'reject', 'Default')
return default
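# Illustrative sketch (assumption, not from the pagekite docs): check_acl()
# above expects an ACL file with one rule per line, in the form
#   <ip-or-regex> <allow|reject> [reason...]
# where blank lines and lines starting with '#' are skipped and the first
# matching line wins. A file passed as acl= might look like:
#   # trusted monitoring host
#   10.1.2.3          allow   monitoring
#   ^192\.0\.2\..*    reject  documentation prefix, never expected here
# Anything that matches no line falls through to the default verdict.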
def HandleClient(self, client, address):
log_info = [('port', self.port)]
if self.check_acl(address[0]):
log_info += [('accept', '%s:%s' % (obfuIp(address[0]), address[1]))]
uc = self.connclass(client, address, self.port, self.conns)
else:
log_info += [('reject', '%s:%s' % (obfuIp(address[0]), address[1]))]
client.close()
if self.acl:
log_info += [('acl_line', '%s' % self.acl_match[0]),
('reason', self.acl_match[3])]
self.Log(log_info)
return True
def ReadData(self, maxread=None):
try:
self.sstate = 'accept'
self.last_activity = time.time()
client, address = self.fd.accept()
if self.port not in SMTP_PORTS:
while client:
try:
self.conns.ping_helper.add_client(client, address, self.HandleClient)
client, address = self.fd.accept()
except IOError:
client = None
elif client:
self.sstate = 'client'
self.HandleClient(client, address)
self.sstate = (self.dead and 'dead' or 'idle')
return True
except IOError as err:
self.sstate += '/ioerr=%s' % (err.errno,)
self.LogDebug('Listener::ReadData: error: %s (%s)' % (err, err.errno))
except socket.error as e:
(errno, msg) = e
self.sstate += '/sockerr=%s' % (errno,)
self.LogInfo('Listener::ReadData: error: %s (errno=%s)' % (msg, errno))
except Exception as e:
self.sstate += '/exc'
self.LogDebug('Listener::ReadData: %s' % e)
return True
| pagekite/PyPagekite | pagekite/proto/conns.py | Python | agpl-3.0 | 74,627 | 0.011457 |
"""
Tests for xblock_utils.py
"""
import uuid
import ddt
import six
from django.conf import settings
from django.test.client import RequestFactory
from mock import patch
from opaque_keys.edx.asides import AsideUsageKeyV1, AsideUsageKeyV2
from web_fragments.fragment import Fragment
from xblock.core import XBlockAside
from openedx.core.lib.url_utils import quote_slashes
from openedx.core.lib.xblock_builtin import get_css_dependencies, get_js_dependencies
from openedx.core.lib.xblock_utils import (
get_aside_from_xblock,
is_xblock_aside,
replace_course_urls,
replace_jump_to_id_urls,
replace_static_urls,
request_token,
sanitize_html_id,
wrap_fragment,
wrap_xblock
)
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
from xmodule.modulestore.tests.test_asides import AsideTestType
@ddt.ddt
class TestXblockUtils(SharedModuleStoreTestCase):
"""
Tests for xblock utility functions.
"""
@classmethod
def setUpClass(cls):
super(TestXblockUtils, cls).setUpClass()
cls.course_mongo = CourseFactory.create(
default_store=ModuleStoreEnum.Type.mongo,
org='TestX',
number='TS01',
run='2015'
)
cls.course_split = CourseFactory.create(
default_store=ModuleStoreEnum.Type.split,
org='TestX',
number='TS02',
run='2015'
)
def create_fragment(self, content=None):
"""
Create a fragment.
"""
fragment = Fragment(content)
fragment.add_css('body {background-color:red;}')
fragment.add_javascript('alert("Hi!");')
return fragment
def test_wrap_fragment(self):
"""
Verify that wrap_fragment adds new content.
"""
new_content = '<p>New Content<p>'
fragment = self.create_fragment()
wrapped_fragment = wrap_fragment(fragment, new_content)
self.assertEqual('<p>New Content<p>', wrapped_fragment.content)
self.assertEqual('body {background-color:red;}', wrapped_fragment.resources[0].data)
self.assertEqual('alert("Hi!");', wrapped_fragment.resources[1].data)
def test_request_token(self):
"""
Verify that a proper token is returned.
"""
request_with_token = RequestFactory().get('/')
request_with_token._xblock_token = '123' # pylint: disable=protected-access
token = request_token(request_with_token)
self.assertEqual(token, '123')
request_without_token = RequestFactory().get('/')
token = request_token(request_without_token)
# Test to see if the token is a uuid1 hex value
test_uuid = uuid.UUID(token, version=1)
self.assertEqual(token, test_uuid.hex)
@ddt.data(
('course_mongo', 'data-usage-id="i4x:;_;_TestX;_TS01;_course;_2015"'),
('course_split', 'data-usage-id="block-v1:TestX+TS02+2015+type@course+block@course"')
)
@ddt.unpack
def test_wrap_xblock(self, course_id, data_usage_id):
"""
Verify that new content is added and the resources are the same.
"""
fragment = self.create_fragment(u"<h1>Test!</h1>")
course = getattr(self, course_id)
test_wrap_output = wrap_xblock(
runtime_class='TestRuntime',
block=course,
view='baseview',
frag=fragment,
context={"wrap_xblock_data": {"custom-attribute": "custom-value"}},
usage_id_serializer=lambda usage_id: quote_slashes(six.text_type(usage_id)),
request_token=uuid.uuid1().hex
)
self.assertIsInstance(test_wrap_output, Fragment)
self.assertIn('xblock-baseview', test_wrap_output.content)
self.assertIn('data-runtime-class="TestRuntime"', test_wrap_output.content)
self.assertIn(data_usage_id, test_wrap_output.content)
self.assertIn('<h1>Test!</h1>', test_wrap_output.content)
self.assertIn('data-custom-attribute="custom-value"', test_wrap_output.content)
self.assertEqual(test_wrap_output.resources[0].data, u'body {background-color:red;}')
self.assertEqual(test_wrap_output.resources[1].data, 'alert("Hi!");')
@ddt.data('course_mongo', 'course_split')
def test_replace_jump_to_id_urls(self, course_id):
"""
Verify that the jump-to URL has been replaced.
"""
course = getattr(self, course_id)
test_replace = replace_jump_to_id_urls(
course_id=course.id,
jump_to_id_base_url='/base_url/',
block=course,
view='baseview',
frag=Fragment('<a href="/jump_to_id/id">'),
context=None
)
self.assertIsInstance(test_replace, Fragment)
self.assertEqual(test_replace.content, '<a href="/base_url/id">')
@ddt.data(
('course_mongo', '<a href="/courses/TestX/TS01/2015/id">'),
('course_split', '<a href="/courses/course-v1:TestX+TS02+2015/id">')
)
@ddt.unpack
def test_replace_course_urls(self, course_id, anchor_tag):
"""
Verify that the course URL has been replaced.
"""
course = getattr(self, course_id)
test_replace = replace_course_urls(
course_id=course.id,
block=course,
view='baseview',
frag=Fragment('<a href="/course/id">'),
context=None
)
self.assertIsInstance(test_replace, Fragment)
self.assertEqual(test_replace.content, anchor_tag)
@ddt.data(
('course_mongo', '<a href="/c4x/TestX/TS01/asset/id">'),
('course_split', '<a href="/asset-v1:TestX+TS02+2015+type@asset+block/id">')
)
@ddt.unpack
def test_replace_static_urls(self, course_id, anchor_tag):
"""
Verify that the static URL has been replaced.
"""
course = getattr(self, course_id)
test_replace = replace_static_urls(
data_dir=None,
course_id=course.id,
block=course,
view='baseview',
frag=Fragment('<a href="/static/id">'),
context=None
)
self.assertIsInstance(test_replace, Fragment)
self.assertEqual(test_replace.content, anchor_tag)
def test_sanitize_html_id(self):
"""
Verify that colons and dashes are replaced.
"""
dirty_string = 'I:have-un:allowed_characters'
clean_string = sanitize_html_id(dirty_string)
self.assertEqual(clean_string, 'I_have_un_allowed_characters')
@ddt.data(
(True, ["combined.css"]),
(False, ["a.css", "b.css", "c.css"]),
)
@ddt.unpack
def test_get_css_dependencies(self, pipeline_enabled, expected_css_dependencies):
"""
Verify that `get_css_dependencies` returns correct list of files.
"""
pipeline = settings.PIPELINE.copy()
pipeline['PIPELINE_ENABLED'] = pipeline_enabled
pipeline['STYLESHEETS'] = {
'style-group': {
'source_filenames': ["a.css", "b.css", "c.css"],
'output_filename': "combined.css"
}
}
with self.settings(PIPELINE=pipeline):
css_dependencies = get_css_dependencies("style-group")
self.assertEqual(css_dependencies, expected_css_dependencies)
@ddt.data(
(True, ["combined.js"]),
(False, ["a.js", "b.js", "c.js"]),
)
@ddt.unpack
def test_get_js_dependencies(self, pipeline_enabled, expected_js_dependencies):
"""
Verify that `get_js_dependencies` returns correct list of files.
"""
pipeline = settings.PIPELINE.copy()
pipeline['PIPELINE_ENABLED'] = pipeline_enabled
pipeline['JAVASCRIPT'] = {
'js-group': {
'source_filenames': ["a.js", "b.js", "c.js"],
'output_filename': "combined.js"
}
}
with self.settings(PIPELINE=pipeline):
js_dependencies = get_js_dependencies("js-group")
self.assertEqual(js_dependencies, expected_js_dependencies)
class TestXBlockAside(SharedModuleStoreTestCase):
"""Test the xblock aside function."""
@classmethod
def setUpClass(cls):
super(TestXBlockAside, cls).setUpClass()
cls.course = CourseFactory.create()
cls.block = ItemFactory.create(category='aside', parent=cls.course)
cls.aside_v2 = AsideUsageKeyV2(cls.block.scope_ids.usage_id, "aside")
cls.aside_v1 = AsideUsageKeyV1(cls.block.scope_ids.usage_id, "aside")
def test_is_xblock_aside(self):
"""test if xblock is aside"""
assert is_xblock_aside(self.aside_v2) is True
assert is_xblock_aside(self.aside_v1) is True
def test_is_not_xblock_aside(self):
"""test if xblock is not aside"""
assert is_xblock_aside(self.block.scope_ids.usage_id) is False
@patch('xmodule.modulestore.xml.ImportSystem.applicable_aside_types', lambda self, block: ['test_aside'])
@XBlockAside.register_temp_plugin(AsideTestType, 'test_aside')
def test_get_aside(self):
"""test get aside success"""
assert get_aside_from_xblock(self.block, six.text_type("test_aside")) is not None
| msegado/edx-platform | openedx/core/lib/tests/test_xblock_utils.py | Python | agpl-3.0 | 9,421 | 0.00138 |
# -*- coding: latin-1 -*-
"""Tests for cookielib.py."""
import cookielib
import os
import re
import time
from unittest import TestCase
from test import test_support
class DateTimeTests(TestCase):
def test_time2isoz(self):
from cookielib import time2isoz
base = 1019227000
day = 24*3600
self.assertEqual(time2isoz(base), "2002-04-19 14:36:40Z")
self.assertEqual(time2isoz(base+day), "2002-04-20 14:36:40Z")
self.assertEqual(time2isoz(base+2*day), "2002-04-21 14:36:40Z")
self.assertEqual(time2isoz(base+3*day), "2002-04-22 14:36:40Z")
az = time2isoz()
bz = time2isoz(500000)
for text in (az, bz):
self.assertTrue(re.search(r"^\d{4}-\d\d-\d\d \d\d:\d\d:\d\dZ$", text),
"bad time2isoz format: %s %s" % (az, bz))
def test_http2time(self):
from cookielib import http2time
def parse_date(text):
return time.gmtime(http2time(text))[:6]
self.assertEqual(parse_date("01 Jan 2001"), (2001, 1, 1, 0, 0, 0.0))
# this test will break around year 2070
self.assertEqual(parse_date("03-Feb-20"), (2020, 2, 3, 0, 0, 0.0))
# this test will break around year 2048
self.assertEqual(parse_date("03-Feb-98"), (1998, 2, 3, 0, 0, 0.0))
def test_http2time_formats(self):
from cookielib import http2time, time2isoz
# test http2time for supported dates. Test cases with 2 digit year
# will probably break in year 2044.
tests = [
'Thu, 03 Feb 1994 00:00:00 GMT', # proposed new HTTP format
'Thursday, 03-Feb-94 00:00:00 GMT', # old rfc850 HTTP format
'Thursday, 03-Feb-1994 00:00:00 GMT', # broken rfc850 HTTP format
'03 Feb 1994 00:00:00 GMT', # HTTP format (no weekday)
'03-Feb-94 00:00:00 GMT', # old rfc850 (no weekday)
'03-Feb-1994 00:00:00 GMT', # broken rfc850 (no weekday)
'03-Feb-1994 00:00 GMT', # broken rfc850 (no weekday, no seconds)
'03-Feb-1994 00:00', # broken rfc850 (no weekday, no seconds, no tz)
'03-Feb-94', # old rfc850 HTTP format (no weekday, no time)
'03-Feb-1994', # broken rfc850 HTTP format (no weekday, no time)
'03 Feb 1994', # proposed new HTTP format (no weekday, no time)
# A few tests with extra space at various places
' 03 Feb 1994 0:00 ',
' 03-Feb-1994 ',
]
test_t = 760233600 # assume broken POSIX counting of seconds
result = time2isoz(test_t)
expected = "1994-02-03 00:00:00Z"
self.assertEqual(result, expected,
"%s => '%s' (%s)" % (test_t, result, expected))
for s in tests:
t = http2time(s)
t2 = http2time(s.lower())
t3 = http2time(s.upper())
self.assertTrue(t == t2 == t3 == test_t,
"'%s' => %s, %s, %s (%s)" % (s, t, t2, t3, test_t))
def test_http2time_garbage(self):
from cookielib import http2time
for test in [
'',
'Garbage',
'Mandag 16. September 1996',
'01-00-1980',
'01-13-1980',
'00-01-1980',
'32-01-1980',
'01-01-1980 25:00:00',
'01-01-1980 00:61:00',
'01-01-1980 00:00:62',
]:
self.assertTrue(http2time(test) is None,
"http2time(%s) is not None\n"
"http2time(test) %s" % (test, http2time(test))
)
class HeaderTests(TestCase):
def test_parse_ns_headers_expires(self):
from cookielib import parse_ns_headers
# quotes should be stripped
expected = [[('foo', 'bar'), ('expires', 2209069412L), ('version', '0')]]
for hdr in [
'foo=bar; expires=01 Jan 2040 22:23:32 GMT',
'foo=bar; expires="01 Jan 2040 22:23:32 GMT"',
]:
self.assertEqual(parse_ns_headers([hdr]), expected)
def test_parse_ns_headers_version(self):
from cookielib import parse_ns_headers
# quotes should be stripped
expected = [[('foo', 'bar'), ('version', '1')]]
for hdr in [
'foo=bar; version="1"',
'foo=bar; Version="1"',
]:
self.assertEqual(parse_ns_headers([hdr]), expected)
def test_parse_ns_headers_special_names(self):
# names such as 'expires' are not special in first name=value pair
# of Set-Cookie: header
from cookielib import parse_ns_headers
# Cookie with name 'expires'
hdr = 'expires=01 Jan 2040 22:23:32 GMT'
expected = [[("expires", "01 Jan 2040 22:23:32 GMT"), ("version", "0")]]
self.assertEqual(parse_ns_headers([hdr]), expected)
def test_join_header_words(self):
from cookielib import join_header_words
joined = join_header_words([[("foo", None), ("bar", "baz")]])
self.assertEqual(joined, "foo; bar=baz")
self.assertEqual(join_header_words([[]]), "")
def test_split_header_words(self):
from cookielib import split_header_words
tests = [
("foo", [[("foo", None)]]),
("foo=bar", [[("foo", "bar")]]),
(" foo ", [[("foo", None)]]),
(" foo= ", [[("foo", "")]]),
(" foo=", [[("foo", "")]]),
(" foo= ; ", [[("foo", "")]]),
(" foo= ; bar= baz ", [[("foo", ""), ("bar", "baz")]]),
("foo=bar bar=baz", [[("foo", "bar"), ("bar", "baz")]]),
# doesn't really matter if this next fails, but it works ATM
("foo= bar=baz", [[("foo", "bar=baz")]]),
("foo=bar;bar=baz", [[("foo", "bar"), ("bar", "baz")]]),
('foo bar baz', [[("foo", None), ("bar", None), ("baz", None)]]),
("a, b, c", [[("a", None)], [("b", None)], [("c", None)]]),
(r'foo; bar=baz, spam=, foo="\,\;\"", bar= ',
[[("foo", None), ("bar", "baz")],
[("spam", "")], [("foo", ',;"')], [("bar", "")]]),
]
for arg, expect in tests:
try:
result = split_header_words([arg])
except:
import traceback, StringIO
f = StringIO.StringIO()
traceback.print_exc(None, f)
result = "(error -- traceback follows)\n\n%s" % f.getvalue()
self.assertEqual(result, expect, """
When parsing: '%s'
Expected: '%s'
Got: '%s'
""" % (arg, expect, result))
def test_roundtrip(self):
from cookielib import split_header_words, join_header_words
tests = [
("foo", "foo"),
("foo=bar", "foo=bar"),
(" foo ", "foo"),
("foo=", 'foo=""'),
("foo=bar bar=baz", "foo=bar; bar=baz"),
("foo=bar;bar=baz", "foo=bar; bar=baz"),
('foo bar baz', "foo; bar; baz"),
(r'foo="\"" bar="\\"', r'foo="\""; bar="\\"'),
('foo,,,bar', 'foo, bar'),
('foo=bar,bar=baz', 'foo=bar, bar=baz'),
('text/html; charset=iso-8859-1',
'text/html; charset="iso-8859-1"'),
('foo="bar"; port="80,81"; discard, bar=baz',
'foo=bar; port="80,81"; discard, bar=baz'),
(r'Basic realm="\"foo\\\\bar\""',
r'Basic; realm="\"foo\\\\bar\""')
]
for arg, expect in tests:
input = split_header_words([arg])
res = join_header_words(input)
self.assertEqual(res, expect, """
When parsing: '%s'
Expected: '%s'
Got: '%s'
Input was: '%s'
""" % (arg, expect, res, input))
class FakeResponse:
def __init__(self, headers=[], url=None):
"""
headers: list of RFC822-style 'Key: value' strings
"""
import mimetools, StringIO
f = StringIO.StringIO("\n".join(headers))
self._headers = mimetools.Message(f)
self._url = url
def info(self): return self._headers
def interact_2965(cookiejar, url, *set_cookie_hdrs):
return _interact(cookiejar, url, set_cookie_hdrs, "Set-Cookie2")
def interact_netscape(cookiejar, url, *set_cookie_hdrs):
return _interact(cookiejar, url, set_cookie_hdrs, "Set-Cookie")
def _interact(cookiejar, url, set_cookie_hdrs, hdr_name):
"""Perform a single request / response cycle, returning Cookie: header."""
from urllib2 import Request
req = Request(url)
cookiejar.add_cookie_header(req)
cookie_hdr = req.get_header("Cookie", "")
headers = []
for hdr in set_cookie_hdrs:
headers.append("%s: %s" % (hdr_name, hdr))
res = FakeResponse(headers, url)
cookiejar.extract_cookies(res, req)
return cookie_hdr
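# Rough usage sketch for the helpers above (illustrative only): a test first
# feeds a Set-Cookie header into the jar, then asks what would be sent back:
#   cj = cookielib.CookieJar()
#   interact_netscape(cj, "http://www.example.com/", 'sid=1234')
#   interact_netscape(cj, "http://www.example.com/")   # -> 'sid=1234'
# The return value is always the Cookie: header built for the request, before
# the fake response's Set-Cookie headers are extracted into the jar.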
class FileCookieJarTests(TestCase):
def test_lwp_valueless_cookie(self):
# cookies with no value should be saved and loaded consistently
from cookielib import LWPCookieJar
filename = test_support.TESTFN
c = LWPCookieJar()
interact_netscape(c, "http://www.acme.com/", 'boo')
self.assertEqual(c._cookies["www.acme.com"]["/"]["boo"].value, None)
try:
c.save(filename, ignore_discard=True)
c = LWPCookieJar()
c.load(filename, ignore_discard=True)
finally:
try: os.unlink(filename)
except OSError: pass
self.assertEqual(c._cookies["www.acme.com"]["/"]["boo"].value, None)
def test_bad_magic(self):
from cookielib import LWPCookieJar, MozillaCookieJar, LoadError
# IOErrors (eg. file doesn't exist) are allowed to propagate
filename = test_support.TESTFN
for cookiejar_class in LWPCookieJar, MozillaCookieJar:
c = cookiejar_class()
try:
c.load(filename="for this test to work, a file with this "
"filename should not exist")
except IOError, exc:
# exactly IOError, not LoadError
self.assertEqual(exc.__class__, IOError)
else:
self.fail("expected IOError for invalid filename")
# Invalid contents of cookies file (eg. bad magic string)
# causes a LoadError.
try:
f = open(filename, "w")
f.write("oops\n")
for cookiejar_class in LWPCookieJar, MozillaCookieJar:
c = cookiejar_class()
self.assertRaises(LoadError, c.load, filename)
finally:
try: os.unlink(filename)
except OSError: pass
class CookieTests(TestCase):
# XXX
# Get rid of string comparisons where not actually testing str / repr.
# .clear() etc.
# IP addresses like 50 (single number, no dot) and domain-matching
# functions (and is_HDN)? See draft RFC 2965 errata.
# Strictness switches
# is_third_party()
# unverifiability / third-party blocking
# Netscape cookies work the same as RFC 2965 with regard to port.
# Set-Cookie with negative max age.
# If turn RFC 2965 handling off, Set-Cookie2 cookies should not clobber
# Set-Cookie cookies.
# Cookie2 should be sent if *any* cookies are not V1 (ie. V0 OR V2 etc.).
# Cookies (V1 and V0) with no expiry date should be set to be discarded.
# RFC 2965 Quoting:
# Should accept unquoted cookie-attribute values? check errata draft.
# Which are required on the way in and out?
# Should always return quoted cookie-attribute values?
# Proper testing of when RFC 2965 clobbers Netscape (waiting for errata).
# Path-match on return (same for V0 and V1).
# RFC 2965 acceptance and returning rules
# Set-Cookie2 without version attribute is rejected.
# Netscape peculiarities list from Ronald Tschalar.
# The first two still need tests, the rest are covered.
## - Quoting: only quotes around the expires value are recognized as such
## (and yes, some folks quote the expires value); quotes around any other
## value are treated as part of the value.
## - White space: white space around names and values is ignored
## - Default path: if no path parameter is given, the path defaults to the
## path in the request-uri up to, but not including, the last '/'. Note
## that this is entirely different from what the spec says.
## - Commas and other delimiters: Netscape just parses until the next ';'.
## This means it will allow commas etc inside values (and yes, both
## commas and equals are commonly appear in the cookie value). This also
## means that if you fold multiple Set-Cookie header fields into one,
## comma-separated list, it'll be a headache to parse (at least my head
## starts hurting everytime I think of that code).
## - Expires: You'll get all sorts of date formats in the expires,
## including empty expires attributes ("expires="). Be as flexible as you
## can, and certainly don't expect the weekday to be there; if you can't
## parse it, just ignore it and pretend it's a session cookie.
## - Domain-matching: Netscape uses the 2-dot rule for _all_ domains, not
## just the 7 special TLD's listed in their spec. And folks rely on
## that...
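# A quick sketch of the default-path point above, using the helpers defined
# earlier (illustrative; test_default_path below covers it properly):
#   c = CookieJar()
#   interact_netscape(c, "http://www.acme.com/blah/rhubarb", 'eggs="bar"')
#   # the cookie's default path is "/blah", not "/blah/rhubarb" or "/"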
def test_domain_return_ok(self):
# test optimization: .domain_return_ok() should filter out most
# domains in the CookieJar before we try to access them (because that
# may require disk access -- in particular, with MSIECookieJar)
# This is only a rough check for performance reasons, so it's not too
# critical as long as it's sufficiently liberal.
import cookielib, urllib2
pol = cookielib.DefaultCookiePolicy()
for url, domain, ok in [
("http://foo.bar.com/", "blah.com", False),
("http://foo.bar.com/", "rhubarb.blah.com", False),
("http://foo.bar.com/", "rhubarb.foo.bar.com", False),
("http://foo.bar.com/", ".foo.bar.com", True),
("http://foo.bar.com/", "foo.bar.com", True),
("http://foo.bar.com/", ".bar.com", True),
("http://foo.bar.com/", "com", True),
("http://foo.com/", "rhubarb.foo.com", False),
("http://foo.com/", ".foo.com", True),
("http://foo.com/", "foo.com", True),
("http://foo.com/", "com", True),
("http://foo/", "rhubarb.foo", False),
("http://foo/", ".foo", True),
("http://foo/", "foo", True),
("http://foo/", "foo.local", True),
("http://foo/", ".local", True),
]:
request = urllib2.Request(url)
r = pol.domain_return_ok(domain, request)
if ok: self.assertTrue(r)
else: self.assertTrue(not r)
def test_missing_value(self):
from cookielib import MozillaCookieJar, lwp_cookie_str
# missing = sign in Cookie: header is regarded by Mozilla as a missing
# name, and by cookielib as a missing value
filename = test_support.TESTFN
c = MozillaCookieJar(filename)
interact_netscape(c, "http://www.acme.com/", 'eggs')
interact_netscape(c, "http://www.acme.com/", '"spam"; path=/foo/')
cookie = c._cookies["www.acme.com"]["/"]["eggs"]
self.assertTrue(cookie.value is None)
self.assertEqual(cookie.name, "eggs")
cookie = c._cookies["www.acme.com"]['/foo/']['"spam"']
self.assertTrue(cookie.value is None)
self.assertEqual(cookie.name, '"spam"')
self.assertEqual(lwp_cookie_str(cookie), (
r'"spam"; path="/foo/"; domain="www.acme.com"; '
'path_spec; discard; version=0'))
old_str = repr(c)
c.save(ignore_expires=True, ignore_discard=True)
try:
c = MozillaCookieJar(filename)
c.revert(ignore_expires=True, ignore_discard=True)
finally:
os.unlink(c.filename)
# cookies unchanged apart from lost info re. whether path was specified
self.assertEqual(
repr(c),
re.sub("path_specified=%s" % True, "path_specified=%s" % False,
old_str)
)
self.assertEqual(interact_netscape(c, "http://www.acme.com/foo/"),
'"spam"; eggs')
def test_rfc2109_handling(self):
# RFC 2109 cookies are handled as RFC 2965 or Netscape cookies,
# dependent on policy settings
from cookielib import CookieJar, DefaultCookiePolicy
for rfc2109_as_netscape, rfc2965, version in [
# default according to rfc2965 if not explicitly specified
(None, False, 0),
(None, True, 1),
# explicit rfc2109_as_netscape
(False, False, None), # version None here means no cookie stored
(False, True, 1),
(True, False, 0),
(True, True, 0),
]:
policy = DefaultCookiePolicy(
rfc2109_as_netscape=rfc2109_as_netscape,
rfc2965=rfc2965)
c = CookieJar(policy)
interact_netscape(c, "http://www.example.com/", "ni=ni; Version=1")
try:
cookie = c._cookies["www.example.com"]["/"]["ni"]
except KeyError:
self.assertTrue(version is None) # didn't expect a stored cookie
else:
self.assertEqual(cookie.version, version)
# 2965 cookies are unaffected
interact_2965(c, "http://www.example.com/",
"foo=bar; Version=1")
if rfc2965:
cookie2965 = c._cookies["www.example.com"]["/"]["foo"]
self.assertEqual(cookie2965.version, 1)
def test_ns_parser(self):
from cookielib import CookieJar, DEFAULT_HTTP_PORT
c = CookieJar()
interact_netscape(c, "http://www.acme.com/",
'spam=eggs; DoMain=.acme.com; port; blArgh="feep"')
interact_netscape(c, "http://www.acme.com/", 'ni=ni; port=80,8080')
interact_netscape(c, "http://www.acme.com:80/", 'nini=ni')
interact_netscape(c, "http://www.acme.com:80/", 'foo=bar; expires=')
interact_netscape(c, "http://www.acme.com:80/", 'spam=eggs; '
'expires="Foo Bar 25 33:22:11 3022"')
cookie = c._cookies[".acme.com"]["/"]["spam"]
self.assertEqual(cookie.domain, ".acme.com")
self.assertTrue(cookie.domain_specified)
self.assertEqual(cookie.port, DEFAULT_HTTP_PORT)
self.assertTrue(not cookie.port_specified)
# case is preserved
self.assertTrue(cookie.has_nonstandard_attr("blArgh") and
not cookie.has_nonstandard_attr("blargh"))
cookie = c._cookies["www.acme.com"]["/"]["ni"]
self.assertEqual(cookie.domain, "www.acme.com")
self.assertTrue(not cookie.domain_specified)
self.assertEqual(cookie.port, "80,8080")
self.assertTrue(cookie.port_specified)
cookie = c._cookies["www.acme.com"]["/"]["nini"]
self.assertTrue(cookie.port is None)
self.assertTrue(not cookie.port_specified)
# invalid expires should not cause cookie to be dropped
foo = c._cookies["www.acme.com"]["/"]["foo"]
spam = c._cookies["www.acme.com"]["/"]["spam"]
self.assertTrue(foo.expires is None)
self.assertTrue(spam.expires is None)
def test_ns_parser_special_names(self):
# names such as 'expires' are not special in first name=value pair
# of Set-Cookie: header
from cookielib import CookieJar
c = CookieJar()
interact_netscape(c, "http://www.acme.com/", 'expires=eggs')
interact_netscape(c, "http://www.acme.com/", 'version=eggs; spam=eggs')
cookies = c._cookies["www.acme.com"]["/"]
self.assertTrue('expires' in cookies)
self.assertTrue('version' in cookies)
def test_expires(self):
from cookielib import time2netscape, CookieJar
# if expires is in future, keep cookie...
c = CookieJar()
future = time2netscape(time.time()+3600)
interact_netscape(c, "http://www.acme.com/", 'spam="bar"; expires=%s' %
future)
self.assertEqual(len(c), 1)
now = time2netscape(time.time()-1)
# ... and if in past or present, discard it
interact_netscape(c, "http://www.acme.com/", 'foo="eggs"; expires=%s' %
now)
h = interact_netscape(c, "http://www.acme.com/")
self.assertEqual(len(c), 1)
self.assertTrue('spam="bar"' in h and "foo" not in h)
# max-age takes precedence over expires, and zero max-age is request to
# delete both new cookie and any old matching cookie
interact_netscape(c, "http://www.acme.com/", 'eggs="bar"; expires=%s' %
future)
interact_netscape(c, "http://www.acme.com/", 'bar="bar"; expires=%s' %
future)
self.assertEqual(len(c), 3)
interact_netscape(c, "http://www.acme.com/", 'eggs="bar"; '
'expires=%s; max-age=0' % future)
interact_netscape(c, "http://www.acme.com/", 'bar="bar"; '
'max-age=0; expires=%s' % future)
h = interact_netscape(c, "http://www.acme.com/")
self.assertEqual(len(c), 1)
# test expiry at end of session for cookies with no expires attribute
interact_netscape(c, "http://www.rhubarb.net/", 'whum="fizz"')
self.assertEqual(len(c), 2)
c.clear_session_cookies()
self.assertEqual(len(c), 1)
self.assertIn('spam="bar"', h)
# XXX RFC 2965 expiry rules (some apply to V0 too)
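# Informal recap of the precedence exercised above (sketch): when a Set-Cookie
# carries both attributes, max-age wins over expires, so something like
#   interact_netscape(c, "http://www.acme.com/", 'eggs="bar"; expires=%s; max-age=0' % future)
# acts as an immediate delete -- it drops both the incoming cookie and any
# stored cookie with the same name, domain and path.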
def test_default_path(self):
from cookielib import CookieJar, DefaultCookiePolicy
# RFC 2965
pol = DefaultCookiePolicy(rfc2965=True)
c = CookieJar(pol)
interact_2965(c, "http://www.acme.com/", 'spam="bar"; Version="1"')
self.assertIn("/", c._cookies["www.acme.com"])
c = CookieJar(pol)
interact_2965(c, "http://www.acme.com/blah", 'eggs="bar"; Version="1"')
self.assertIn("/", c._cookies["www.acme.com"])
c = CookieJar(pol)
interact_2965(c, "http://www.acme.com/blah/rhubarb",
'eggs="bar"; Version="1"')
self.assertIn("/blah/", c._cookies["www.acme.com"])
c = CookieJar(pol)
interact_2965(c, "http://www.acme.com/blah/rhubarb/",
'eggs="bar"; Version="1"')
self.assertIn("/blah/rhubarb/", c._cookies["www.acme.com"])
# Netscape
c = CookieJar()
interact_netscape(c, "http://www.acme.com/", 'spam="bar"')
self.assertIn("/", c._cookies["www.acme.com"])
c = CookieJar()
interact_netscape(c, "http://www.acme.com/blah", 'eggs="bar"')
self.assertIn("/", c._cookies["www.acme.com"])
c = CookieJar()
interact_netscape(c, "http://www.acme.com/blah/rhubarb", 'eggs="bar"')
self.assertIn("/blah", c._cookies["www.acme.com"])
c = CookieJar()
interact_netscape(c, "http://www.acme.com/blah/rhubarb/", 'eggs="bar"')
self.assertIn("/blah/rhubarb", c._cookies["www.acme.com"])
def test_default_path_with_query(self):
cj = cookielib.CookieJar()
uri = "http://example.com/?spam/eggs"
value = 'eggs="bar"'
interact_netscape(cj, uri, value)
# default path does not include query, so is "/", not "/?spam"
self.assertIn("/", cj._cookies["example.com"])
# cookie is sent back to the same URI
self.assertEqual(interact_netscape(cj, uri), value)
def test_escape_path(self):
from cookielib import escape_path
cases = [
# quoted safe
("/foo%2f/bar", "/foo%2F/bar"),
("/foo%2F/bar", "/foo%2F/bar"),
# quoted %
("/foo%%/bar", "/foo%%/bar"),
# quoted unsafe
("/fo%19o/bar", "/fo%19o/bar"),
("/fo%7do/bar", "/fo%7Do/bar"),
# unquoted safe
("/foo/bar&", "/foo/bar&"),
("/foo//bar", "/foo//bar"),
("\176/foo/bar", "\176/foo/bar"),
# unquoted unsafe
("/foo\031/bar", "/foo%19/bar"),
("/\175foo/bar", "/%7Dfoo/bar"),
# unicode
(u"/foo/bar\uabcd", "/foo/bar%EA%AF%8D"), # UTF-8 encoded
]
for arg, result in cases:
self.assertEqual(escape_path(arg), result)
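# Informal summary of the cases above (sketch): escape_path() upper-cases the
# hex digits of octets that are already %-quoted, %-quotes characters outside
# its safe set, and UTF-8 encodes unicode before quoting, e.g.
#   escape_path(u"/foo/bar\uabcd")  ->  "/foo/bar%EA%AF%8D"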
def test_request_path(self):
from urllib2 import Request
from cookielib import request_path
# with parameters
req = Request("http://www.example.com/rheum/rhaponticum;"
"foo=bar;sing=song?apples=pears&spam=eggs#ni")
self.assertEqual(request_path(req),
"/rheum/rhaponticum;foo=bar;sing=song")
# without parameters
req = Request("http://www.example.com/rheum/rhaponticum?"
"apples=pears&spam=eggs#ni")
self.assertEqual(request_path(req), "/rheum/rhaponticum")
# missing final slash
req = Request("http://www.example.com")
self.assertEqual(request_path(req), "/")
def test_request_port(self):
from urllib2 import Request
from cookielib import request_port, DEFAULT_HTTP_PORT
req = Request("http://www.acme.com:1234/",
headers={"Host": "www.acme.com:4321"})
self.assertEqual(request_port(req), "1234")
req = Request("http://www.acme.com/",
headers={"Host": "www.acme.com:4321"})
self.assertEqual(request_port(req), DEFAULT_HTTP_PORT)
def test_request_host(self):
from urllib2 import Request
from cookielib import request_host
# this request is illegal (RFC2616, 14.2.3)
req = Request("http://1.1.1.1/",
headers={"Host": "www.acme.com:80"})
# libwww-perl wants this response, but that seems wrong (RFC 2616,
# section 5.2, point 1., and RFC 2965 section 1, paragraph 3)
#self.assertEqual(request_host(req), "www.acme.com")
self.assertEqual(request_host(req), "1.1.1.1")
req = Request("http://www.acme.com/",
headers={"Host": "irrelevant.com"})
self.assertEqual(request_host(req), "www.acme.com")
# not actually sure this one is a valid Request object, so maybe should
# remove test for no host in url in request_host function?
req = Request("/resource.html",
headers={"Host": "www.acme.com"})
self.assertEqual(request_host(req), "www.acme.com")
# port shouldn't be in request-host
req = Request("http://www.acme.com:2345/resource.html",
headers={"Host": "www.acme.com:5432"})
self.assertEqual(request_host(req), "www.acme.com")
def test_is_HDN(self):
from cookielib import is_HDN
self.assertTrue(is_HDN("foo.bar.com"))
self.assertTrue(is_HDN("1foo2.3bar4.5com"))
self.assertTrue(not is_HDN("192.168.1.1"))
self.assertTrue(not is_HDN(""))
self.assertTrue(not is_HDN("."))
self.assertTrue(not is_HDN(".foo.bar.com"))
self.assertTrue(not is_HDN("..foo"))
self.assertTrue(not is_HDN("foo."))
def test_reach(self):
from cookielib import reach
self.assertEqual(reach("www.acme.com"), ".acme.com")
self.assertEqual(reach("acme.com"), "acme.com")
self.assertEqual(reach("acme.local"), ".local")
self.assertEqual(reach(".local"), ".local")
self.assertEqual(reach(".com"), ".com")
self.assertEqual(reach("."), ".")
self.assertEqual(reach(""), "")
self.assertEqual(reach("192.168.0.1"), "192.168.0.1")
def test_domain_match(self):
from cookielib import domain_match, user_domain_match
self.assertTrue(domain_match("192.168.1.1", "192.168.1.1"))
self.assertTrue(not domain_match("192.168.1.1", ".168.1.1"))
self.assertTrue(domain_match("x.y.com", "x.Y.com"))
self.assertTrue(domain_match("x.y.com", ".Y.com"))
self.assertTrue(not domain_match("x.y.com", "Y.com"))
self.assertTrue(domain_match("a.b.c.com", ".c.com"))
self.assertTrue(not domain_match(".c.com", "a.b.c.com"))
self.assertTrue(domain_match("example.local", ".local"))
self.assertTrue(not domain_match("blah.blah", ""))
self.assertTrue(not domain_match("", ".rhubarb.rhubarb"))
self.assertTrue(domain_match("", ""))
self.assertTrue(user_domain_match("acme.com", "acme.com"))
self.assertTrue(not user_domain_match("acme.com", ".acme.com"))
self.assertTrue(user_domain_match("rhubarb.acme.com", ".acme.com"))
self.assertTrue(user_domain_match("www.rhubarb.acme.com", ".acme.com"))
self.assertTrue(user_domain_match("x.y.com", "x.Y.com"))
self.assertTrue(user_domain_match("x.y.com", ".Y.com"))
self.assertTrue(not user_domain_match("x.y.com", "Y.com"))
self.assertTrue(user_domain_match("y.com", "Y.com"))
self.assertTrue(not user_domain_match(".y.com", "Y.com"))
self.assertTrue(user_domain_match(".y.com", ".Y.com"))
self.assertTrue(user_domain_match("x.y.com", ".com"))
self.assertTrue(not user_domain_match("x.y.com", "com"))
self.assertTrue(not user_domain_match("x.y.com", "m"))
self.assertTrue(not user_domain_match("x.y.com", ".m"))
self.assertTrue(not user_domain_match("x.y.com", ""))
self.assertTrue(not user_domain_match("x.y.com", "."))
self.assertTrue(user_domain_match("192.168.1.1", "192.168.1.1"))
# not both HDNs, so must string-compare equal to match
self.assertTrue(not user_domain_match("192.168.1.1", ".168.1.1"))
self.assertTrue(not user_domain_match("192.168.1.1", "."))
# empty string is a special case
self.assertTrue(not user_domain_match("192.168.1.1", ""))
def test_wrong_domain(self):
# Cookies whose effective request-host name does not domain-match the
# domain are rejected.
# XXX far from complete
from cookielib import CookieJar
c = CookieJar()
interact_2965(c, "http://www.nasty.com/",
'foo=bar; domain=friendly.org; Version="1"')
self.assertEqual(len(c), 0)
def test_strict_domain(self):
# Cookies whose domain is a country-code tld like .co.uk should
# not be set if CookiePolicy.strict_domain is true.
from cookielib import CookieJar, DefaultCookiePolicy
cp = DefaultCookiePolicy(strict_domain=True)
cj = CookieJar(policy=cp)
interact_netscape(cj, "http://example.co.uk/", 'no=problemo')
interact_netscape(cj, "http://example.co.uk/",
'okey=dokey; Domain=.example.co.uk')
self.assertEqual(len(cj), 2)
for pseudo_tld in [".co.uk", ".org.za", ".tx.us", ".name.us"]:
interact_netscape(cj, "http://example.%s/" % pseudo_tld,
'spam=eggs; Domain=.co.uk')
self.assertEqual(len(cj), 2)
def test_two_component_domain_ns(self):
# Netscape: .www.bar.com, www.bar.com, .bar.com, bar.com, no domain
# should all get accepted, as should .acme.com, acme.com and no domain
# for 2-component domains like acme.com.
from cookielib import CookieJar, DefaultCookiePolicy
c = CookieJar()
# two-component V0 domain is OK
interact_netscape(c, "http://foo.net/", 'ns=bar')
self.assertEqual(len(c), 1)
self.assertEqual(c._cookies["foo.net"]["/"]["ns"].value, "bar")
self.assertEqual(interact_netscape(c, "http://foo.net/"), "ns=bar")
# *will* be returned to any other domain (unlike RFC 2965)...
self.assertEqual(interact_netscape(c, "http://www.foo.net/"),
"ns=bar")
# ...unless requested otherwise
pol = DefaultCookiePolicy(
strict_ns_domain=DefaultCookiePolicy.DomainStrictNonDomain)
c.set_policy(pol)
self.assertEqual(interact_netscape(c, "http://www.foo.net/"), "")
# unlike RFC 2965, even explicit two-component domain is OK,
# because .foo.net matches foo.net
interact_netscape(c, "http://foo.net/foo/",
'spam1=eggs; domain=foo.net')
# even if it starts with a dot -- in NS rules, .foo.net matches foo.net!
interact_netscape(c, "http://foo.net/foo/bar/",
'spam2=eggs; domain=.foo.net')
self.assertEqual(len(c), 3)
self.assertEqual(c._cookies[".foo.net"]["/foo"]["spam1"].value,
"eggs")
self.assertEqual(c._cookies[".foo.net"]["/foo/bar"]["spam2"].value,
"eggs")
self.assertEqual(interact_netscape(c, "http://foo.net/foo/bar/"),
"spam2=eggs; spam1=eggs; ns=bar")
# top-level domain is too general
interact_netscape(c, "http://foo.net/", 'nini="ni"; domain=.net')
self.assertEqual(len(c), 3)
## # Netscape protocol doesn't allow non-special top level domains (such
## # as co.uk) in the domain attribute unless there are at least three
## # dots in it.
# Oh yes it does! Real implementations don't check this, and real
# cookies (of course) rely on that behaviour.
interact_netscape(c, "http://foo.co.uk", 'nasty=trick; domain=.co.uk')
## self.assertEqual(len(c), 2)
self.assertEqual(len(c), 4)
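# For reference (sketch, not exercised here): the Netscape-side laxness shown
# above can be tightened through the policy, e.g.
#   pol = DefaultCookiePolicy(strict_ns_domain=DefaultCookiePolicy.DomainStrict)
# where DomainStrict combines the DomainStrictNoDots and DomainStrictNonDomain
# flags (the latter is what the set_policy() call above switches on).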
def test_two_component_domain_rfc2965(self):
from cookielib import CookieJar, DefaultCookiePolicy
pol = DefaultCookiePolicy(rfc2965=True)
c = CookieJar(pol)
# two-component V1 domain is OK
interact_2965(c, "http://foo.net/", 'foo=bar; Version="1"')
self.assertEqual(len(c), 1)
self.assertEqual(c._cookies["foo.net"]["/"]["foo"].value, "bar")
self.assertEqual(interact_2965(c, "http://foo.net/"),
"$Version=1; foo=bar")
# won't be returned to any other domain (because domain was implied)
self.assertEqual(interact_2965(c, "http://www.foo.net/"), "")
# unless domain is given explicitly, because then it must be
# rewritten to start with a dot: foo.net --> .foo.net, which does
# not domain-match foo.net
interact_2965(c, "http://foo.net/foo",
'spam=eggs; domain=foo.net; path=/foo; Version="1"')
self.assertEqual(len(c), 1)
self.assertEqual(interact_2965(c, "http://foo.net/foo"),
"$Version=1; foo=bar")
# explicit foo.net from three-component domain www.foo.net *does* get
# set, because .foo.net domain-matches .foo.net
interact_2965(c, "http://www.foo.net/foo/",
'spam=eggs; domain=foo.net; Version="1"')
self.assertEqual(c._cookies[".foo.net"]["/foo/"]["spam"].value,
"eggs")
self.assertEqual(len(c), 2)
self.assertEqual(interact_2965(c, "http://foo.net/foo/"),
"$Version=1; foo=bar")
self.assertEqual(interact_2965(c, "http://www.foo.net/foo/"),
'$Version=1; spam=eggs; $Domain="foo.net"')
# top-level domain is too general
interact_2965(c, "http://foo.net/",
'ni="ni"; domain=".net"; Version="1"')
self.assertEqual(len(c), 2)
# RFC 2965 doesn't require blocking this
interact_2965(c, "http://foo.co.uk/",
'nasty=trick; domain=.co.uk; Version="1"')
self.assertEqual(len(c), 3)
def test_domain_allow(self):
from cookielib import CookieJar, DefaultCookiePolicy
from urllib2 import Request
c = CookieJar(policy=DefaultCookiePolicy(
blocked_domains=["acme.com"],
allowed_domains=["www.acme.com"]))
req = Request("http://acme.com/")
headers = ["Set-Cookie: CUSTOMER=WILE_E_COYOTE; path=/"]
res = FakeResponse(headers, "http://acme.com/")
c.extract_cookies(res, req)
self.assertEqual(len(c), 0)
req = Request("http://www.acme.com/")
res = FakeResponse(headers, "http://www.acme.com/")
c.extract_cookies(res, req)
self.assertEqual(len(c), 1)
req = Request("http://www.coyote.com/")
res = FakeResponse(headers, "http://www.coyote.com/")
c.extract_cookies(res, req)
self.assertEqual(len(c), 1)
# set a cookie with non-allowed domain...
req = Request("http://www.coyote.com/")
res = FakeResponse(headers, "http://www.coyote.com/")
cookies = c.make_cookies(res, req)
c.set_cookie(cookies[0])
self.assertEqual(len(c), 2)
# ... and check it doesn't get returned
c.add_cookie_header(req)
self.assertTrue(not req.has_header("Cookie"))
def test_domain_block(self):
from cookielib import CookieJar, DefaultCookiePolicy
from urllib2 import Request
pol = DefaultCookiePolicy(
rfc2965=True, blocked_domains=[".acme.com"])
c = CookieJar(policy=pol)
headers = ["Set-Cookie: CUSTOMER=WILE_E_COYOTE; path=/"]
req = Request("http://www.acme.com/")
res = FakeResponse(headers, "http://www.acme.com/")
c.extract_cookies(res, req)
self.assertEqual(len(c), 0)
p = pol.set_blocked_domains(["acme.com"])
c.extract_cookies(res, req)
self.assertEqual(len(c), 1)
c.clear()
req = Request("http://www.roadrunner.net/")
res = FakeResponse(headers, "http://www.roadrunner.net/")
c.extract_cookies(res, req)
self.assertEqual(len(c), 1)
req = Request("http://www.roadrunner.net/")
c.add_cookie_header(req)
self.assertTrue((req.has_header("Cookie") and
req.has_header("Cookie2")))
c.clear()
pol.set_blocked_domains([".acme.com"])
c.extract_cookies(res, req)
self.assertEqual(len(c), 1)
# set a cookie with blocked domain...
req = Request("http://www.acme.com/")
res = FakeResponse(headers, "http://www.acme.com/")
cookies = c.make_cookies(res, req)
c.set_cookie(cookies[0])
self.assertEqual(len(c), 2)
# ... and check it doesn't get returned
c.add_cookie_header(req)
self.assertTrue(not req.has_header("Cookie"))
def test_secure(self):
from cookielib import CookieJar, DefaultCookiePolicy
for ns in True, False:
for whitespace in " ", "":
c = CookieJar()
if ns:
pol = DefaultCookiePolicy(rfc2965=False)
int = interact_netscape
vs = ""
else:
pol = DefaultCookiePolicy(rfc2965=True)
int = interact_2965
vs = "; Version=1"
c.set_policy(pol)
url = "http://www.acme.com/"
int(c, url, "foo1=bar%s%s" % (vs, whitespace))
int(c, url, "foo2=bar%s; secure%s" % (vs, whitespace))
self.assertTrue(
not c._cookies["www.acme.com"]["/"]["foo1"].secure,
"non-secure cookie registered secure")
self.assertTrue(
c._cookies["www.acme.com"]["/"]["foo2"].secure,
"secure cookie registered non-secure")
def test_quote_cookie_value(self):
from cookielib import CookieJar, DefaultCookiePolicy
c = CookieJar(policy=DefaultCookiePolicy(rfc2965=True))
interact_2965(c, "http://www.acme.com/", r'foo=\b"a"r; Version=1')
h = interact_2965(c, "http://www.acme.com/")
self.assertEqual(h, r'$Version=1; foo=\\b\"a\"r')
def test_missing_final_slash(self):
# Missing slash from request URL's abs_path should be assumed present.
from cookielib import CookieJar, DefaultCookiePolicy
from urllib2 import Request
url = "http://www.acme.com"
c = CookieJar(DefaultCookiePolicy(rfc2965=True))
interact_2965(c, url, "foo=bar; Version=1")
req = Request(url)
self.assertEqual(len(c), 1)
c.add_cookie_header(req)
self.assertTrue(req.has_header("Cookie"))
def test_domain_mirror(self):
from cookielib import CookieJar, DefaultCookiePolicy
pol = DefaultCookiePolicy(rfc2965=True)
c = CookieJar(pol)
url = "http://foo.bar.com/"
interact_2965(c, url, "spam=eggs; Version=1")
h = interact_2965(c, url)
self.assertNotIn("Domain", h,
"absent domain returned with domain present")
c = CookieJar(pol)
url = "http://foo.bar.com/"
interact_2965(c, url, 'spam=eggs; Version=1; Domain=.bar.com')
h = interact_2965(c, url)
self.assertIn('$Domain=".bar.com"', h, "domain not returned")
c = CookieJar(pol)
url = "http://foo.bar.com/"
# note missing initial dot in Domain
interact_2965(c, url, 'spam=eggs; Version=1; Domain=bar.com')
h = interact_2965(c, url)
self.assertIn('$Domain="bar.com"', h, "domain not returned")
def test_path_mirror(self):
from cookielib import CookieJar, DefaultCookiePolicy
pol = DefaultCookiePolicy(rfc2965=True)
c = CookieJar(pol)
url = "http://foo.bar.com/"
interact_2965(c, url, "spam=eggs; Version=1")
h = interact_2965(c, url)
self.assertNotIn("Path", h, "absent path returned with path present")
c = CookieJar(pol)
url = "http://foo.bar.com/"
interact_2965(c, url, 'spam=eggs; Version=1; Path=/')
h = interact_2965(c, url)
self.assertIn('$Path="/"', h, "path not returned")
def test_port_mirror(self):
from cookielib import CookieJar, DefaultCookiePolicy
pol = DefaultCookiePolicy(rfc2965=True)
c = CookieJar(pol)
url = "http://foo.bar.com/"
interact_2965(c, url, "spam=eggs; Version=1")
h = interact_2965(c, url)
self.assertNotIn("Port", h, "absent port returned with port present")
c = CookieJar(pol)
url = "http://foo.bar.com/"
interact_2965(c, url, "spam=eggs; Version=1; Port")
h = interact_2965(c, url)
self.assertTrue(re.search("\$Port([^=]|$)", h),
"port with no value not returned with no value")
c = CookieJar(pol)
url = "http://foo.bar.com/"
interact_2965(c, url, 'spam=eggs; Version=1; Port="80"')
h = interact_2965(c, url)
self.assertIn('$Port="80"', h,
"port with single value not returned with single value")
c = CookieJar(pol)
url = "http://foo.bar.com/"
interact_2965(c, url, 'spam=eggs; Version=1; Port="80,8080"')
h = interact_2965(c, url)
self.assertIn('$Port="80,8080"', h,
"port with multiple values not returned with multiple "
"values")
def test_no_return_comment(self):
from cookielib import CookieJar, DefaultCookiePolicy
c = CookieJar(DefaultCookiePolicy(rfc2965=True))
url = "http://foo.bar.com/"
interact_2965(c, url, 'spam=eggs; Version=1; '
'Comment="does anybody read these?"; '
'CommentURL="http://foo.bar.net/comment.html"')
h = interact_2965(c, url)
self.assertTrue(
"Comment" not in h,
"Comment or CommentURL cookie-attributes returned to server")
def test_Cookie_iterator(self):
from cookielib import CookieJar, Cookie, DefaultCookiePolicy
cs = CookieJar(DefaultCookiePolicy(rfc2965=True))
# add some random cookies
interact_2965(cs, "http://blah.spam.org/", 'foo=eggs; Version=1; '
'Comment="does anybody read these?"; '
'CommentURL="http://foo.bar.net/comment.html"')
interact_netscape(cs, "http://www.acme.com/blah/", "spam=bar; secure")
interact_2965(cs, "http://www.acme.com/blah/",
"foo=bar; secure; Version=1")
interact_2965(cs, "http://www.acme.com/blah/",
"foo=bar; path=/; Version=1")
interact_2965(cs, "http://www.sol.no",
r'bang=wallop; version=1; domain=".sol.no"; '
r'port="90,100, 80,8080"; '
r'max-age=100; Comment = "Just kidding! (\"|\\\\) "')
versions = [1, 1, 1, 0, 1]
names = ["bang", "foo", "foo", "spam", "foo"]
domains = [".sol.no", "blah.spam.org", "www.acme.com",
"www.acme.com", "www.acme.com"]
paths = ["/", "/", "/", "/blah", "/blah/"]
for i in range(4):
i = 0
for c in cs:
self.assertIsInstance(c, Cookie)
self.assertEqual(c.version, versions[i])
self.assertEqual(c.name, names[i])
self.assertEqual(c.domain, domains[i])
self.assertEqual(c.path, paths[i])
i = i + 1
def test_parse_ns_headers(self):
from cookielib import parse_ns_headers
# missing domain value (invalid cookie)
self.assertEqual(
parse_ns_headers(["foo=bar; path=/; domain"]),
[[("foo", "bar"),
("path", "/"), ("domain", None), ("version", "0")]]
)
# invalid expires value
self.assertEqual(
parse_ns_headers(["foo=bar; expires=Foo Bar 12 33:22:11 2000"]),
[[("foo", "bar"), ("expires", None), ("version", "0")]]
)
# missing cookie value (valid cookie)
self.assertEqual(
parse_ns_headers(["foo"]),
[[("foo", None), ("version", "0")]]
)
# shouldn't add version if header is empty
self.assertEqual(parse_ns_headers([""]), [])
def test_bad_cookie_header(self):
def cookiejar_from_cookie_headers(headers):
from cookielib import CookieJar
from urllib2 import Request
c = CookieJar()
req = Request("http://www.example.com/")
r = FakeResponse(headers, "http://www.example.com/")
c.extract_cookies(r, req)
return c
# none of these bad headers should cause an exception to be raised
for headers in [
["Set-Cookie: "], # actually, nothing wrong with this
["Set-Cookie2: "], # ditto
# missing domain value
["Set-Cookie2: a=foo; path=/; Version=1; domain"],
# bad max-age
["Set-Cookie: b=foo; max-age=oops"],
# bad version
["Set-Cookie: b=foo; version=spam"],
]:
c = cookiejar_from_cookie_headers(headers)
# these bad cookies shouldn't be set
self.assertEqual(len(c), 0)
# cookie with invalid expires is treated as session cookie
headers = ["Set-Cookie: c=foo; expires=Foo Bar 12 33:22:11 2000"]
c = cookiejar_from_cookie_headers(headers)
cookie = c._cookies["www.example.com"]["/"]["c"]
self.assertTrue(cookie.expires is None)
class LWPCookieTests(TestCase):
# Tests taken from libwww-perl, with a few modifications and additions.
def test_netscape_example_1(self):
from cookielib import CookieJar, DefaultCookiePolicy
from urllib2 import Request
#-------------------------------------------------------------------
# First we check that it works for the original example at
# http://www.netscape.com/newsref/std/cookie_spec.html
# Client requests a document, and receives in the response:
#
# Set-Cookie: CUSTOMER=WILE_E_COYOTE; path=/; expires=Wednesday, 09-Nov-99 23:12:40 GMT
#
# When client requests a URL in path "/" on this server, it sends:
#
# Cookie: CUSTOMER=WILE_E_COYOTE
#
# Client requests a document, and receives in the response:
#
# Set-Cookie: PART_NUMBER=ROCKET_LAUNCHER_0001; path=/
#
# When client requests a URL in path "/" on this server, it sends:
#
# Cookie: CUSTOMER=WILE_E_COYOTE; PART_NUMBER=ROCKET_LAUNCHER_0001
#
# Client receives:
#
# Set-Cookie: SHIPPING=FEDEX; path=/fo
#
# When client requests a URL in path "/" on this server, it sends:
#
# Cookie: CUSTOMER=WILE_E_COYOTE; PART_NUMBER=ROCKET_LAUNCHER_0001
#
# When client requests a URL in path "/foo" on this server, it sends:
#
# Cookie: CUSTOMER=WILE_E_COYOTE; PART_NUMBER=ROCKET_LAUNCHER_0001; SHIPPING=FEDEX
#
# The last Cookie is buggy, because both specifications say that the
# most specific cookie must be sent first. SHIPPING=FEDEX is the
# most specific and should thus be first.
year_plus_one = time.localtime()[0] + 1
headers = []
c = CookieJar(DefaultCookiePolicy(rfc2965 = True))
#req = Request("http://1.1.1.1/",
# headers={"Host": "www.acme.com:80"})
req = Request("http://www.acme.com:80/",
headers={"Host": "www.acme.com:80"})
headers.append(
"Set-Cookie: CUSTOMER=WILE_E_COYOTE; path=/ ; "
"expires=Wednesday, 09-Nov-%d 23:12:40 GMT" % year_plus_one)
res = FakeResponse(headers, "http://www.acme.com/")
c.extract_cookies(res, req)
req = Request("http://www.acme.com/")
c.add_cookie_header(req)
self.assertEqual(req.get_header("Cookie"), "CUSTOMER=WILE_E_COYOTE")
self.assertEqual(req.get_header("Cookie2"), '$Version="1"')
headers.append("Set-Cookie: PART_NUMBER=ROCKET_LAUNCHER_0001; path=/")
res = FakeResponse(headers, "http://www.acme.com/")
c.extract_cookies(res, req)
req = Request("http://www.acme.com/foo/bar")
c.add_cookie_header(req)
h = req.get_header("Cookie")
self.assertIn("PART_NUMBER=ROCKET_LAUNCHER_0001", h)
self.assertIn("CUSTOMER=WILE_E_COYOTE", h)
headers.append('Set-Cookie: SHIPPING=FEDEX; path=/foo')
res = FakeResponse(headers, "http://www.acme.com")
c.extract_cookies(res, req)
req = Request("http://www.acme.com/")
c.add_cookie_header(req)
h = req.get_header("Cookie")
self.assertIn("PART_NUMBER=ROCKET_LAUNCHER_0001", h)
self.assertIn("CUSTOMER=WILE_E_COYOTE", h)
self.assertNotIn("SHIPPING=FEDEX", h)
req = Request("http://www.acme.com/foo/")
c.add_cookie_header(req)
h = req.get_header("Cookie")
self.assertIn("PART_NUMBER=ROCKET_LAUNCHER_0001", h)
self.assertIn("CUSTOMER=WILE_E_COYOTE", h)
self.assertTrue(h.startswith("SHIPPING=FEDEX;"))
def test_netscape_example_2(self):
from cookielib import CookieJar
from urllib2 import Request
# Second Example transaction sequence:
#
# Assume all mappings from above have been cleared.
#
# Client receives:
#
# Set-Cookie: PART_NUMBER=ROCKET_LAUNCHER_0001; path=/
#
# When client requests a URL in path "/" on this server, it sends:
#
# Cookie: PART_NUMBER=ROCKET_LAUNCHER_0001
#
# Client receives:
#
# Set-Cookie: PART_NUMBER=RIDING_ROCKET_0023; path=/ammo
#
# When client requests a URL in path "/ammo" on this server, it sends:
#
# Cookie: PART_NUMBER=RIDING_ROCKET_0023; PART_NUMBER=ROCKET_LAUNCHER_0001
#
# NOTE: There are two name/value pairs named "PART_NUMBER" due to
# the inheritance of the "/" mapping in addition to the "/ammo" mapping.
c = CookieJar()
headers = []
req = Request("http://www.acme.com/")
headers.append("Set-Cookie: PART_NUMBER=ROCKET_LAUNCHER_0001; path=/")
res = FakeResponse(headers, "http://www.acme.com/")
c.extract_cookies(res, req)
req = Request("http://www.acme.com/")
c.add_cookie_header(req)
self.assertEqual(req.get_header("Cookie"),
"PART_NUMBER=ROCKET_LAUNCHER_0001")
headers.append(
"Set-Cookie: PART_NUMBER=RIDING_ROCKET_0023; path=/ammo")
res = FakeResponse(headers, "http://www.acme.com/")
c.extract_cookies(res, req)
req = Request("http://www.acme.com/ammo")
c.add_cookie_header(req)
self.assertTrue(re.search(r"PART_NUMBER=RIDING_ROCKET_0023;\s*"
"PART_NUMBER=ROCKET_LAUNCHER_0001",
req.get_header("Cookie")))
def test_ietf_example_1(self):
from cookielib import CookieJar, DefaultCookiePolicy
#-------------------------------------------------------------------
# Then we test with the examples from draft-ietf-http-state-man-mec-03.txt
#
# 5. EXAMPLES
c = CookieJar(DefaultCookiePolicy(rfc2965=True))
#
# 5.1 Example 1
#
# Most detail of request and response headers has been omitted. Assume
# the user agent has no stored cookies.
#
# 1. User Agent -> Server
#
# POST /acme/login HTTP/1.1
# [form data]
#
# User identifies self via a form.
#
# 2. Server -> User Agent
#
# HTTP/1.1 200 OK
# Set-Cookie2: Customer="WILE_E_COYOTE"; Version="1"; Path="/acme"
#
# Cookie reflects user's identity.
cookie = interact_2965(
c, 'http://www.acme.com/acme/login',
'Customer="WILE_E_COYOTE"; Version="1"; Path="/acme"')
self.assertTrue(not cookie)
#
# 3. User Agent -> Server
#
# POST /acme/pickitem HTTP/1.1
# Cookie: $Version="1"; Customer="WILE_E_COYOTE"; $Path="/acme"
# [form data]
#
# User selects an item for ``shopping basket.''
#
# 4. Server -> User Agent
#
# HTTP/1.1 200 OK
# Set-Cookie2: Part_Number="Rocket_Launcher_0001"; Version="1";
# Path="/acme"
#
# Shopping basket contains an item.
cookie = interact_2965(c, 'http://www.acme.com/acme/pickitem',
'Part_Number="Rocket_Launcher_0001"; '
'Version="1"; Path="/acme"');
self.assertTrue(re.search(
r'^\$Version="?1"?; Customer="?WILE_E_COYOTE"?; \$Path="/acme"$',
cookie))
#
# 5. User Agent -> Server
#
# POST /acme/shipping HTTP/1.1
# Cookie: $Version="1";
# Customer="WILE_E_COYOTE"; $Path="/acme";
# Part_Number="Rocket_Launcher_0001"; $Path="/acme"
# [form data]
#
# User selects shipping method from form.
#
# 6. Server -> User Agent
#
# HTTP/1.1 200 OK
# Set-Cookie2: Shipping="FedEx"; Version="1"; Path="/acme"
#
# New cookie reflects shipping method.
cookie = interact_2965(c, "http://www.acme.com/acme/shipping",
'Shipping="FedEx"; Version="1"; Path="/acme"')
self.assertTrue(re.search(r'^\$Version="?1"?;', cookie))
self.assertTrue(re.search(r'Part_Number="?Rocket_Launcher_0001"?;'
'\s*\$Path="\/acme"', cookie))
self.assertTrue(re.search(r'Customer="?WILE_E_COYOTE"?;\s*\$Path="\/acme"',
cookie))
#
# 7. User Agent -> Server
#
# POST /acme/process HTTP/1.1
# Cookie: $Version="1";
# Customer="WILE_E_COYOTE"; $Path="/acme";
# Part_Number="Rocket_Launcher_0001"; $Path="/acme";
# Shipping="FedEx"; $Path="/acme"
# [form data]
#
# User chooses to process order.
#
# 8. Server -> User Agent
#
# HTTP/1.1 200 OK
#
# Transaction is complete.
cookie = interact_2965(c, "http://www.acme.com/acme/process")
self.assertTrue(
re.search(r'Shipping="?FedEx"?;\s*\$Path="\/acme"', cookie) and
"WILE_E_COYOTE" in cookie)
#
# The user agent makes a series of requests on the origin server, after
# each of which it receives a new cookie. All the cookies have the same
# Path attribute and (default) domain. Because the request URLs all have
# /acme as a prefix, and that matches the Path attribute, each request
# contains all the cookies received so far.
def test_ietf_example_2(self):
from cookielib import CookieJar, DefaultCookiePolicy
# 5.2 Example 2
#
# This example illustrates the effect of the Path attribute. All detail
# of request and response headers has been omitted. Assume the user agent
# has no stored cookies.
c = CookieJar(DefaultCookiePolicy(rfc2965=True))
# Imagine the user agent has received, in response to earlier requests,
# the response headers
#
# Set-Cookie2: Part_Number="Rocket_Launcher_0001"; Version="1";
# Path="/acme"
#
# and
#
# Set-Cookie2: Part_Number="Riding_Rocket_0023"; Version="1";
# Path="/acme/ammo"
interact_2965(
c, "http://www.acme.com/acme/ammo/specific",
'Part_Number="Rocket_Launcher_0001"; Version="1"; Path="/acme"',
'Part_Number="Riding_Rocket_0023"; Version="1"; Path="/acme/ammo"')
# A subsequent request by the user agent to the (same) server for URLs of
# the form /acme/ammo/... would include the following request header:
#
# Cookie: $Version="1";
# Part_Number="Riding_Rocket_0023"; $Path="/acme/ammo";
# Part_Number="Rocket_Launcher_0001"; $Path="/acme"
#
# Note that the NAME=VALUE pair for the cookie with the more specific Path
# attribute, /acme/ammo, comes before the one with the less specific Path
# attribute, /acme. Further note that the same cookie name appears more
# than once.
cookie = interact_2965(c, "http://www.acme.com/acme/ammo/...")
self.assertTrue(
re.search(r"Riding_Rocket_0023.*Rocket_Launcher_0001", cookie))
# A subsequent request by the user agent to the (same) server for a URL of
# the form /acme/parts/ would include the following request header:
#
# Cookie: $Version="1"; Part_Number="Rocket_Launcher_0001"; $Path="/acme"
#
# Here, the second cookie's Path attribute /acme/ammo is not a prefix of
# the request URL, /acme/parts/, so the cookie does not get forwarded to
# the server.
cookie = interact_2965(c, "http://www.acme.com/acme/parts/")
self.assertIn("Rocket_Launcher_0001", cookie)
self.assertNotIn("Riding_Rocket_0023", cookie)
def test_rejection(self):
# Test rejection of Set-Cookie2 responses based on domain, path, port.
from cookielib import DefaultCookiePolicy, LWPCookieJar
pol = DefaultCookiePolicy(rfc2965=True)
c = LWPCookieJar(policy=pol)
max_age = "max-age=3600"
# illegal domain (no embedded dots)
cookie = interact_2965(c, "http://www.acme.com",
'foo=bar; domain=".com"; version=1')
self.assertTrue(not c)
# legal domain
cookie = interact_2965(c, "http://www.acme.com",
'ping=pong; domain="acme.com"; version=1')
self.assertEqual(len(c), 1)
# illegal domain (host prefix "www.a" contains a dot)
cookie = interact_2965(c, "http://www.a.acme.com",
'whiz=bang; domain="acme.com"; version=1')
self.assertEqual(len(c), 1)
# legal domain
cookie = interact_2965(c, "http://www.a.acme.com",
'wow=flutter; domain=".a.acme.com"; version=1')
self.assertEqual(len(c), 2)
# can't partially match an IP-address
cookie = interact_2965(c, "http://125.125.125.125",
'zzzz=ping; domain="125.125.125"; version=1')
self.assertEqual(len(c), 2)
# illegal path (must be prefix of request path)
cookie = interact_2965(c, "http://www.sol.no",
'blah=rhubarb; domain=".sol.no"; path="/foo"; '
'version=1')
self.assertEqual(len(c), 2)
# legal path
cookie = interact_2965(c, "http://www.sol.no/foo/bar",
'bing=bong; domain=".sol.no"; path="/foo"; '
'version=1')
self.assertEqual(len(c), 3)
# illegal port (request-port not in list)
cookie = interact_2965(c, "http://www.sol.no",
'whiz=ffft; domain=".sol.no"; port="90,100"; '
'version=1')
self.assertEqual(len(c), 3)
# legal port
cookie = interact_2965(
c, "http://www.sol.no",
r'bang=wallop; version=1; domain=".sol.no"; '
r'port="90,100, 80,8080"; '
r'max-age=100; Comment = "Just kidding! (\"|\\\\) "')
self.assertEqual(len(c), 4)
# port attribute without any value (current port)
cookie = interact_2965(c, "http://www.sol.no",
'foo9=bar; version=1; domain=".sol.no"; port; '
'max-age=100;')
self.assertEqual(len(c), 5)
# encoded path
# LWP has this test, but unescaping allowed path characters seems
# like a bad idea, so I think this should fail:
## cookie = interact_2965(c, "http://www.sol.no/foo/",
## r'foo8=bar; version=1; path="/%66oo"')
# but this is OK, because '<' is not an allowed HTTP URL path
# character:
cookie = interact_2965(c, "http://www.sol.no/<oo/",
r'foo8=bar; version=1; path="/%3coo"')
self.assertEqual(len(c), 6)
# save and restore
filename = test_support.TESTFN
try:
c.save(filename, ignore_discard=True)
old = repr(c)
c = LWPCookieJar(policy=pol)
c.load(filename, ignore_discard=True)
finally:
try: os.unlink(filename)
except OSError: pass
self.assertEqual(old, repr(c))
def test_url_encoding(self):
# Try some URL encodings of the PATHs.
# (the behaviour here has changed from libwww-perl)
from cookielib import CookieJar, DefaultCookiePolicy
c = CookieJar(DefaultCookiePolicy(rfc2965=True))
interact_2965(c, "http://www.acme.com/foo%2f%25/%3c%3c%0Anew%E5/%E5",
"foo = bar; version = 1")
cookie = interact_2965(
c, "http://www.acme.com/foo%2f%25/<<%0anewå/æøå",
            'bar=baz; path="/foo/"; version=1')
version_re = re.compile(r'^\$version=\"?1\"?', re.I)
self.assertTrue("foo=bar" in cookie and version_re.search(cookie))
cookie = interact_2965(
c, "http://www.acme.com/foo/%25/<<%0anewå/æøå")
self.assertTrue(not cookie)
# unicode URL doesn't raise exception
cookie = interact_2965(c, u"http://www.acme.com/\xfc")
def test_mozilla(self):
# Save / load Mozilla/Netscape cookie file format.
from cookielib import MozillaCookieJar, DefaultCookiePolicy
year_plus_one = time.localtime()[0] + 1
filename = test_support.TESTFN
c = MozillaCookieJar(filename,
policy=DefaultCookiePolicy(rfc2965=True))
interact_2965(c, "http://www.acme.com/",
"foo1=bar; max-age=100; Version=1")
interact_2965(c, "http://www.acme.com/",
'foo2=bar; port="80"; max-age=100; Discard; Version=1')
interact_2965(c, "http://www.acme.com/", "foo3=bar; secure; Version=1")
expires = "expires=09-Nov-%d 23:12:40 GMT" % (year_plus_one,)
interact_netscape(c, "http://www.foo.com/",
"fooa=bar; %s" % expires)
interact_netscape(c, "http://www.foo.com/",
"foob=bar; Domain=.foo.com; %s" % expires)
interact_netscape(c, "http://www.foo.com/",
"fooc=bar; Domain=www.foo.com; %s" % expires)
def save_and_restore(cj, ignore_discard):
try:
cj.save(ignore_discard=ignore_discard)
new_c = MozillaCookieJar(filename,
DefaultCookiePolicy(rfc2965=True))
new_c.load(ignore_discard=ignore_discard)
finally:
try: os.unlink(filename)
except OSError: pass
return new_c
new_c = save_and_restore(c, True)
self.assertEqual(len(new_c), 6) # none discarded
self.assertIn("name='foo1', value='bar'", repr(new_c))
new_c = save_and_restore(c, False)
self.assertEqual(len(new_c), 4) # 2 of them discarded on save
self.assertIn("name='foo1', value='bar'", repr(new_c))
def test_netscape_misc(self):
# Some additional Netscape cookies tests.
from cookielib import CookieJar
from urllib2 import Request
c = CookieJar()
headers = []
req = Request("http://foo.bar.acme.com/foo")
# Netscape allows a host part that contains dots
headers.append("Set-Cookie: Customer=WILE_E_COYOTE; domain=.acme.com")
res = FakeResponse(headers, "http://www.acme.com/foo")
c.extract_cookies(res, req)
# and that the domain is the same as the host without adding a leading
# dot to the domain. Should not quote even if strange chars are used
# in the cookie value.
headers.append("Set-Cookie: PART_NUMBER=3,4; domain=foo.bar.acme.com")
res = FakeResponse(headers, "http://www.acme.com/foo")
c.extract_cookies(res, req)
req = Request("http://foo.bar.acme.com/foo")
c.add_cookie_header(req)
self.assertTrue(
"PART_NUMBER=3,4" in req.get_header("Cookie") and
"Customer=WILE_E_COYOTE" in req.get_header("Cookie"))
def test_intranet_domains_2965(self):
# Test handling of local intranet hostnames without a dot.
from cookielib import CookieJar, DefaultCookiePolicy
c = CookieJar(DefaultCookiePolicy(rfc2965=True))
interact_2965(c, "http://example/",
"foo1=bar; PORT; Discard; Version=1;")
cookie = interact_2965(c, "http://example/",
'foo2=bar; domain=".local"; Version=1')
self.assertIn("foo1=bar", cookie)
interact_2965(c, "http://example/", 'foo3=bar; Version=1')
cookie = interact_2965(c, "http://example/")
self.assertIn("foo2=bar", cookie)
self.assertEqual(len(c), 3)
def test_intranet_domains_ns(self):
from cookielib import CookieJar, DefaultCookiePolicy
c = CookieJar(DefaultCookiePolicy(rfc2965 = False))
interact_netscape(c, "http://example/", "foo1=bar")
cookie = interact_netscape(c, "http://example/",
'foo2=bar; domain=.local')
self.assertEqual(len(c), 2)
self.assertIn("foo1=bar", cookie)
cookie = interact_netscape(c, "http://example/")
self.assertIn("foo2=bar", cookie)
self.assertEqual(len(c), 2)
def test_empty_path(self):
from cookielib import CookieJar, DefaultCookiePolicy
from urllib2 import Request
# Test for empty path
# Broken web-server ORION/1.3.38 returns to the client response like
#
# Set-Cookie: JSESSIONID=ABCDERANDOM123; Path=
#
# ie. with Path set to nothing.
# In this case, extract_cookies() must set cookie to / (root)
c = CookieJar(DefaultCookiePolicy(rfc2965 = True))
headers = []
req = Request("http://www.ants.com/")
headers.append("Set-Cookie: JSESSIONID=ABCDERANDOM123; Path=")
res = FakeResponse(headers, "http://www.ants.com/")
c.extract_cookies(res, req)
req = Request("http://www.ants.com/")
c.add_cookie_header(req)
self.assertEqual(req.get_header("Cookie"),
"JSESSIONID=ABCDERANDOM123")
self.assertEqual(req.get_header("Cookie2"), '$Version="1"')
# missing path in the request URI
req = Request("http://www.ants.com:8080")
c.add_cookie_header(req)
self.assertEqual(req.get_header("Cookie"),
"JSESSIONID=ABCDERANDOM123")
self.assertEqual(req.get_header("Cookie2"), '$Version="1"')
def test_session_cookies(self):
from cookielib import CookieJar
from urllib2 import Request
year_plus_one = time.localtime()[0] + 1
# Check session cookies are deleted properly by
# CookieJar.clear_session_cookies method
req = Request('http://www.perlmeister.com/scripts')
headers = []
headers.append("Set-Cookie: s1=session;Path=/scripts")
headers.append("Set-Cookie: p1=perm; Domain=.perlmeister.com;"
"Path=/;expires=Fri, 02-Feb-%d 23:24:20 GMT" %
year_plus_one)
headers.append("Set-Cookie: p2=perm;Path=/;expires=Fri, "
"02-Feb-%d 23:24:20 GMT" % year_plus_one)
headers.append("Set-Cookie: s2=session;Path=/scripts;"
"Domain=.perlmeister.com")
headers.append('Set-Cookie2: s3=session;Version=1;Discard;Path="/"')
res = FakeResponse(headers, 'http://www.perlmeister.com/scripts')
c = CookieJar()
c.extract_cookies(res, req)
# How many session/permanent cookies do we have?
counter = {"session_after": 0,
"perm_after": 0,
"session_before": 0,
"perm_before": 0}
for cookie in c:
key = "%s_before" % cookie.value
counter[key] = counter[key] + 1
c.clear_session_cookies()
# How many now?
for cookie in c:
key = "%s_after" % cookie.value
counter[key] = counter[key] + 1
self.assertTrue(not (
# a permanent cookie got lost accidentally
counter["perm_after"] != counter["perm_before"] or
# a session cookie hasn't been cleared
counter["session_after"] != 0 or
# we didn't have session cookies in the first place
counter["session_before"] == 0))
def test_main(verbose=None):
test_support.run_unittest(
DateTimeTests,
HeaderTests,
CookieTests,
FileCookieJarTests,
LWPCookieTests,
)
if __name__ == "__main__":
test_main(verbose=True)
| ktan2020/legacy-automation | win/Lib/test/test_cookielib.py | Python | mit | 73,964 | 0.001731 |
from bs4 import BeautifulSoup
from requests import Session
from ..extensions import cache
# Settings
URL = 'http://emonitoring.poczta-polska.pl/wssClient.php'
# Init
SESSION = Session()
def get_number(real_number, s=None):
s = s or Session()
soup = BeautifulSoup(s.post(URL, data={'n': real_number}).text.encode('utf8'))
sledzenie = soup.find(id='sledzenie_td')
if sledzenie:
return {'no': real_number, 'meta': sledzenie, 'history': soup.find(id='zadarzenia_td')}
return False
def quest_number(nake_number, s=None):
key = "nake_number=%s" % (nake_number)
rv = cache.get(key)
if rv is None:
for i in range(0, 10):
data = get_number("00" + str(nake_number) + str(i), s)
if data:
rv = data
break
cache.set(key, rv, timeout=60*60*6) # 6 hours cache
return rv
def quest_range(start_string='00559007734046803928', end_string='0055900773404680394', s=None):
nake_start = int(start_string[0:19])
nake_end = int(end_string[0:19])
if nake_end-nake_start >= 50:
return []
result = []
for x in range(nake_start, nake_end):
result.append(quest_number(x))
return result
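# Illustrative usage sketch (not part of the original module): quest_number
# takes the tracking number without the leading "00" and without the final
# check digit, probes the ten possible check digits against the Poczta Polska
# tracking page, and caches the first hit for six hours. The number below is
# made up.
#
#     parcel = quest_number(55900773404680392)
#     if parcel:
#         print(parcel['no'], parcel['meta'].get_text())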
| ad-m/sledzenie_listow | sledzenie_listow/public/utils.py | Python | bsd-3-clause | 1,220 | 0.002459 |
# Copyright 2009-2010 Joshua Roesslein
# See LICENSE for details.
class WeibopError(Exception):
"""Weibopy exception"""
def __init__(self, reason):
self.reason = reason.encode('utf-8')
def __str__(self):
return self.reason
| sunner/buzz2weibo | weibopy/error.py | Python | mit | 256 | 0.003906 |
# -*- coding: utf-8 -*-
#
# SymPy documentation build configuration file, created by
# sphinx-quickstart.py on Sat Mar 22 19:34:32 2008.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# The contents of this file are pickled, so don't put values in the namespace
# that aren't pickleable (module imports are okay, they're removed automatically).
#
# All configuration values have a default value; values that are commented out
# serve to show the default value.
import sys
# If your extensions are in another directory, add it here.
sys.path.extend(['../sympy', 'ext'])
# General configuration
# ---------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.addons.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode', 'sphinx.ext.mathjax',
'numpydoc', 'sympylive',]
# Use this to use pngmath instead
#extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode', 'sphinx.ext.pngmath', ]
# MathJax file, which is free to use. See http://www.mathjax.org/docs/2.0/start.html
mathjax_path = 'https://c328740.ssl.cf1.rackcdn.com/mathjax/latest/MathJax.js?config=TeX-AMS_HTML-full'
# Add any paths that contain templates here, relative to this directory.
templates_path = ['.templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General substitutions.
project = 'SymPy'
copyright = '2008, 2009, 2010, 2011, 2012 SymPy Development Team'
# The default replacements for |version| and |release|, also used in various
# other places throughout the built documents.
#
# The short X.Y version.
version = '0.7.2'
# The full version, including alpha/beta/rc tags.
release = '0.7.2-git'
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
today_fmt = '%B %d, %Y'
# Translations:
locale_dirs = ["i18n/"]
# List of documents that shouldn't be included in the build.
#unused_docs = []
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# Options for HTML output
# -----------------------
# The style sheet to use for HTML and HTML Help pages. A file of that name
# must exist either in Sphinx' static/ path, or in one of the custom paths
# given in html_static_path.
html_style = 'default.css'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%b %d, %Y'
html_logo = '_static/sympylogo.png'
html_favicon = '../logo/SymPy-Favicon.ico'
html_theme_options = {'collapsiblesidebar': True}
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Content template for the index page.
#html_index = ''
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_use_modindex = True
# If true, the reST sources are included in the HTML build as _sources/<name>.
#html_copy_source = True
# Output file base name for HTML help builder.
htmlhelp_basename = 'SymPydoc'
# Options for LaTeX output
# ------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, document class [howto/manual], toctree_only).
# toctree_only is set to True so that the start file document itself is not included in the
# output, only the documents referenced by it via TOC trees. The extra stuff in the master
# document is intended to show up in the HTML, but doesn't really belong in the LaTeX output.
latex_documents = [('index', 'sympy-%s.tex' % release, 'SymPy Documentation',
'SymPy Development Team', 'manual', True)]
# Additional stuff for the LaTeX preamble.
# Tweaked to work with XeTeX.
latex_elements = {
'babel': '',
'fontenc': r'''
\usepackage{amssymb}
\usepackage{fontspec}
\defaultfontfeatures{Mapping=tex-text}
\setmainfont{DejaVu Serif}
\setsansfont{DejaVu Sans}
\setmonofont{DejaVu Sans Mono}
''',
'fontpkg': '',
'inputenc': '',
'utf8extra': '',
'preamble': ''
}
# SymPy logo on title page
latex_logo = '_static/sympylogo.png'
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# Show page numbers next to internal references
latex_show_pagerefs = True
# We use False otherwise the module index gets generated twice.
latex_use_modindex = False
default_role = 'math'
pngmath_divpng_args = ['-gamma 1.5','-D 110']
# Note, this is ignored by the mathjax extension
# Any \newcommand should be defined in the file
pngmath_latex_preamble = '\\usepackage{amsmath}\n'+\
'\\usepackage{bm}\n'+\
'\\usepackage{amsfonts}\n'+\
'\\usepackage{amssymb}\n'+\
'\\setlength{\\parindent}{0pt}\n'
texinfo_documents = [
(master_doc, 'sympy', 'SymPy Documentation',
'SymPy Development Team',
'SymPy', 'Computer algebra system (CAS) in Python', 'Programming',
1),
]
| flacjacket/sympy | doc/src/conf.py | Python | bsd-3-clause | 6,093 | 0.005416 |
from __future__ import unicode_literals
from unittest import TestCase
from shutil import rmtree
from tempfile import mkdtemp
from os import makedirs
from os.path import join, exists, dirname
from awsfabrictasks.s3.api import dirlist_absfilenames
from awsfabrictasks.s3.api import localpath_to_s3path
from awsfabrictasks.s3.api import s3path_to_localpath
def makefile(tempdir, path, contents):
path = join(tempdir, *path.split('/'))
if not exists(dirname(path)):
makedirs(dirname(path))
open(path, 'wb').write(contents.encode('utf-8'))
return path
class TestDirlistAbsfilenames(TestCase):
def setUp(self):
self.tempdir = mkdtemp()
files = (('hello/world.txt', 'Hello world'),
('test.py', 'print "test"'),
('hello/cruel/world.txt', 'Cruel?'))
self.paths = set()
for path, contents in files:
realpath = makefile(self.tempdir, path, contents)
self.paths.add(realpath)
def tearDown(self):
rmtree(self.tempdir)
def test_dirlist_absfilenames(self):
result = dirlist_absfilenames(self.tempdir)
self.assertEquals(result, self.paths)
class TestLocalpathToS3path(TestCase):
def setUp(self):
self.tempdir = mkdtemp()
makefile(self.tempdir, 'hello/world.txt', '')
def tearDown(self):
rmtree(self.tempdir)
def test_localpath_to_s3path(self):
s3path = localpath_to_s3path(self.tempdir, join(self.tempdir, 'hello/world.txt'), 'my/test')
self.assertEquals(s3path, 'my/test/hello/world.txt')
def test_s3path_to_localpath(self):
localpath = s3path_to_localpath('mydir/', 'mydir/hello/world.txt', join(self.tempdir, 'my', 'test'))
self.assertEquals(localpath, join(self.tempdir, 'my', 'test', 'hello', 'world.txt'))
| espenak/awsfabrictasks | awsfabrictasks/tests/s3/test_api.py | Python | bsd-3-clause | 1,830 | 0.002186 |
## begin license ##
#
# "Meresco Harvester" consists of two subsystems, namely an OAI-harvester and
# a web-control panel.
# "Meresco Harvester" is originally called "Sahara" and was developed for
# SURFnet by:
# Seek You Too B.V. (CQ2) http://www.cq2.nl
#
# Copyright (C) 2006-2007 SURFnet B.V. http://www.surfnet.nl
# Copyright (C) 2007-2008 SURF Foundation. http://www.surf.nl
# Copyright (C) 2007-2009, 2011 Seek You Too (CQ2) http://www.cq2.nl
# Copyright (C) 2007-2009 Stichting Kennisnet Ict op school. http://www.kennisnetictopschool.nl
# Copyright (C) 2009 Tilburg University http://www.uvt.nl
# Copyright (C) 2011, 2020-2021 Stichting Kennisnet https://www.kennisnet.nl
# Copyright (C) 2020-2021 Data Archiving and Network Services https://dans.knaw.nl
# Copyright (C) 2020-2021 SURF https://www.surf.nl
# Copyright (C) 2020-2021 Seecr (Seek You Too B.V.) https://seecr.nl
# Copyright (C) 2020-2021 The Netherlands Institute for Sound and Vision https://beeldengeluid.nl
#
# This file is part of "Meresco Harvester"
#
# "Meresco Harvester" is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# "Meresco Harvester" is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with "Meresco Harvester"; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
## end license ##
import unittest
import datetime, tempfile, os, shutil
from meresco.harvester.throughputanalyser import parseToTime, ThroughputAnalyser, ThroughputReport
class ThroughputAnalyserTest(unittest.TestCase):
def setUp(self):
self.mockAnalyseRepository_arguments = []
self.testdir = os.path.join(tempfile.gettempdir(), 'throughputanalysertest')
not os.path.isdir(self.testdir) and os.makedirs(self.testdir)
def tearDown(self):
shutil.rmtree(self.testdir)
def testParseToTime(self):
timeString = "1999-12-03 12:34:35.123"
date = parseToTime(timeString)
self.assertEqual((1999,12,3,12,34,35,123000), (date.year,date.month,date.day, date.hour, date.minute, date.second, date.microsecond))
date = parseToTime("2006-08-04 10:40:50.644")
self.assertEqual((2006,8,4,10,40,50,644000), (date.year,date.month,date.day, date.hour, date.minute, date.second, date.microsecond))
def testParseToTimeDiff(self):
date1 = parseToTime("1999-12-03 12:34:35.123")
date2 = parseToTime("1999-12-03 12:34:36.423")
delta = date2 - date1
self.assertEqual(1.3, delta.seconds + delta.microseconds/1000000.0)
def testAnalyse(self):
t = ThroughputAnalyser(eventpath = self.testdir)
t._analyseRepository = self.mockAnalyseRepository
report = t.analyse(['repo1','repo2'], '2006-08-31')
self.assertEqual(1000, report.records)
self.assertEqual(2000.0, report.seconds)
self.assertEqual(['repo1', 'repo2'], self.mockAnalyseRepository_arguments)
def testAnalyseNothing(self):
t = ThroughputAnalyser(eventpath = self.testdir)
t._analyseRepository = self.mockAnalyseRepository
report = t.analyse([], '2006-08-31')
self.assertEqual(0, report.records)
self.assertEqual(0.0, report.seconds)
self.assertEqual('-' , report.recordsPerSecond())
self.assertEqual('-' , report.recordsPerDay())
def testAnalyseRepository(self):
r = open(os.path.join(self.testdir, 'repo1.events'), 'w')
try:
r.write("""
[2006-08-30 00:00:15.500] ENDHARVEST [repo1]
[2006-08-30 01:00:00.000] STARTHARVEST [repo1] Uploader connected ...
[2006-08-30 01:00:10.000] SUCCES [repo1] Harvested/Uploaded/Deleted/Total: 200/200/0/1000, ResumptionToken: r1
[2006-08-30 01:00:15.500] ENDHARVEST [repo1]
[2006-08-31 01:00:00.000] STARTHARVEST [repo1] Uploader connected ...
[2006-08-31 01:00:10.000] SUCCES [repo1] Harvested/Uploaded/Deleted/Total: 200/200/0/1200, ResumptionToken: r1
[2006-08-31 01:00:15.500] ENDHARVEST [repo1]
[2006-08-31 02:00:00.000] STARTHARVEST [repo1] Uploader connected ...
[2006-08-31 02:00:10.000] SUCCES [repo1] Harvested/Uploaded/Deleted/Total: 200/200/0/1400, ResumptionToken: r2
[2006-08-31 02:00:25.500] ENDHARVEST [repo1]
[2006-08-31 03:00:00.000] STARTHARVEST [repo1] Uploader connected ...
[2006-08-31 03:00:10.000] SUCCES [repo1] Harvested/Uploaded/Deleted/Total: 200/200/0/1600, ResumptionToken: r3
[2006-08-31 03:00:35.500] ENDHARVEST [repo1]
""")
finally:
r.close()
t = ThroughputAnalyser(eventpath = self.testdir)
records, seconds = t._analyseRepository('repo1', '2006-08-31')
self.assertEqual(600, records)
self.assertEqual(76.5, seconds)
def testAnalyseNonExistingRepository(self):
t = ThroughputAnalyser(eventpath = self.testdir)
records, seconds = t._analyseRepository('repository', '2006-08-31')
self.assertEqual(0, records)
self.assertEqual(0.0, seconds)
def testReportOnEmptyEventsFile(self):
t = ThroughputAnalyser(eventpath = self.testdir)
records, seconds = t._analyseRepository('repo1', '2006-08-31')
self.assertEqual(0, records)
self.assertEqual(0, seconds)
def testReport(self):
report = ThroughputReport()
report.add(100000,10000.0)
self.assertEqual('10.00', report.recordsPerSecond())
self.assertEqual('864000', report.recordsPerDay())
self.assertEqual("02:46:40", report.hmsString())
#Mock self shunt
def mockAnalyseRepository(self, repositoryName, dateSince):
self.mockAnalyseRepository_arguments.append(repositoryName)
return 500, 1000.0
| seecr/meresco-harvester | test/throughputanalysertest.py | Python | gpl-2.0 | 6,185 | 0.011318 |
# -*- coding: utf-8 -*-
#
# Copyright (c) 2010-2010 by drubin <drubin at smartcube.co.za>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Stores version controlled history of your WeeChat configuration files
#Versions
# 0.1 drubin - First release.
# - Basic functionality to save version history of your config files (only git, bzr)
# 0.2 ShockkPony - Fixed massive weechat startup time caused by initial config loading
SCRIPT_NAME = "confversion"
SCRIPT_AUTHOR = "drubin <drubin at smartcube.co.za>"
SCRIPT_VERSION = "0.2"
SCRIPT_LICENSE = "GPL3"
SCRIPT_DESC = "Stores version controlled history of your configuration files"
import_ok = True
import subprocess
try:
import weechat
except ImportError:
print "This script must be run under WeeChat."
print "Get WeeChat now at: http://www.weechat.org/"
import_ok = False
# script options
settings = {
    # Currently supports git and bzr, and possibly others that support a simple "init", "add *.conf", "commit -m <message>" sequence
"versioning_method" : "git",
"commit_each_change" : "true",
"commit_message" : "Commiting changes",
    # Allows you to skip auto-committing changes that relate to these config options.
    # Comma-separated list of config options.
    # The toggle_nicklist script can make this property annoying.
"auto_commit_ignore" : "weechat.bar.nicklist.hidden",
}
def shell_in_home(cmd):
try:
output = file("/dev/null","w")
subprocess.Popen(ver_method()+" "+cmd, cwd = weechat_home(),
stdout= output, stderr=output, shell=True)
except Exception as e:
print e
def weechat_home():
return weechat.info_get ("weechat_dir", "")
def ver_method():
return weechat.config_get_plugin("versioning_method")
def init_repo():
#Set up version control (doesn't matter if previously setup for bzr, git)
shell_in_home("init")
#Save first import OR on start up if needed.
commit_cb()
confversion_commit_finish_hook = 0
def commit_cb(data=None, remaning=None):
global confversion_commit_finish_hook
# only hook timer if not already hooked
if confversion_commit_finish_hook == 0:
confversion_commit_finish_hook = weechat.hook_timer(500, 0, 1, "commit_cb_finish", "")
return weechat.WEECHAT_RC_OK
def commit_cb_finish(data=None, remaining=None):
global confversion_commit_finish_hook
# save before doing commit
weechat.command("","/save")
# add all config changes to git
shell_in_home("add ./*.conf")
# do the commit
shell_in_home("commit -m \"%s\"" % weechat.config_get_plugin("commit_message"))
# set hook back to 0
confversion_commit_finish_hook = 0
return weechat.WEECHAT_RC_OK
def conf_update_cb(data, option, value):
#Commit data if not part of ignore list.
if weechat.config_get_plugin("commit_each_change") == "true" and not option in weechat.config_get_plugin("auto_commit_ignore").split(","):
        # Use a short pause, otherwise /save would run before the config is actually saved to disk.
        # This is a bit of a hack; a better approach would be appreciated.
weechat.hook_timer(500, 0, 1, "commit_cb", "")
return weechat.WEECHAT_RC_OK
def confversion_cmd(data, buffer, args):
commit_cb()
return weechat.WEECHAT_RC_OK
if weechat.register(SCRIPT_NAME, SCRIPT_AUTHOR, SCRIPT_VERSION, SCRIPT_LICENSE,
SCRIPT_DESC, "", ""):
for option, default_value in settings.iteritems():
if weechat.config_get_plugin(option) == "":
weechat.config_set_plugin(option, default_value)
weechat.hook_command("confversion", "Saves configurations to version control", "",
"",
"", "confversion_cmd", "")
init_repo()
hook = weechat.hook_config("*", "conf_update_cb", "")
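# Roughly, a sketch of what the calls above amount to when the versioning
# method is git (bzr understands the same subcommands used here), run inside
# the WeeChat home directory (~/.weechat by default):
#
#     git init
#     git add ./*.conf
#     git commit -m "Committing changes"
#
# with the add/commit pair repeated whenever a non-ignored config option
# changes and commit_each_change is "true".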
| qguv/config | weechat/plugins/python/confversion.py | Python | gpl-3.0 | 4,417 | 0.010414 |
# Copyright (C) 2001-2006 Python Software Foundation
# Author: Barry Warsaw
# Contact: email-sig@python.org
"""email package exception classes."""
class MessageError(Exception):
"""Base class for errors in the email package."""
class MessageParseError(MessageError):
"""Base class for message parsing errors."""
class HeaderParseError(MessageParseError):
"""Error while parsing headers."""
class BoundaryError(MessageParseError):
"""Couldn't find terminating boundary."""
class MultipartConversionError(MessageError, TypeError):
"""Conversion to a multipart is prohibited."""
class CharsetError(MessageError):
"""An illegal charset was given."""
# These are parsing defects which the parser was able to work around.
class MessageDefect:
"""Base class for a message defect."""
def __init__(self, line=None):
self.line = line
class NoBoundaryInMultipartDefect(MessageDefect):
"""A message claimed to be a multipart but had no boundary parameter."""
class StartBoundaryNotFoundDefect(MessageDefect):
"""The claimed start boundary was never found."""
class FirstHeaderLineIsContinuationDefect(MessageDefect):
"""A message had a continuation line as its first header line."""
class MisplacedEnvelopeHeaderDefect(MessageDefect):
"""A 'Unix-from' header was found in the middle of a header block."""
class MalformedHeaderDefect(MessageDefect):
"""Found a header that was missing a colon, or was otherwise malformed."""
class MultipartInvariantViolationDefect(MessageDefect):
"""A message claimed to be a multipart but no subparts were found."""
| nmercier/linux-cross-gcc | win32/bin/Lib/email/errors.py | Python | bsd-3-clause | 1,685 | 0.005341 |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2009 Andrew Resch <andrewresch@gmail.com>
#
# This file is part of Deluge and is licensed under GNU General Public License 3.0, or later, with
# the additional special exception to link portions of this program with the OpenSSL library.
# See LICENSE for more details.
#
import logging
import deluge.component as component
log = logging.getLogger(__name__)
class EventManager(component.Component):
def __init__(self):
component.Component.__init__(self, "EventManager")
self.handlers = {}
def emit(self, event):
"""
Emits the event to interested clients.
:param event: DelugeEvent
"""
# Emit the event to the interested clients
component.get("RPCServer").emit_event(event)
# Call any handlers for the event
if event.name in self.handlers:
for handler in self.handlers[event.name]:
# log.debug("Running handler %s for event %s with args: %s", event.name, handler, event.args)
try:
handler(*event.args)
except Exception as ex:
log.error("Event handler %s failed in %s with exception %s", event.name, handler, ex)
def register_event_handler(self, event, handler):
"""
Registers a function to be called when a `:param:event` is emitted.
:param event: str, the event name
:param handler: function, to be called when `:param:event` is emitted
"""
if event not in self.handlers:
self.handlers[event] = []
if handler not in self.handlers[event]:
self.handlers[event].append(handler)
def deregister_event_handler(self, event, handler):
"""
Deregisters an event handler function.
:param event: str, the event name
:param handler: function, currently registered to handle `:param:event`
"""
if event in self.handlers and handler in self.handlers[event]:
self.handlers[event].remove(handler)
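# Illustrative usage sketch (not part of this module): handlers are keyed by
# event name and called with the event's args unpacked, so a handler's
# signature has to match the args of the event it subscribes to. The event
# name and handler below are examples only.
#
#     def on_torrent_added(torrent_id):
#         log.info("Torrent added: %s", torrent_id)
#
#     event_manager = component.get("EventManager")
#     event_manager.register_event_handler("TorrentAddedEvent", on_torrent_added)
#     ...
#     event_manager.deregister_event_handler("TorrentAddedEvent", on_torrent_added)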
| bendykst/deluge | deluge/core/eventmanager.py | Python | gpl-3.0 | 2,071 | 0.001931 |
import io
import os
import shlex
import six
from ..utils import memoize
from ..conf import settings
class Generic(object):
def get_aliases(self):
return {}
def _expand_aliases(self, command_script):
aliases = self.get_aliases()
binary = command_script.split(' ')[0]
if binary in aliases:
return command_script.replace(binary, aliases[binary], 1)
else:
return command_script
def from_shell(self, command_script):
"""Prepares command before running in app."""
return self._expand_aliases(command_script)
def to_shell(self, command_script):
"""Prepares command for running in shell."""
return command_script
def app_alias(self, fuck):
return "alias {0}='eval $(TF_ALIAS={0} PYTHONIOENCODING=utf-8 " \
"thefuck $(fc -ln -1))'".format(fuck)
def _get_history_file_name(self):
return ''
def _get_history_line(self, command_script):
return ''
@memoize
def get_history(self):
return list(self._get_history_lines())
def _get_history_lines(self):
"""Returns list of history entries."""
history_file_name = self._get_history_file_name()
if os.path.isfile(history_file_name):
with io.open(history_file_name, 'r',
encoding='utf-8', errors='ignore') as history_file:
lines = history_file.readlines()
if settings.history_limit:
lines = lines[-settings.history_limit:]
for line in lines:
prepared = self._script_from_history(line) \
.strip()
if prepared:
yield prepared
def and_(self, *commands):
return u' && '.join(commands)
def how_to_configure(self):
return
def split_command(self, command):
"""Split the command using shell-like syntax."""
if six.PY2:
return [s.decode('utf8') for s in shlex.split(command.encode('utf8'))]
return shlex.split(command)
def quote(self, s):
"""Return a shell-escaped version of the string s."""
if six.PY2:
from pipes import quote
else:
from shlex import quote
return quote(s)
def _script_from_history(self, line):
return line
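# Illustrative sketch (not part of thefuck): a concrete shell subclasses
# Generic and overrides at least alias discovery and the history file
# location; the class name and history path below are hypothetical.
#
#     class MyShell(Generic):
#         def get_aliases(self):
#             return {'ll': 'ls -l'}
#
#         def _get_history_file_name(self):
#             return os.path.expanduser('~/.myshell_history')
#
#         def _script_from_history(self, line):
#             return line.strip()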
| PLNech/thefuck | thefuck/shells/generic.py | Python | mit | 2,400 | 0.000417 |
###
# Copyright (c) 2005, Ali Afshar
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
from twisted.cred import portal, checkers, credentials, error
from twisted.conch.checkers import SSHPublicKeyDatabase
from twisted.conch.credentials import ISSHPrivateKey
from twisted.python import failure
from twisted.internet import defer
from twisted.conch.ssh import keys
class SBCredChecker(object):
""" SSH Username and Password Credential checker """
# this implements line tells the portal that we can handle un/pw
__implements__ = (checkers.ICredentialsChecker,)
credentialInterfaces = (credentials.IUsernamePassword,)
    def requestAvatarId(self, credentials):
        """ Return an avatar id or return an error """
        self.cb.log.debug('twisted checker checking %s',
            credentials.username)
a = self.cb.getUser(protocol=self.cb.PROTOCOL,
username=credentials.username,
password=credentials.password,
peer=credentials.peer)
if a:
return a
else:
return failure.Failure(error.UnauthorizedLogin())
class SBPublicKeyChecker(object):
""" Public key checker """
__implements__ = (checkers.ICredentialsChecker,)
credentialInterfaces = (ISSHPrivateKey,)
def requestAvatarId(self, credentials):
a = self.cb.getUser(protocol=self.cb.PROTOCOL,
username=credentials.username,
blob=credentials.blob,
peer=credentials.peer)
#except:
# pass
if a:
return a
else:
return failure.Failure(error.UnauthorizedLogin())
#class SBPublicKeyChecker(SSHPublicKeyDatabase):
# credentialInterfaces = ISSHPrivateKey,
# __implements__ = ICredentialsChecker
#
# def requestAvatarId(self, credentials):
# if not self.checkKey(credentials):
# return defer.fail(UnauthorizedLogin())
# if not credentials.signature:
# return defer.fail(error.ValidPublicKey())
# else:
# try:
# pubKey = keys.getPublicKeyObject(data = credentials.blob)
# if keys.verifySignature(pubKey, credentials.signature,
# credentials.sigData):
# return defer.succeed(credentials.username)
# except:
# pass
# return defer.fail(UnauthorizedLogin())
#
# def checkKey(self, credentials):
# sshDir = os.path.expanduser('~%s/.ssh/' % credentials.username)
# if sshDir.startswith('~'): # didn't expand
# return 0
# uid, gid = os.geteuid(), os.getegid()
# ouid, ogid = pwd.getpwnam(credentials.username)[2:4]
# os.setegid(0)
# os.seteuid(0)
# os.setegid(ogid)
# os.seteuid(ouid)
# for name in ['authorized_keys2', 'authorized_keys']:
# if not os.path.exists(sshDir+name):
# continue
# lines = open(sshDir+name).xreadlines()
# os.setegid(0)
# os.seteuid(0)
# os.setegid(gid)
# os.seteuid(uid)
# for l in lines:
# l2 = l.split()
# if len(l2) < 2:
# continue
# try:
# if base64.decodestring(l2[1]) == credentials.blob:
# return 1
# except binascii.Error:
# continue
# return 0
class SBPortal(portal.Portal):
pass
class SBRealm:
__implements__ = portal.IRealm
def __init__(self, userclass):
self.userclass = userclass
def requestAvatar(self, avatarId, mind, *interfaces):
self.cb.cb.log.critical('%s', interfaces)
av = self.userclass(avatarId)
av.cb = self.cb
return interfaces[0], av, lambda: None
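# Illustrative wiring sketch (not part of this plugin): the realm and checkers
# above are meant to be combined into a portal roughly as below; MyAvatar is a
# hypothetical avatar class, and each object still needs its `cb` attribute
# set by the plugin before authentication is attempted.
#
#     realm = SBRealm(MyAvatar)
#     sbportal = SBPortal(realm)
#     sbportal.registerChecker(SBCredChecker())
#     sbportal.registerChecker(SBPublicKeyChecker())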
# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79:
| kg-bot/SupyBot | plugins/Gateway/gwcred.py | Python | gpl-3.0 | 5,416 | 0.003693 |
from setuptools import setup, find_packages
setup(
name="RaspberryRacer",
version="0.1",
description="Raspberry Racer",
author="Diez B. Roggisch",
author_email="deets@web.de",
entry_points= {
'console_scripts' : [
'rracer = rracer.main:main',
]},
install_requires = [
],
zip_safe=True,
packages=find_packages(),
classifiers = [
'Development Status :: 3 - Alpha',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
)
| deets/raspberry-racer | python/setup.py | Python | mit | 559 | 0.014311 |
###############################################################################
# Insert
###############################################################################
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from bs4 import BeautifulSoup
import json
import os.path
import random
import requests
import web
from xml.sax import saxutils
import httpconfig
import copy  # used by Insert.parseOpts
class DeprecatedError(Exception):
    """Raised by the deprecated PathCreateOld code paths below."""
    pass
class InsertFactory(object):
def CreateInsert(self, ccssType, action, opts, data):
web.debug("CreateInsert")
web.debug(" ccssType=%s" % ccssType)
web.debug(" action=%s" % action)
web.debug(" opts=%r" % opts)
web.debug(" data=%r" % data)
web.debug("CreateInsert: ")
web.debug("CreateInsert: data = %s" % data)
print("CreateInsert: data = %s" % data)
insert = None
if ccssType == "/initiative":
if action == "/create":
insert = InitiativeCreate(opts, data)
elif action == "/update":
insert = InitiativeUpdate(opts, data)
elif ccssType == "/framework":
if action == "/create":
insert = FrameworkCreate(opts, data)
elif action == "/update":
insert = FrameworkUpdate(opts, data)
elif ccssType == "/set":
if action == "/create":
insert = SetCreate(opts, data)
elif action == "/update":
insert = SetUpdate(opts, data)
elif ccssType == "/grade_level":
if action == "/create":
insert = GradeCreate(opts, data)
elif action == "/update":
insert = GradeUpdate(opts, data)
elif ccssType == "/domain":
if action == "/create":
insert = DomainCreate(opts, data)
elif action == "/update":
insert = DomainUpdate(opts, data)
elif ccssType == "/cluster":
if action == "/create":
insert = ClusterCreate(opts, data)
elif action == "/update":
insert = ClusterUpdate(opts, data)
elif ccssType == "/standard":
if action == "/create":
insert = StandardCreate(opts, data)
elif action == "/update":
insert = StandardUpdate(opts, data)
else:
raise web.NoMethod()
elif ccssType == "/standard_component":
if action == "/create":
insert = ComponentCreate(opts, data)
elif action == "/update":
insert = ComponentUpdate(opts, data)
elif ccssType == "/strand":
if action == "/create":
insert = StrandCreate(opts, data)
elif action == "/update":
insert = StrandUpdate(opts, data)
elif ccssType == "/anchor_standard_section":
if action == "/create":
insert = SectionCreate(opts, data)
elif action == "/update":
insert = SectionUpdate(opts, data)
elif ccssType == "/anchor_standard":
if action == "/create":
insert = AnchorCreate(opts, data)
elif action == "/update":
insert = AnchorUpdate(opts, data)
elif ccssType == "/competency_path":
web.debug("CreateInsert: Matched on type: %s" % ccssType)
if action == "/create":
web.debug("CreateInsert: Matched on action: %s" % action)
insert = PathCreate(opts, data)
else:
raise web.NoMethod()
elif ccssType == "/learning_resource":
if action == "/create":
insert = ResourceCreate(opts, data)
else:
raise web.NoMethod()
elif ccssType == "/competency_container":
if action == "/create":
insert = ContainerCreate(opts, data)
elif action == "/update":
insert = ContainerUpdate(opts, data)
elif ccssType == "/competency":
if action == "/create":
insert = CompetencyCreate(opts, data)
elif action == "/update":
insert = CompetencyUpdate(opts, data)
elif ccssType == "/strand":
if action == "/create":
insert = StrandCreate(opts, data)
elif action == "/update":
insert = StrandUpdate(opts, data)
else:
raise web.NoMethod()
web.debug("CreateInsert: insert = %r" % insert)
return insert
class Insert(object):
"""Base class for inserts"""
def __init__(self, type, opts, data, httpConfig=None):
web.debug("Insert.__init__")
self.type = type
self.path = "/entity/create"
self.opts = opts
if not "access_token" in opts:
opts["access_token"] = "letmein"
if not "admin_access_tokens" in opts:
opts["admin_access_tokens"] = {"letmein":"LRI_ADMIN_USER_0"}
self.data = data
self.returnFormat = opts.get("format", "xml")
self.inserts = []
self.httpConfig = httpConfig
if self.httpConfig is None:
self.httpConfig = httpconfig.HttpConfig(web.ctx.env["DOCUMENT_ROOT"])
def __repr__(self):
return "Insert(%s, %r, %r, httpConfig=%r)" % (self.type,
self.opts,
self.data,
self.httpConfig)
def __str__(self):
return """Insert:
type=%s,
opts=%r,
data=%r,
returnFormat=%s,
inserts=%r,
httpConfig=%r,
urls=%r""" % (self.type,
self.opts,
self.data,
self.returnFormat,
self.inserts,
self.httpConfig,
self.getUrls())
def buildUrn(self, parts):
parts.insert(0, "urn")
return ":".join(parts)
def buildId(self, namespace, uid):
parts = (namespace, self.type, uid)
return self.buildUrn(parts)
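    # For example, on an Insert whose type is "grade_level",
    # buildId("ccss", "math:K") returns "urn:ccss:grade_level:math:K"
    # (the namespace and uid here are illustrative).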
def toUrlForm(self, insert):
decodedOpts = json.dumps(self.opts)
web.debug("Insert.toUrlForm: decodedOpts = %s" % decodedOpts)
self.url = "http://%s:%d%s?q=%s&opts=%s" % (self.httpConfig.config["serverhost"],
self.httpConfig.config["serverport"],
self.path,
insert,
decodedOpts)
return self.url
def getUrls(self):
"""Returns URL that can be sent to LRI server"""
urls = []
for insert in self.inserts:
urls.append(self.toUrlForm(insert))
return urls
def getBaseProps(self, soup):
"""Extract common LRI properties from XML"""
props = {}
try:
key = "urn:lri:property_type:contains"
props["children"] = [x.getText().strip() for x in soup.find(key=key).find_all("value")]
except AttributeError, e:
web.debug("Key not found: %s: %r" % (key, e))
props["creator"] = soup.find(key="urn:lri:property_type:creator").getText().strip()
props["desc"] = soup.find(key="urn:lri:property_type:description").getText().strip()
props["id"] = soup.find(key="urn:lri:property_type:id").getText().strip()
props["name"] = soup.find(key="urn:lri:property_type:name").getText().strip()
props["uid"] = soup.find(key="uid").getText().strip()
web.debug("Insert.getBaseProps: base props = %r" % props)
return props
def parseXml(self, xml):
"""Parses XML containing new entity"""
xml = saxutils.unescape(xml)
soup = BeautifulSoup(xml)
props = self.getBaseProps(soup)
return props
def parseJson(self, decodedJson):
raise NotImplementedError("Insert.parseJson is not implemented")
def buildInserts(self, props):
"""Builds LRI inserts using props from parsed content"""
query = {}
if "children" in props:
query["urn:lri:property_type:contains"] = props["children"]
query["urn:lri:property_type:creator"] = props["creator"]
query["urn:lri:property_type:description"] = props["desc"]
query["urn:lri:property_type:id"] = props["id"]
query["urn:lri:property_type:name"] = props["name"]
query["urn:lri:property_type:types"] = ["urn:lri:entity_type:competency",
"urn:lri:entity_type:learning_objective",
"urn:lri:entity_type:thing"]
web.debug("Insert.buildInserts: query = %s" % query)
return query
def getInserts(self, contentFormat, content):
"""Returns LRI inserts from unparsed content"""
props = {}
if contentFormat == "xml":
props = self.parseXml(content)
elif contentFormat == "json":
raise web.BadRequest
else:
raise web.BadRequest
qList = self.buildInserts(props)
return qList
def parseOpts(self, opts, key):
"""Extracts key from opts and returns value, modified opts"""
ID = None
optsCopy = copy.deepcopy(opts)
if optsCopy.has_key(key):
ID = optsCopy[key]
del(optsCopy[key])
return ID, optsCopy
class PathCreateOld(Insert):
def __init__(self, opts, data):
raise DeprecatedError("PathCreateOld is deprecated")
Insert.__init__(self, "competency_path", opts, data)
self.path = "/entity/create"
self.opts = opts
if not "access_token" in opts:
opts["access_token"] = "letmein"
if not "admin_access_tokens" in opts:
opts["admin_access_tokens"] = {"letmein":"LRI_ADMIN_USER_0"}
self.data = data
self.returnFormat = opts.get("format", "xml")
self.inserts = self.getInserts(self.returnFormat, self.data)
web.debug(repr(self))
web.debug(str(self))
web.debug("PathCreateOld.__init__: urls = %r" % self.getUrls())
def parseXml(self, xml):
raise DeprecatedError("PathCreateOld is deprecated")
xml = saxutils.unescape(xml)
soup = BeautifulSoup(xml)
path_id = soup.find(key="urn:lri:property_type:id").getText().strip()
path_name = soup.find(key="urn:lri:property_type:name").getText().strip()
path_desc = soup.find(key="urn:lri:property_type:description").getText().strip()
author_id = soup.find(key="urn:lri:property_type:authored_by").getText().strip()
competency_list = [x.getText().strip() for x in soup.find(key="urn:lri:property_type:path_step").find_all("value")]
print("\nparse_path_xml: path_id = %s" % path_id)
print("parse_path_xml: path_name = %s" % path_name)
print("parse_path_xml: path_description = %s" % path_desc)
print("parse_path_xml: author_id = %s" % author_id)
print("parse_path_xml: competency_list = %r" % competency_list)
return path_id, path_name, path_desc, author_id, competency_list
def parseJson(self, decodedJson):
"""Extracts keys from json doc. Expected format:
{
"urn:lri:property_type:id": path_id,
"urn:lri:property_type:name": path_name,
"urn:lri:property_type:description": path_description,
"urn:lri:property_type:authored_by": author_id,
"urn:lri:property_type:path_step": [
competency_id_1,
competency_id_2,
...
competency_id_n
]
}
"""
raise DeprecatedError("PathCrateOld is deprecated")
json_decoded = decodedJson
path_id = json_decoded["urn:lri:property_type:id"]
path_name = json_decoded["urn:lri:property_type:name"]
path_desc = json_decoded["urn:lri:property_type:description"]
author_id = json_decoded["urn:lri:property_type:authored_by"]
competency_list = json_decoded["urn:lri:property_type:path_step"]
return path_id, path_name, path_desc, author_id, competency_list
def buildInserts(self, pathId, pathName, pathDesc, authorId, competencyList):
"""Constructs queries that save a competency_path"""
raise DeprecatedError("PathCreateOld is deprecated")
path_id = pathId
path_name = pathName
path_description = pathDesc
author_id = authorId
competency_list = competencyList
print("\nbuild_path_query: path_id = %s" % path_id)
print("build_path_query: path_name = %s" % path_name)
print("build_path_query: path_description = %s" % path_description)
print("build_path_query: author_id = %s" % author_id)
print("build_path_query: competency_list = %r" % competency_list)
# Query to create competency_path
path_query = {}
path_query["urn:lri:property_type:id"] = path_id
path_query["urn:lri:property_type:name"] = path_name
path_query["urn:lri:property_type:description"] = path_description
path_query["urn:lri:property_type:authored_by"] = author_id
path_query["urn:lri:property_type:types"] = ["urn:lri:entity_type:thing",
"urn:lri:entity_type:competency",
"urn:lri:entity_type:competency_path"]
# path id = urn:ccss:ordering:path1
        # competency id = urn:ccss:grade_level:math:K
# step_id = urn:ccss:step:ordering:path1-grade_level:math:K
# common = urn:ccss
# path id rest = ordering:path1
# competency rest = grade_level:math:K
# step_id = root + : + step + : + path_id_rest + - + competency_id_rest
# Queries to create path_steps
count = 1
seen = []
step_list = []
step_queries = []
step_id_basename = os.path.basename(path_id)
step_name_basename = os.path.basename(path_name)
path_id_namespace = os.path.dirname(path_id)
previous = None
for competency in competency_list:
print("\nbuild_save_path_query: competency = %s" % competency)
step_query = {}
print("build_save_path_query: path_id = %s" % path_id)
common = os.path.commonprefix([path_id, competency])
print("build_save_path_query: common = %s" % common)
root = common
print("build_save_path_query: root = %s" % root)
competency_parts = competency.split(root)
print("build_save_path_query: competency_parts = %r" % competency_parts)
path_parts = path_id.split(root)
print("build_save_path_query: path_parts = %r" % path_parts)
step_id = "%sstep:%s-%s" % (root, path_parts[1], competency_parts[1])
print("build_save_path_query: step_id = %s" % step_id)
step_name = os.path.basename(competency) #"%s_%s" % (step_id_basename, step_name_basename)
if competency in seen:
step_id = "%s-%d" % (step_id, count)
step_name = "%s-%d" % (step_name, count)
print("build_save_path_query: step_id = %s" % step_id)
print("build_save_path_query: step_name = %s" % step_name)
count += 1
step_query["urn:lri:property_type:id"] = step_id
step_query["urn:lri:property_type:name"] = step_name
step_query["urn:lri:property_type:types"] = ["urn:lri:entity_type:thing",
"urn:lri:entity_type:path_step"]
step_query["urn:lri:property_type:competency_in_path"] = competency
if previous != None:
step_query["urn:lri:property_type:previous"] = previous
previous = step_id
step_queries.append(step_query)
step_list.append(step_id)
seen.append(competency)
print("build_save_path_query: step_list: ")
for step in step_list:
print(step)
print("build_save_path_query: step_queries: ")
for query in step_queries:
action = "/entity/create"
print(action)
print(query)
# First create path_steps
queries = step_queries
# Finish path_query and add to list
path_query["urn:lri:property_type:path_step"] = step_list
queries.append(path_query)
# Convert to json
json_queries = []
for query in queries:
json_queries.append(json.dumps(query))
return json_queries
def getInserts(self, contentFormat, content):
"""Returns a list of lri queries that save a competency_path"""
raise DeprecatedError("PathCreateOld is deprecated")
content_format = contentFormat
self.path = "/entity/create"
path_id = path_name = path_desc = ""
author_id = ""
competency_list = []
if content_format == "xml":
path_id, path_name, path_desc, author_id, competency_list = self.parseXml(content)
elif content_format == "json":
path_id, path_name, path_desc, author_id, competency_list = self.parseJson(content)
else:
raise web.BadRequest
q_list = self.buildInserts(path_id, path_name, path_desc, author_id, competency_list)
print("get_save_path_queries: self.path = %s" % self.path)
print("get_save_path_queries: q_list (len=%d): " % len(q_list))
for q in q_list:
print("get_save_path_queries: q = %r" % q)
#return [{}]
return q_list
class Step(Insert):
"""Step w/single competency"""
pass
class Step(Insert):
"""Step w/1 or more competencies"""
def __init__(self, competencyList):
self.query = {}
class OrderingStep(Insert):
"""Step that wraps a path"""
def __init__(self, stepList):
self.query = {}
class PathCreate(Insert):
"""For creating competency_path"""
def __init__(self, opts, data):
web.debug("PathCreate.__init__")
Insert.__init__(self, "competency_path", opts, data)
self.path = "/entity/create"
self.opts = opts
if not "access_token" in opts:
opts["access_token"] = "letmein"
if not "admin_access_tokens" in opts:
opts["admin_access_tokens"] = {"letmein":"LRI_ADMIN_USER_0"}
# For making unique ids
self.randStr = self.makeRandom()
web.debug("PathCreate.__init__: self.randStr = %s" % self.randStr)
self.seenIds = []
self.data = data
self.returnFormat = opts.get("format", "xml")
self.inserts = self.getInserts(self.returnFormat, self.data)
web.debug(repr(self))
web.debug(str(self))
web.debug("PathCreate.__init__: urls = %r" % self.getUrls())
def parseXml(self, xml):
"""Extracts LRI properties from xml"""
thisName = "parseXml"
web.debug(thisName + ": xml = %s" % xml)
xml = saxutils.unescape(xml)
web.debug(thisName + ": unescaped xml = %s" % xml)
soup = BeautifulSoup(xml)
web.debug(thisName + ": soup = %s" % soup)
pathId = None
tmp = soup.find(key="urn:lri:property_type:id")
if tmp:
pathId = tmp.getText().strip()
pathName = None
tmp = soup.find(key="urn:lri:property_type:name")
if tmp:
pathName = tmp.getText().strip()
pathDesc = None
tmp = soup.find(key="urn:lri:property_type:description")
if tmp:
pathDesc = tmp.getText().strip()
authorName = None
tmp = soup.find(key="urn:lri:property_type:authored_by")
if tmp:
authorName = tmp.getText().strip()
pathSteps = None
tmp = soup.find(key="urn:lri:property_type:path_step")
if tmp:
pathSteps = tmp
web.debug(thisName + ": pathSteps: ")
web.debug(pathSteps)
return pathId, pathName, pathDesc, authorName, pathSteps
def parseJson(self, decodedJson):
return None, None, None, None, []
def makeRandom(self):
"""Makes a random LRI id"""
thisName = "makeRandom"
r = str(random.random()).split(".")[1]
web.debug(thisName + ": r = %s" % r)
return r
def makePath(self, props={}):
"""Makes a path"""
thisName = "makePath"
web.debug(thisName + ": props = %r" % props)
query = {}
if len(props) > 0:
for prop in props:
query[prop] = props[prop]
else:
            id = self.makeId("container")
query["urn:lri:property_type:id"] = id
query["urn:lri:property_type:name"] = self.makeName("container", id)
query["urn:lri:property_type:types"]= ["urn:lri:entity_type:thing",
"urn:lri:entity_type:competency",
"urn:lri:entity_type:competency_path"]
query["urn:lri:property_type:path_step"] = []
web.debug(thisName + ": query: ")
web.debug(query)
return query
def makeId(self, type, ns="lrihelper", commonName="lrihelper"):
"""Makes a path or path_step id"""
thisName = "makeId"
web.debug(thisName + ": type = %s" % type)
web.debug(thisName + ": ns = %s" % ns)
web.debug(thisName + ": commonName = %s" % commonName)
id = "urn:"
id += ns + ":"
id += type + ":"
id += commonName + "-"
id += self.randStr
if id in self.seenIds:
id += "-" + str(len(self.seenIds) + 1)
self.seenIds.append(id)
web.debug(thisName + ": id = %s" % id)
return id
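        # Example result (illustrative, the suffix comes from self.randStr):
        #   makeId("step") -> "urn:lrihelper:step:lrihelper-<random digits>"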
def makeName(self, type, id):
"""Makes a path or path_step name"""
thisName = "makeName"
web.debug(thisName + ": type = %s, id = %s" % (type, id))
parts = id.split(":")
name = " ".join(parts[1:])
web.debug(thisName + ": name = %s" % name)
return name
def makeContainer(self, competencyList):
"""Makes a container to hold given competencies
Allows for multiple competencies per step
"""
thisName = "makeContainer"
query = {}
id = self.makeId("container")
query["urn:lri:property_type:id"] = id
query["urn:lri:property_type:name"] = self.makeName("container", id)
query["urn:lri:property_type:types"] = ["urn:lri:entity_type:thing",
"urn:lri:entity_type:competency",
"urn:lri:entity_type:competency_container"]
query["urn:lri:property_type:contains"] = competencyList
web.debug(thisName + ": id = %s" % id)
web.debug(thisName + ": query: ")
web.debug(query)
return query
def makeStep(self, xml):
"""Makes path_steps from xml"""
thisName = "makeStep"
web.debug(thisName + ": xml = %s" % xml)
pairs = xml.findAll("pair")
stepType = pairs[0].value.getText().strip()
competencyList = []
index = len(pairs) - 1
for value in pairs[index].findAll("value"):
competencyList.append(value.getText().strip())
previous = None
tmp = xml.find(key="previous")
if tmp:
previous = tmp.getText().strip()
orderId = None
tmp = xml.find(key="orderId")
if tmp:
orderId = tmp.getText().strip()
web.debug(thisName + ": stepType = %r" % stepType)
web.debug(thisName + ": competencyList: ")
web.debug(competencyList)
web.debug(thisName + ": previous = %r" % previous)
web.debug(thisName + ": orderId = %r" % orderId)
stepQuery = {}
stepId = self.makeId("step")
stepQuery["urn:lri:property_type:id"] = stepId
stepQuery["urn:lri:property_type:name"] = self.makeName("step", stepId)
if previous:
stepQuery["previous"] = previous
stepQuery["orderId"] = orderId
containerQuery = self.makeContainer(competencyList)
stepQuery["urn:lri:property_type:competency_in_path"] = containerQuery["urn:lri:property_type:id"]
stepQuery["urn:lri:property_type:types"] = ["urn:lri:entity_type:thing",
"urn:lri:entity_type:path_step"]
# XXX: RESUME HERE
web.debug(thisName + ": stepQuery: ")
web.debug(stepQuery)
web.debug(thisName + ": containerQuery: ")
web.debug(containerQuery)
return stepQuery, containerQuery
def makeOrderingStep(self, xml):
"""Makes an ordering path_step
Used to order steps in path
"""
thisName = "makeOrderingStep"
pairs = xml.findAll("pair")
stepType = pairs[0].value.getText().strip()
orderId = pairs[1].value.getText().strip()
web.debug(thisName + ": Creating path")
pathQuery = self.makePath({})
pathSteps = xml.find(key="urn:lri:property_type:path_step")
stepList = []
stepQueries = []
containerQueries = []
for step in pathSteps.findAll("value"):
if step.pair:
stepQuery = {}
if step.pair["key"] == "type":
val = step.pair.value.getText()
if val == "step":
web.debug(thisName + ": Creating step")
stepQuery, containerQuery = self.makeStep(step)
stepQueries.append(stepQuery)
containerQueries.append(containerQuery)
stepList.append(stepQuery["urn:lri:property_type:id"])
pathQuery["urn:lri:property_type:path_step"] = stepList
pathQuery["orderId"] = orderId
return pathQuery, stepQueries, containerQueries
def buildInserts(self, pathId, pathName, pathDesc, authorName, pathSteps):
"""Builds LRI inserts
NOTE: authorName must be LRI id
"""
thisName = "buildInserts"
web.debug(thisName + ": pathId = %s" % pathId)
web.debug(thisName + ": pathName = %s" % pathName)
web.debug(thisName + ": pathDesc = %s" % pathDesc)
web.debug(thisName + ": authorName = %s" % authorName)
web.debug(thisName + ": pathSteps = %r" % pathSteps)
# Create path
props = {
"urn:lri:property_type:id": pathId,
"urn:lri:property_type:name": pathName,
"urn:lri:property_type:description": pathDesc,
"urn:lri:property_type:authored_by": authorName,
}
web.debug(thisName + ": props = %r" % props)
pathQuery = self.makePath(props)
# Create steps
stepList = []
seenSteps = []
queries = []
web.debug(thisName + ": Creating steps: ")
for step in pathSteps.findAll("value"):
if step.pair:
stepQuery = {}
orderingQueries = []
if step.pair["key"] == "type":
stepType = step.pair.value.getText()
if stepType == "step":
stepOrderId = step.findNext("pair").findNext("pair").value.getText()
if not stepOrderId in seenSteps:
# Skip steps makeOrderingStep creates
web.debug(thisName + ": Creating step")
web.debug(step)
stepQuery, containerQuery = self.makeStep(step)
queries.append(containerQuery)
queries.append(stepQuery)
stepList.append(stepQuery["urn:lri:property_type:id"])
seenSteps.append(stepQuery["orderId"])
elif stepType == "ordering":
web.debug(thisName + ": Creating ordering step")
pathQuery, stepQueries, containerQueries = self.makeOrderingStep(step)
for stepQuery in stepQueries:
seenSteps.append(stepQuery["orderId"])
queries += containerQueries
queries += stepQueries
queries.append(pathQuery)
stepList.append(pathQuery["urn:lri:property_type:id"])
# Order steps
for query in queries:
if "urn:lri:entity_type:path_step" not in query["urn:lri:property_type:types"]:
continue
if "previous" not in query:
continue
previous = query["previous"]
for q in queries:
if "urn:lri:entity_type:path_step" not in q["urn:lri:property_type:types"]:
continue
if query == q:
# Skip this
continue
if "orderId" not in q:
# Skip containers
continue
try:
orderId = q["orderId"]
if orderId == previous:
id = q["urn:lri:property_type:id"]
query["urn:lri:property_type:previous"] = id
except KeyError, e:
web.debug("KeyError: q: %r" % q)
raise e
# Remove tmp keys previous and orderId
for query in queries:
if "previous" in query:
del query["previous"]
if "orderId" in query:
del query["orderId"]
pathQuery["urn:lri:property_type:path_step"] = []
for step in stepList:
if step.find("step") != -1:
pathQuery["urn:lri:property_type:path_step"].append(step)
queries.append(pathQuery)
# Convert query to json before sending to LRI server
jsonQueries = []
for query in queries:
jsonQueries.append(json.dumps(query))
return jsonQueries
def getInserts(self, contentFormat, content):
"""Returns list of LRI inserts to insert given content in LRI"""
thisName = "getInserts"
web.debug(thisName + ": contentFormat = %s" % contentFormat)
web.debug(thisName + ": content = %s" % content)
self.path = "/entity/create"
if contentFormat == "xml":
pathId, pathName, pathDesc, authorName, pathSteps = self.parseXml(content)
elif contentFormat == "json":
raise web.BadRequest
else:
raise web.BadRequest
queries = self.buildInserts(pathId, pathName, pathDesc, authorName, pathSteps)
        print("")
        web.debug(thisName + ": queries: ")
        for query in queries:
            print(query)
#return [{}]
return queries
# Path w/multiple competency_in_path in step
class MultiCompetencyStepPath(Insert):
def __init__(self, opts, data):
raise NotImplementedError("MultiCompetencyStepPath is not implemented")
Insert.__init__(self, "MultiCompetencyStepPath", opts, data)
self.inserts = self.getInserts(self.returnFormat, self.data)
print(repr(self))
print(str(self))
web.debug("MultiCompetencyStepPathCreate.__init__: urls = %r" % self.getUrls())
def parseXml(self, xml):
pass
def buildInserts(self, props):
pass
def getInserts(self, contentFormat, content):
pass
# Non-linear path
class NonLinearPath(Insert):
def __init__(self, opts, data):
raise NotImplementedError("NonLinearPath is not implemented")
Insert.__init__(self, "NonLinearPath", opts, data)
self.inserts = self.getInserts(self.returnFormat, self.data)
print(repr(self))
print(str(self))
web.debug("NonLinearPathCreate.__init__: urls = %r" % self.getUrls())
def parseXml(self, xml):
pass
def buildInserts(self, props):
pass
def getInserts(self, contentFormat, content):
pass
class ResourceCreate(Insert):
"""For creating learning_resources"""
def __init__(self, opts, data):
Insert.__init__(self, "learning_resource", opts, data)
self.path = "/entity/create"
self.opts = opts
self.data = data
self.returnFormat = opts.get("format", "xml")
self.inserts = self.getInserts(self.returnFormat, self.data)
def parseXml(self, xml):
"""Extracts keys from XML doc. Expected format:
<xml>
<pair key='urn:lri:property_type:id'>
<value>resource_id</value>
</pair>
<pair key='urn:lri:property_type:name'>
<value>resource_name</value>
</pair>
<pair key='urn:lri:property_type:description'>
<value>resource_description</value>
</pair>
<pair key='urn:lri:property_type:teaches'>
<value>competency_1_id</value>
<value>competency_2_id</value>
...
<value>competency_n_id</value>
</pair>
</xml>
"""
xml = saxutils.unescape(xml)
soup = BeautifulSoup(xml)
resource_id = soup.find(key="urn:lri:property_type:id").getText().strip()
resource_name = soup.find(key="urn:lri:property_type:name").getText().strip()
resource_desc = soup.find(key="urn:lri:property_type:description").getText().strip()
competency_list = [x.getText().strip() for x in soup.find(key="urn:lri:property_type:teaches").find_all("value")]
return resource_id, resource_name, resource_desc, competency_list
def parseJson(self, decodedJson):
"""Extracts keys from json doc. Expected format:
{
"urn:lri:property_type:id": resource_id,
"urn:lri:property_type:name": resource_name,
"urn:lri:property_type:description": resource_description,
"urn:lri:property_type:teaches": [
competency_id_1,
competency_id_2,
...
competency_id_n
]
}
"""
json_decoded = decodedJson
resource_id = json_decoded["urn:lri:property_type:id"]
resource_name = json_decoded["urn:lri:property_type:name"]
resource_desc = json_decoded["urn:lri:property_type:description"]
competency_list = json_decoded["urn:lri:property_type:teaches"]
        return resource_id, resource_name, resource_desc, competency_list
def buildInserts(self, resourceId, resourceName, resourceDesc, competencyList):
"""Constructs queries that save a learning_resource"""
resource_id = resourceId
resource_name = resourceName
resource_desc = resourceDesc
competency_list = competencyList
# Query to create learning_resource
resource_query = {}
resource_query["urn:lri:property_type:id"] = resource_id
resource_query["urn:lri:property_type:name"] = resource_name
resource_query["urn:lri:property_type:description"] = resource_desc
resource_query["urn:lri:property_type:types"] = ["urn:lri:entity_type:thing",
"urn:lri:entity_type:learning_resource"]
resource_query["urn:lri:property_type:teaches"] = competency_list
# Convert to json
json_query = json.dumps(resource_query)
print("\n\n\nResourceCreate.buildInserts: json_query=%r\n\n\n" % json_query)
return [json_query]
def getInserts(self, contentFormat, content):
"""Returns a list of lri queries that saves a learning_resource"""
content_format = contentFormat
self.path = "/entity/create"
resource_id = resource_name = resource_desc = ""
competency_list = []
if content_format == "xml":
resource_id, resource_name, resource_desc, competency_list = self.parseXml(content)
elif content_format == "json":
resource_id, resource_name, resource_desc, competency_list = self.parseJson(content)
q_list = self.buildInserts(resource_id, resource_name, resource_desc, competency_list)
print("ResourceCreate.getInserts: self.path = %s" % self.path)
print("ResourceCreate.getInserts: self.q_list (len=%d): " % len(q_list))
for q in q_list:
print("ResourceCreate.getInserts: q = %r" % q)
#return [{}]
return q_list
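# Illustrative helper (not part of the original module): a minimal decoded-JSON
# payload in the shape ResourceCreate.parseJson documents above. All ids are
# hypothetical placeholders.
def _example_resource_payload():
    return {
        "urn:lri:property_type:id": "urn:example:learning_resource:fractions-intro",
        "urn:lri:property_type:name": "Intro to Fractions",
        "urn:lri:property_type:description": "A short lesson introducing fractions.",
        "urn:lri:property_type:teaches": [
            "urn:example:competency:fractions-1",
            "urn:example:competency:fractions-2",
        ],
    }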
class InitiativeCreate(Insert):
"""Creates an initiative"""
def __init__(self, opts, data):
Insert.__init__(self, "initiative", opts, data)
self.inserts = self.getInserts(self.returnFormat, self.data)
print(repr(self))
print(str(self))
web.debug("InitiativeCreate.__init__: urls = %r" % self.getUrls())
def parseXml(self, xml):
"""Parses xml"""
props = Insert.parseXml(self, xml)
return props
def buildInserts(self, props):
"""Builds LRI inserts"""
query = Insert.buildInserts(self, props)
query["urn:lri:property_type:types"].append("urn:ccss:entity_type:competency_container")
query["urn:lri:property_type:types"].append("urn:ccss:entity_type:initiative")
# Convert to json
jsonQuery = json.dumps(query)
print("\n\n\nInitiativeCreate.buildInserts: jsonQuery=%r\n\n\n" % jsonQuery)
return [jsonQuery]
def getInserts(self, contentFormat, content):
"""Returns list of LRI inserts"""
qList = Insert.getInserts(self, contentFormat, content)
print("InitiativeCreate.getInserts: self.path = %s" % self.path)
print("InitiativeCreate.: qList (len=%d): " % len(qList))
for q in qList:
print("InitiativeCreate.: q = %r" % q)
return qList
class FrameworkCreate(Insert):
"""Creates a framework"""
def __init__(self, opts, data):
Insert.__init__(self, "framework", opts, data)
self.inserts = self.getInserts(self.returnFormat, self.data)
print(repr(self))
print(str(self))
web.debug("FrameworkCreate.__init__: urls = %r" % self.getUrls())
def parseXml(self, xml):
"""Parses XML"""
props = Insert.parseXml(self, xml)
xml = saxutils.unescape(xml)
soup = BeautifulSoup(xml)
try:
key = "urn:lri:property_type:contained_by"
props["parent"] = soup.find(key=key).getText().strip()
except AttributeError, e:
web.debug("Key not found: %s: %r" % (key, e))
return props
def buildInserts(self, props):
"""Builds LRI inserts"""
query = Insert.buildInserts(self, props)
if "parent" in props:
query["urn:lri:property_type:contained_by"] = props["parent"]
query["urn:lri:property_type:types"].append("urn:ccss:entity_type:competency_container")
query["urn:lri:property_type:types"].append("urn:ccss:entity_type:framework")
# Convert to json
jsonQuery = json.dumps(query)
print("\n\n\nFrameworkCreate.buildInserts: jsonQuery=%r\n\n\n" % jsonQuery)
return [jsonQuery]
def getInserts(self, contentFormat, content):
"""Returns list of LRI inserts"""
qList = Insert.getInserts(self, contentFormat, content)
print("FrameworkCreate.getInserts: self.path = %s" % self.path)
print("FrameworkCreate.: queries (len=%d): " % len(qList))
for q in qList:
print("FrameworkCreate.: query = %r" % q)
return qList
class SetCreate(Insert):
"""Creates a set"""
def __init__(self, opts, data):
Insert.__init__(self, "set", opts, data)
self.inserts = self.getInserts(self.returnFormat, self.data)
print(repr(self))
print(str(self))
web.debug("SetCreate.__init__: urls = %r" % self.getUrls())
def parseXml(self, xml):
"""Parses XML"""
props = Insert.parseXml(self, xml)
xml = saxutils.unescape(xml)
soup = BeautifulSoup(xml)
try:
key = "urn:lri:property_type:contained_by"
props["parent"] = soup.find(key=key).getText().strip()
except AttributeError, e:
web.debug("Key not found: %s: %r" % (key, e))
return props
def buildInserts(self, props):
"""Builds LRI inserts"""
query = Insert.buildInserts(self, props)
if "parent" in props:
query["urn:lri:property_type:contained_by"] = props["parent"]
query["urn:lri:property_type:types"].append("urn:ccss:entity_type:competency_container")
query["urn:lri:property_type:types"].append("urn:ccss:entity_type:set")
# Convert to json
jsonQuery = json.dumps(query)
print("\n\n\nSetCreate.buildInserts: jsonQuery=%r\n\n\n" % jsonQuery)
return [jsonQuery]
def getInserts(self, contentFormat, content):
"""Returns list of LRI inserts"""
qList = Insert.getInserts(self, contentFormat, content)
print("SetCreate.getInserts: self.path = %s" % self.path)
print("SetCreate.: qList (len=%d): " % len(qList))
for q in qList:
print("SetCreate.: q = %r" % q)
return qList
class GradeCreate(Insert):
"""Creates a grade_level"""
def __init__(self, opts, data):
Insert.__init__(self, "grade_level", opts, data)
self.inserts = self.getInserts(self.returnFormat, self.data)
print(repr(self))
print(str(self))
web.debug("GradeCreate.__init__: urls = %r" % self.getUrls())
def parseXml(self, xml):
"""Parses XML"""
web.debug("GradeCreate.parseXml")
xml = saxutils.unescape(xml)
soup = BeautifulSoup(xml)
grades = soup.find_all(key="grade_level")
propsList = []
prevGradeId = ""
for grade in grades:
props = self.getBaseProps(grade)
if prevGradeId:
props["previous"] = prevGradeId
try:
key = "urn:lri:property_type:contained_by"
props["parent"] = soup.find(key=key).getText().strip()
except AttributeError, e:
web.debug("Key not found: %s: %r" % (key, e))
propsList.append(props)
prevGradeId = props["id"]
web.debug("GradeCreate.parseXml: props: ")
for prop in props:
web.debug("GradeCreate.parseXml: %s : %s" % (prop, props[prop]))
return propsList
def buildInserts(self, propsList):
"""Builds LRI inserts"""
web.debug("GradeCreate.buildInserts")
jsonQueries = []
for props in propsList:
query = Insert.buildInserts(self, props)
if "previous" in props:
query["urn:lri:property_type:previous"] = props["previous"]
if "parent" in props:
query["urn:lri:property_type:contained_by"] = props["parent"]
query["urn:lri:property_type:types"].append("urn:ccss:entity_type:competency_container")
query["urn:lri:property_type:types"].append("urn:ccss:entity_type:grade_level")
# Convert to json
jsonQuery = json.dumps(query)
print("\n\n\nGradeCreate.buildInserts: jsonQuery=%r\n\n\n" % jsonQuery)
jsonQueries.append(jsonQuery)
return jsonQueries
def getInserts(self, contentFormat, content):
"""Returns list of LRI inserts"""
web.debug("GradeCreate.getInserts")
qList = Insert.getInserts(self, contentFormat, content)
print("GradeCreate.getInserts: self.path = %s" % self.path)
print("GradeCreate.getInserts: qList (len=%d): " % len(qList))
for q in qList:
print("GradeCreate.: q = %r" % q)
return qList
class DomainCreate(Insert):
def __init__(self, opts, data):
raise NotImplementedError("DomainCreate is not implemented")
class ClusterCreate(Insert):
def __init__(self, opts, data):
raise NotImplementedError("ClusterCreate is not implemented")
class StandardCreate(Insert):
def __init__(self, opts, data):
raise NotImplementedError("StandardCreate is not implemented")
class ComponentCreate(Insert):
def __init__(self, opts, data):
raise NotImplementedError("ComponentCreate is not implemented")
class StrandCreate(Insert):
def __init__(self, opts, data):
raise NotImplementedError("StrandCreate is not implemented")
class SectionCreate(Insert):
def __init__(self, opts, data):
raise NotImplementedError("SectionCreate is not implemented")
class AnchorCreate(Insert):
def __init__(self, opts, data):
raise NotImplementedError("AnchorCreate is not implemented")
class CompetencyCreate(Insert):
"""/ccss/competency/create
Creates a competency
"""
def __init__(self, opts, data):
Insert.__init__(self, "competency", opts, data)
self.inserts = self.getInserts(self.returnFormat, self.data)
print(repr(self))
print(str(self))
web.debug("CompetencyCreate.__init__: urls = %r" % self.getUrls())
def parseXml(self, xml):
"""Parse XML"""
props = Insert.parseXml(self, xml)
xml = saxutils.unescape(xml)
soup = BeautifulSoup(xml)
try:
key = "urn:lri:property_type:contained_by"
props["parent"] = soup.find(key=key).getText().strip()
except AttributeError, e:
web.debug("Key not found: %s: %r" % (key, e))
try:
key = "urn:lri:property_type:completion_criterion"
props["completion_criterion"] = soup.find(key=key).getText().strip()
except AttributeError, e:
web.debug("Key not found: %s: %r" % (key, e))
return props
def parseJson(self, decodedJson):
""" TODO """
pass
def buildInserts(self, props):
"""Builds LRI inserts"""
query = Insert.buildInserts(self, props)
if "parent" in props:
query["urn:lri:property_type:contained_by"] = props["parent"]
if "completion_criterion" in props:
query["urn:lri:property_type:completion_criterion"] = props["completion_criterion"]
query["urn:lri:property_type:types"].append("urn:lri:entity_type:competency")
jsonQuery = json.dumps(query)
print("\n\n\nCompetencyCreate.buildInserts: jsonQuery=%r\n\n\n" % jsonQuery)
return [jsonQuery]
def getInserts(self, contentFormat, content):
"""Returns list of LRI inserts"""
qList = Insert.getInserts(self, contentFormat, content)
web.debug("CompetencyCreate: getInserts: queries: ")
for q in qList:
web.debug("CompetencyCreate: getInserts: query = %s" % q)
return qList
class ContainerCreate(Insert):
"""Creates a competency_container"""
def __init__(self, opts, data):
Insert.__init__(self, "competency_container", opts, data)
self.inserts = self.getInserts(self.returnFormat, self.data)
print(repr(self))
print(str(self))
web.debug("ContainerCreate.__init__: urls = %r" % self.getUrls())
def parseXml(self, xml):
"""Parses XML"""
props = Insert.parseXml(self, xml)
xml = saxutils.unescape(xml)
soup = BeautifulSoup(xml)
try:
key = "urn:lri:property_type:contained_by"
props["parent"] = soup.find(key=key).getText().strip()
except AttributeError, e:
web.debug("Key not found: %s: %r" % (key, e))
try:
key = "urn:lri:property_type:completion_criterion"
props["completion_criterion"] = soup.find(key=key).getText().strip()
except AttributeError, e:
web.debug("Key not found: %s: %r" % (key, e))
return props
def parseJson(self, decodedJson):
""" TODO """
pass
def buildInserts(self, props):
"""Builds LRI inserts"""
query = Insert.buildInserts(self, props)
if "parent" in props:
query["urn:lri:property_type:contained_by"] = props["parent"]
if "completion_criterion" in props:
query["urn:lri:property_type:completion_criterion"] = props["completion_criterion"]
query["urn:lri:property_type:types"].append("urn:lri:entity_type:competency_container")
jsonQuery = json.dumps(query)
print("\n\n\nContainerCreate.buildInserts: jsonQuery=%r\n\n\n" % jsonQuery)
return [jsonQuery]
def getInserts(self, contentFormat, content):
"""Returns list of LRI inserts"""
qList = Insert.getInserts(self, contentFormat, content)
web.debug("ContainerCreate: getInserts: queries: ")
for q in qList:
web.debug("ContainerCreate: getInserts: query = %s" % q)
return qList
##### Update #####
# Generic routines independent of CCSS or LRI type
def parseXml(xml):
"""Parses XML"""
web.debug("parseXml")
xml = saxutils.unescape(xml)
soup = BeautifulSoup(xml)
id = soup.find(key='urn:lri:property_type:id').getText().strip()
web.debug("parseXml: id = %s" % id)
updates = soup.find(key='updates')
web.debug("parseXml: updates = %r" % updates)
props = soup.find_all(key='property')
web.debug("parseXml: props = %r" % props)
updates = []
for prop in props:
web.debug("parseXml: prop = %r" % prop)
all = prop.findAll()
web.debug("parseXml: all = %r" % all)
property = all[0].getText()
web.debug("parseXml: property = %s" % property)
value = all[1].getText()
web.debug("parseXml: value = %s" % value)
updates.append((id, property, value))
return updates
def get(path, query, opts):
"""Does a GET"""
web.debug("get")
httpConfig = httpconfig.HttpConfig(web.ctx.env["DOCUMENT_ROOT"])
url = "http://%s:%d/%s?q=%s&opts=%s" % (httpConfig.config["serverhost"],
httpConfig.config["serverport"],
path,
query,
opts)
response = requests.get(url)
web.debug("get: url = %s" % url)
web.debug("get: status = %s" % response.status_code)
return response
def updateProperty(guid, value, opts):
"""Updates an existing LRI property
* guid: LRI property GUID
* value: New value
"""
web.debug("updateProperty")
path = "/property/update"
query = '{"guid":"%s","value":"%s"}' % (guid, value)
web.debug("updateProperty: ", path, query, opts)
response = get(path, query, opts)
return response
def createProperty(iguid, property, value, opts):
"""Creates a new LRI property
* iguid: LRI entity GUID
* property: LRI property name
* value: New value
"""
web.debug("createProperty")
path = "/property/create"
query = '{"from":"%s","%s":"%s"}' % (iguid, property, value)
web.debug("createProperty: ", path, query, opts)
response = get(path, query, opts)
return response
def runUpdates(updates, opts):
"""Run list of LRI updates"""
web.debug("runUpdates")
path = "/entity/search"
if not "details" in opts:
opts["details"] = True
if not "access_token" in opts:
opts["access_token"] = "letmein"
if not "admin_access_tokens" in opts:
opts["admin_access_tokens"] = {"letmein":"LRI_ADMIN_USER_0"}
opts = json.dumps(opts)
responses = []
web.debug("runUpdates: %d updates" % len(updates))
for update in updates:
web.debug(" ===== update %d ===== " % updates.index(update))
id, property, value = update
web.debug("runUpdates: %s, %s, %s" %( id, property, value))
# Get guid of property
query = '{"urn:lri:property_type:id":"%s"}' % id
response = get(path, query, opts)
web.debug("runUpdates: status = %s" % response.json["status"])
if "message" in response.json:
web.debug("runUpdates: message = %s" % response.json["message"])
guid = ""
iguid = ""
web.debug("runUpdates: response.json = %r" % response.json)
for r in response.json["response"]:
web.debug("runUpdates: in response")
props = r["props"]
for prop in props:
web.debug("runUpdates: in props")
if len(iguid) == 0:
iguid = props["urn:lri:property_type:guid"]
web.debug("runUpdates: iguid = %s" % iguid)
web.debug("runUpdates: prop = %s" % prop)
web.debug("runUpdates: property = %s" % property)
if prop == property:
for key in props[prop]:
web.debug("runUpdates: key = %s" % key)
vals = props[prop][key]
for val in vals:
web.debug("runUpdates: val = %s" % val)
guid = val["guid"]
web.debug("runUpdates: guid = %s" % guid)
if guid == "":
# Property doesn't exist
# Create it
response = createProperty(iguid, property, value, opts)
responses.append(response)
else:
# Property exists
# Update it
web.debug("runUpdates: guid=%s, value=%s, opts=%r" % (guid, value, opts))
response = updateProperty(guid, value, opts)
responses.append(response)
return responses
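def _example_run_updates(opts):
    """Illustrative sketch (not part of the original module): runUpdates()
    expects a list of (entity_id, property_type, new_value) tuples, typically
    produced by parseXml() above. The entity id below is a hypothetical
    placeholder, and calling this requires a reachable LRI server.
    """
    updates = [("urn:example:competency:fractions-1",
                "urn:lri:property_type:name",
                "Fractions, part 1")]
    return runUpdates(updates, opts)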
##### Do not need #####
class Update(object):
def __init__(self, opts, data, httpConfig=None):
raise NotImplementedError("Update is not implemented")
class InitiativeUpdate(Insert):
def __init__(self, opts, data):
raise NotImplementedError("InitiativeUpdate is not implemented")
class FrameworkUpdate(Insert):
def __init__(self, opts, data):
raise NotImplementedError("FrameworkUpdate is not implemented")
class SetUpdate(Insert):
def __init__(self, opts, data):
raise NotImplementedError("SetUpdate is not implemented")
class DomainUpdate(Insert):
def __init__(self, opts, data):
raise NotImplementedError("DomainUpdate is not implemented")
class ClusterUpdate(Insert):
def __init__(self, opts, data):
raise NotImplementedError("ClusterUpdate is not implemented")
class StandardUpdate(Insert):
def __init__(self, opts, data):
raise NotImplementedError("StandardUpdate is not implemented")
class ComponentUpdate(Insert):
def __init__(self, opts, data):
raise NonImplementedError("ComponentUpdate is not implemented")
class StrandUpdate(Insert):
def __init__(self, opts, data):
raise NonImplementedError("StrandUpdate is not implemented")
class SectionUpdate(Insert):
def __init__(self, opts, data):
raise NonImplementedError("SectionUpdate is not implemented")
class AnchorUpdate(Insert):
def __init__(self, opts, data):
raise NonImplementedError("AnchorUpdate is not implemented")
class CompetencyUpdate(Insert):
def __init__(self, opts, data):
raise NotImplementedError("CompetencyUpdate is not implemented")
class ContainerUpdate(Insert):
def __init__(self, opts, data):
raise NotImplementedError("ContainerUpdate is not implemented")
| inbloom/legacy-projects | lri-middleware/ccss/insert.py | Python | apache-2.0 | 56,321 | 0.003196 |
#*******************************************************************************
# Copyright (c) 2015 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#******************************************************************************/
import requests
import sys
import os
import json
from helpers.dbutils import CloudantDbUtils
from helpers.acmeair_utils import AcmeAirUtils
import conftest
# get the cloudant credentials from pytest config file
test_properties = conftest.test_properties()
class DataLoader:
"""
Test data loader related functions
"""
def load_AcmeData(self, num_of_cust):
"""
Reset databases and use the AcmeAir database loader to populate initial customer,
flight and airportmapping data. Does NOT generate user data like bookings.
"""
print ("num_of_cust: ", num_of_cust)
acmeair = AcmeAirUtils()
try:
if acmeair.is_acmeair_running() != 0:
raise RuntimeError("""
AcmeAir is already running which may cause unexpected results when
resetting databases. Please shut down the app and try again.
""")
else:
cloudantUtils = CloudantDbUtils(test_properties)
cloudantUtils.reset_databases()
acmeair.start_acmeair()
acmeair.load_data(num_of_cust)
finally:
acmeair.stop_acmeair()
def remove_AcmeDb(self, num_of_cust):
"""
Drop all AcmeAir databases
"""
acmeair = AcmeAirUtils()
if acmeair.is_acmeair_running() != 0:
acmeair.stop_acmeair()
cloudantUtils = CloudantDbUtils(test_properties)
cloudantUtils.drop_all_databases()
def load_SpecCharValuePredicateData(self):
"""
Create booking data needed to test SpecCharValuePredicate
"""
try:
acmeair = AcmeAirUtils()
acmeair.start_acmeair()
# book flights AA93 and AA330
flight1 = "AA93"
flight2 = "AA330"
# Step#1 - need to find the flights generated _id required for booking
flight1_id = acmeair.get_flightId_by_number(flight1)
print ("{} id = {}".format(flight1, flight1_id))
flight2_id = acmeair.get_flightId_by_number(flight2)
print ("{} id = {}".format(flight2, flight2_id))
# Step#2 - add the boooking
acmeair.book_flights("uid0@email.com", flight1, flight2)
finally:
acmeair.stop_acmeair()
if __name__ =='__main__':
"""
Utility to create test databases and load data
"""
import argparse
parser = argparse.ArgumentParser(description="Utility to load AcmeAir data required for python spark-cloudant tests")
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument('-cleanup', action='store_true', help='Drop all test databases')
group.add_argument('-load', help='Reset and Load databases with the given # of users. -load 0 to just recreate databases and indexes.', type=int)
args = parser.parse_args()
dataloader = DataLoader()
if args.load is not None:
if args.load == 0:
cloudantUtils = CloudantDbUtils(test_properties)
cloudantUtils.reset_databases()
else:
dataloader.load_AcmeData(args.load)
dataloader.load_SpecCharValuePredicateData()
elif args.cleanup:
cloudantUtils = CloudantDbUtils(test_properties)
cloudantUtils.drop_all_databases()
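# Example invocations (illustrative):
#   python dataload.py -load 100   # reset databases and load 100 customers
#   python dataload.py -load 0     # just recreate databases and indexes
#   python dataload.py -cleanup    # drop all test databases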
| snowch/spark-cloudant | test/helpers/dataload.py | Python | apache-2.0 | 3,656 | 0.033643 |
# Copyright 2014 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
from oslo_utils import versionutils
import nova.conf
from nova.db import api as db
from nova import exception
from nova.i18n import _
from nova import objects
from nova.objects import base as obj_base
from nova.objects import fields
CONF = nova.conf.CONF
# TODO(berrange): Remove NovaObjectDictCompat
@obj_base.NovaObjectRegistry.register
class Network(obj_base.NovaPersistentObject, obj_base.NovaObject,
obj_base.NovaObjectDictCompat):
# Version 1.0: Initial version
# Version 1.1: Added in_use_on_host()
# Version 1.2: Added mtu, dhcp_server, enable_dhcp, share_address
VERSION = '1.2'
fields = {
'id': fields.IntegerField(),
'label': fields.StringField(),
'injected': fields.BooleanField(),
'cidr': fields.IPV4NetworkField(nullable=True),
'cidr_v6': fields.IPV6NetworkField(nullable=True),
'multi_host': fields.BooleanField(),
'netmask': fields.IPV4AddressField(nullable=True),
'gateway': fields.IPV4AddressField(nullable=True),
'broadcast': fields.IPV4AddressField(nullable=True),
'netmask_v6': fields.IPV6AddressField(nullable=True),
'gateway_v6': fields.IPV6AddressField(nullable=True),
'bridge': fields.StringField(nullable=True),
'bridge_interface': fields.StringField(nullable=True),
'dns1': fields.IPAddressField(nullable=True),
'dns2': fields.IPAddressField(nullable=True),
'vlan': fields.IntegerField(nullable=True),
'vpn_public_address': fields.IPAddressField(nullable=True),
'vpn_public_port': fields.IntegerField(nullable=True),
'vpn_private_address': fields.IPAddressField(nullable=True),
'dhcp_start': fields.IPV4AddressField(nullable=True),
'rxtx_base': fields.IntegerField(nullable=True),
'project_id': fields.UUIDField(nullable=True),
'priority': fields.IntegerField(nullable=True),
'host': fields.StringField(nullable=True),
'uuid': fields.UUIDField(),
'mtu': fields.IntegerField(nullable=True),
'dhcp_server': fields.IPAddressField(nullable=True),
'enable_dhcp': fields.BooleanField(),
'share_address': fields.BooleanField(),
}
@staticmethod
def _convert_legacy_ipv6_netmask(netmask):
"""Handle netmask_v6 possibilities from the database.
Historically, this was stored as just an integral CIDR prefix,
but in the future it should be stored as an actual netmask.
Be tolerant of either here.
"""
try:
prefix = int(netmask)
return netaddr.IPNetwork('1::/%i' % prefix).netmask
except ValueError:
pass
try:
return netaddr.IPNetwork(netmask).netmask
except netaddr.AddrFormatError:
raise ValueError(_('IPv6 netmask "%s" must be a netmask '
'or integral prefix') % netmask)
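    # Illustrative example (not part of the original file): a legacy integral
    # prefix such as "64" is expanded through netaddr, e.g.
    #   _convert_legacy_ipv6_netmask("64") -> IPAddress('ffff:ffff:ffff:ffff::')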
def obj_make_compatible(self, primitive, target_version):
target_version = versionutils.convert_version_to_tuple(target_version)
if target_version < (1, 2):
if 'mtu' in primitive:
del primitive['mtu']
if 'enable_dhcp' in primitive:
del primitive['enable_dhcp']
if 'dhcp_server' in primitive:
del primitive['dhcp_server']
if 'share_address' in primitive:
del primitive['share_address']
@staticmethod
def _from_db_object(context, network, db_network):
for field in network.fields:
db_value = db_network[field]
if field == 'netmask_v6' and db_value is not None:
db_value = network._convert_legacy_ipv6_netmask(db_value)
elif field == 'dhcp_server' and db_value is None:
db_value = db_network['gateway']
network[field] = db_value
network._context = context
network.obj_reset_changes()
return network
@obj_base.remotable_classmethod
def get_by_id(cls, context, network_id, project_only='allow_none'):
db_network = db.network_get(context, network_id,
project_only=project_only)
return cls._from_db_object(context, cls(), db_network)
@obj_base.remotable_classmethod
def get_by_uuid(cls, context, network_uuid):
db_network = db.network_get_by_uuid(context, network_uuid)
return cls._from_db_object(context, cls(), db_network)
@obj_base.remotable_classmethod
def get_by_cidr(cls, context, cidr):
db_network = db.network_get_by_cidr(context, cidr)
return cls._from_db_object(context, cls(), db_network)
# TODO(stephenfin): This is no longer used and can be removed
@obj_base.remotable_classmethod
def associate(cls, context, project_id, network_id=None, force=False):
db.network_associate(context, project_id, network_id=network_id,
force=force)
# TODO(stephenfin): This is no longer used and can be removed
@obj_base.remotable_classmethod
def disassociate(cls, context, network_id, host=False, project=False):
db.network_disassociate(context, network_id, host, project)
@obj_base.remotable_classmethod
def in_use_on_host(cls, context, network_id, host):
return db.network_in_use_on_host(context, network_id, host)
def _get_primitive_changes(self):
changes = {}
for key, value in self.obj_get_changes().items():
if isinstance(value, netaddr.IPAddress):
changes[key] = str(value)
else:
changes[key] = value
return changes
@obj_base.remotable
def create(self):
updates = self._get_primitive_changes()
if 'id' in updates:
raise exception.ObjectActionError(action='create',
reason='already created')
db_network = db.network_create_safe(self._context, updates)
self._from_db_object(self._context, self, db_network)
@obj_base.remotable
def destroy(self):
db.network_delete_safe(self._context, self.id)
self.deleted = True
self.obj_reset_changes(['deleted'])
@obj_base.remotable
def save(self):
context = self._context
updates = self._get_primitive_changes()
if 'netmask_v6' in updates:
# NOTE(danms): For some reason, historical code stores the
# IPv6 netmask as just the CIDR mask length, so convert that
# back here before saving for now.
updates['netmask_v6'] = netaddr.IPNetwork(
updates['netmask_v6']).netmask
set_host = 'host' in updates
if set_host:
db.network_set_host(context, self.id, updates.pop('host'))
if updates:
db_network = db.network_update(context, self.id, updates)
elif set_host:
db_network = db.network_get(context, self.id)
else:
db_network = None
if db_network is not None:
self._from_db_object(context, self, db_network)
@obj_base.NovaObjectRegistry.register
class NetworkList(obj_base.ObjectListBase, obj_base.NovaObject):
# Version 1.0: Initial version
# Version 1.1: Added get_by_project()
# Version 1.2: Network <= version 1.2
VERSION = '1.2'
fields = {
'objects': fields.ListOfObjectsField('Network'),
}
@obj_base.remotable_classmethod
def get_all(cls, context, project_only='allow_none'):
db_networks = db.network_get_all(context, project_only)
return obj_base.obj_make_list(context, cls(context), objects.Network,
db_networks)
@obj_base.remotable_classmethod
def get_by_uuids(cls, context, network_uuids, project_only='allow_none'):
db_networks = db.network_get_all_by_uuids(context, network_uuids,
project_only)
return obj_base.obj_make_list(context, cls(context), objects.Network,
db_networks)
@obj_base.remotable_classmethod
def get_by_host(cls, context, host):
db_networks = db.network_get_all_by_host(context, host)
return obj_base.obj_make_list(context, cls(context), objects.Network,
db_networks)
@obj_base.remotable_classmethod
def get_by_project(cls, context, project_id, associate=True):
db_networks = db.project_get_networks(context, project_id,
associate=associate)
return obj_base.obj_make_list(context, cls(context), objects.Network,
db_networks)
| rahulunair/nova | nova/objects/network.py | Python | apache-2.0 | 9,406 | 0 |
# micro
# Copyright (C) 2021 micro contributors
#
# This program is free software: you can redistribute it and/or modify it under the terms of the GNU
# Lesser General Public License as published by the Free Software Foundation, either version 3 of
# the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
# even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License along with this program.
# If not, see <http://www.gnu.org/licenses/>.
"""micro errors."""
import builtins
from typing import Dict
class Error(Exception):
"""Base for micro errors."""
def json(self) -> Dict[str, object]:
"""Return a JSON representation of the error."""
return {'__type__': type(self).__name__, 'message': str(self)}
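        # For example (illustrative): ValueError("Bad input").json() returns
        #   {'__type__': 'ValueError', 'message': 'Bad input'}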
class ValueError(builtins.ValueError, Error):
"""See :ref:`ValueError`."""
class AuthenticationError(Error):
"""See :ref:`AuthenticationError`."""
class PermissionError(Error):
"""See :ref:`PermissionError`."""
| noyainrain/micro | micro/error.py | Python | lgpl-3.0 | 1,216 | 0.007401 |
'''
Created on Jan 6, 2013
__author__ = "Elizabeth 'pidge' Flanagan"
__copyright__ = "Copyright 2012-2013, Intel Corp."
__credits__ = ["Elizabeth Flanagan"]
__license__ = "GPL"
__version__ = "2.0"
__maintainer__ = "Elizabeth Flanagan"
__email__ = "elizabeth.flanagan@intel.com"
'''
from buildbot.steps.shell import ShellCommand
from buildbot.process.buildstep import LogLineObserver
from distutils.version import StrictVersion
import os
class BuildImages(ShellCommand):
haltOnFailure = False
flunkOnFailure = True
name = "BuildImages"
def __init__(self, factory, argdict=None, **kwargs):
self.layerversion_yoctobsp=None
self.machine=""
self.images=""
self._pendingLogObservers = []
self.factory = factory
for k, v in argdict.iteritems():
setattr(self, k, v)
# Timeout needs to be passed to LoggingBuildStep as a kwarg
self.timeout = 100000
kwargs['timeout']=self.timeout
ShellCommand.__init__(self, **kwargs)
def start(self):
self.layerversion_yoctobsp = self.getProperty("layerversion_yoctobsp")
self.layerversion_core = self.getProperty("layerversion_core")
self.machine = self.getProperty("MACHINE")
# core-image-basic rename
# See: http://git.yoctoproject.org/cgit/cgit.cgi/poky/commit/?id=b7f1cca517bbd4191828c6bae32e0c5041f1ff19
# I hate making people change their configs, so support both.
if self.layerversion_core < "4":
self.images=self.images.replace("core-image-full-cmdline", "core-image-basic")
else:
self.images=self.images.replace("core-image-basic", "core-image-full-cmdline")
if self.layerversion_yoctobsp is not None and int(self.layerversion_yoctobsp) < 2 and self.machine is not None and self.machine == "genericx86-64":
self.command = "echo 'Skipping Step.'"
else:
bitbakeflags = "-k "
# -w only exists in bitbake 1.25 and newer, use distroversion string and make sure we're on poky >1.7
if self.getProperty('bitbakeversion') and StrictVersion(self.getProperty('bitbakeversion')) >= StrictVersion("1.25"):
bitbakeflags += "-w "
self.command = ". ./oe-init-build-env; bitbake " + bitbakeflags + self.images
self.description = ["Building " + str(self.images)]
ShellCommand.start(self)
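        # Example of the resulting command (illustrative): with images set to
        # "core-image-minimal core-image-sato" and bitbake >= 1.25 this runs
        #   . ./oe-init-build-env; bitbake -k -w core-image-minimal core-image-sato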
def describe(self, done=False):
description = ShellCommand.describe(self, done)
if self.layerversion_yoctobsp is not None and int(self.layerversion_yoctobsp) < 2 and self.machine is not None and self.machine == "genericx86-64":
description.append("genericx86-64 does not exist in this branch. Skipping")
return description
| joeythesaint/yocto-autobuilder | lib/python2.7/site-packages/autobuilder/buildsteps/BuildImages.py | Python | gpl-2.0 | 2,787 | 0.0061 |
from hazelcast.serialization.bits import *
from hazelcast.protocol.client_message import ClientMessage
from hazelcast.protocol.custom_codec import *
from hazelcast.util import ImmutableLazyDataList
from hazelcast.protocol.codec.replicated_map_message_type import *
REQUEST_TYPE = REPLICATEDMAP_REMOVEENTRYLISTENER
RESPONSE_TYPE = 101
RETRYABLE = True
def calculate_size(name, registration_id):
""" Calculates the request payload size"""
data_size = 0
data_size += calculate_size_str(name)
data_size += calculate_size_str(registration_id)
return data_size
def encode_request(name, registration_id):
""" Encode request into client_message"""
client_message = ClientMessage(payload_size=calculate_size(name, registration_id))
client_message.set_message_type(REQUEST_TYPE)
client_message.set_retryable(RETRYABLE)
client_message.append_str(name)
client_message.append_str(registration_id)
client_message.update_frame_length()
return client_message
def decode_response(client_message, to_object=None):
""" Decode response from client message"""
parameters = dict(response=None)
parameters['response'] = client_message.read_bool()
return parameters
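# Illustrative usage (not part of the generated codec): build a remove-entry-
# listener request for a hypothetical map name and registration id.
#   request = encode_request("my-replicated-map", "a1b2c3d4-listener")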
| cangencer/hazelcast-python-client | hazelcast/protocol/codec/replicated_map_remove_entry_listener_codec.py | Python | apache-2.0 | 1,220 | 0.001639 |
from django.contrib import admin
from repository.models import Repository, RepositoryAccess, RepositoryStar, RepositoryFork
admin.site.register(Repository)
admin.site.register(RepositoryStar)
admin.site.register(RepositoryFork)
admin.site.register(RepositoryAccess)
| Djacket/djacket | core/backend/repository/admin.py | Python | mit | 269 | 0.003717 |
import sys
if sys.version > '3':
PY3 = True
else:
PY3 = False
from .uarm import UArm, UArmConnectException
from .config import ua_dir, home_dir
from .util import get_uarm
from .version import __version__
| uArm-Developer/pyuarm | pyuarm/__init__.py | Python | mit | 215 | 0.004651 |
import logging
from zentral.core.exceptions import ImproperlyConfigured
default_app_config = "zentral.core.compliance_checks.apps.ZentralComplianceChecksAppConfig"
logger = logging.getLogger("zentral.core.compliance_checks")
# compliance checks classes
compliance_check_classes = {}
def register_compliance_check_class(compliance_check_class):
try:
model = compliance_check_class.get_model()
except AttributeError:
raise ImproperlyConfigured('Not a valid compliance check class')
if model in compliance_check_classes:
raise ImproperlyConfigured(f'Compliance check class "{model}" already registered')
compliance_check_classes[model] = compliance_check_class
logger.debug('Compliance check class "%s" registered', model)
def compliance_check_class_from_model(model):
try:
return compliance_check_classes[model]
except KeyError:
logger.error('Unknown compliance check model "%s"', model)
# BaseComplianceCheck registered in .compliance_checks
return compliance_check_classes["BaseComplianceCheck"]
| zentralopensource/zentral | zentral/core/compliance_checks/__init__.py | Python | apache-2.0 | 1,092 | 0.001832 |
#!/usr/bin/env python
# encoding: utf-8
"""A ${VISUAL} placeholder that will use the text that was last visually
selected and insert it here. If there was no text visually selected, this will
be the empty string. """
import re
import textwrap
from UltiSnips import _vim
from UltiSnips.indent_util import IndentUtil
from UltiSnips.text_objects._transformation import TextObjectTransformation
from UltiSnips.text_objects._base import NoneditableTextObject
_REPLACE_NON_WS = re.compile(r"[^ \t]")
class Visual(NoneditableTextObject, TextObjectTransformation):
"""See module docstring."""
def __init__(self, parent, token):
# Find our containing snippet for visual_content
snippet = parent
while snippet:
try:
self._text = snippet.visual_content.text
self._mode = snippet.visual_content.mode
break
except AttributeError:
snippet = snippet._parent # pylint:disable=protected-access
if not self._text:
self._text = token.alternative_text
self._mode = "v"
NoneditableTextObject.__init__(self, parent, token)
TextObjectTransformation.__init__(self, token)
def _update(self, done):
if self._mode == "v": # Normal selection.
text = self._text
else: # Block selection or line selection.
text_before = _vim.buf[self.start.line][:self.start.col]
indent = _REPLACE_NON_WS.sub(" ", text_before)
iu = IndentUtil()
indent = iu.indent_to_spaces(indent)
indent = iu.spaces_to_indent(indent)
text = ""
for idx, line in enumerate(textwrap.dedent(
self._text).splitlines(True)):
if idx != 0:
text += indent
text += line
text = text[:-1] # Strip final '\n'
text = self._transform(text)
self.overwrite(text)
self._parent._del_child(self) # pylint:disable=protected-access
return True
| eduardomallmann/vim-and-bash | pythonx/UltiSnips/text_objects/_visual.py | Python | gpl-3.0 | 2,074 | 0.000965 |
version_info = (1, 4, 2)
__version__ = '.'.join(map(str, version_info))
| Vdragon/git-cola | qtpy/_version.py | Python | gpl-2.0 | 72 | 0 |
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2009 CamptoCamp. All rights reserved.
# @author Nicolas Bessi
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import logging
_logger = logging.getLogger(__name__)
class AbstractClassError(Exception):
def __str__(self):
return 'Abstract Class'
def __repr__(self):
return 'Abstract Class'
class AbstractMethodError(Exception):
def __str__(self):
return 'Abstract Method'
def __repr__(self):
return 'Abstract Method'
class UnknowClassError(Exception):
def __str__(self):
return 'Unknown Class'
def __repr__(self):
return 'Unknown Class'
class UnsuportedCurrencyError(Exception):
def __init__(self, value):
self.curr = value
def __str__(self):
return 'Unsupported currency %s' % self.curr
def __repr__(self):
return 'Unsupported currency %s' % self.curr
class Currency_getter_factory():
"""Factory pattern class that will return
a currency getter class base on the name passed
to the register method
"""
def register(self, class_name):
allowed = [
'CH_ADMIN_getter',
'PL_NBP_getter',
'ECB_getter',
'GOOGLE_getter',
'YAHOO_getter',
'MX_BdM_getter',
'CA_BOC_getter',
'RO_BNR_getter',
'BG_CUSTOMS_getter',
'BG_SIBANK_getter',
'BG_UNICRDT_getter',
]
if class_name in allowed:
exec "from .update_service_%s import %s" % (class_name.replace('_getter', ''), class_name)
class_def = eval(class_name)
_logger.info("from .update_service_%s import %s: class_def %s:" % (class_name.replace('_getter', ''), class_name, class_def))
return class_def()
else:
raise UnknowClassError
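# Illustrative usage (not part of the original module):
#   factory = Currency_getter_factory()
#   getter = factory.register('ECB_getter')  # imports and instantiates ECB_getter
#   factory.register('FOO_getter')           # raises UnknowClassError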
| rosenvladimirov/addons | currency_rate_update_bg/services/currency_getter.py | Python | agpl-3.0 | 2,676 | 0.001121 |
#!/usr/bin/env python3
# Copyright (c) 2010 ArtForz -- public domain half-a-node
# Copyright (c) 2012 Jeff Garzik
# Copyright (c) 2010-2016 The Bitcoin Core developers
# Copyright (c) 2013-2019 The Mincoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# mininode.py - Mincoin P2P network half-a-node
#
# This python code was modified from ArtForz' public domain half-a-node, as
# found in the mini-node branch of http://github.com/jgarzik/pynode.
#
# NodeConn: an object which manages p2p connectivity to a mincoin node
# NodeConnCB: a base class that describes the interface for receiving
# callbacks with network messages from a NodeConn
# CBlock, CTransaction, CBlockHeader, CTxIn, CTxOut, etc....:
# data structures that should map to corresponding structures in
# mincoin/primitives
# msg_block, msg_tx, msg_headers, etc.:
# data structures that represent network messages
# ser_*, deser_*: functions that handle serialization/deserialization
import struct
import socket
import asyncore
import time
import sys
import random
from .util import hex_str_to_bytes, bytes_to_hex_str
from io import BytesIO
from codecs import encode
import hashlib
from threading import RLock
from threading import Thread
import logging
import copy
import litecoin_scrypt
from test_framework.siphash import siphash256
BIP0031_VERSION = 60000
MY_VERSION = 70014 # past bip-31 for ping/pong
MY_SUBVERSION = b"/python-mininode-tester:0.0.3/"
MY_RELAY = 1 # from version 70001 onwards, fRelay should be appended to version messages (BIP37)
MAX_INV_SZ = 50000
MAX_BLOCK_BASE_SIZE = 1000000
COIN = 100000000 # 1 btc in satoshis
NODE_NETWORK = (1 << 0)
NODE_GETUTXO = (1 << 1)
NODE_BLOOM = (1 << 2)
NODE_WITNESS = (1 << 3)
# Keep our own socket map for asyncore, so that we can track disconnects
# ourselves (to workaround an issue with closing an asyncore socket when
# using select)
mininode_socket_map = dict()
# One lock for synchronizing all data access between the networking thread (see
# NetworkThread below) and the thread running the test logic. For simplicity,
# NodeConn acquires this lock whenever delivering a message to to a NodeConnCB,
# and whenever adding anything to the send buffer (in send_message()). This
# lock should be acquired in the thread running the test logic to synchronize
# access to any data shared with the NodeConnCB or NodeConn.
mininode_lock = RLock()
# Serialization/deserialization tools
def sha256(s):
return hashlib.new('sha256', s).digest()
def ripemd160(s):
return hashlib.new('ripemd160', s).digest()
def hash256(s):
return sha256(sha256(s))
def ser_compact_size(l):
r = b""
if l < 253:
r = struct.pack("B", l)
elif l < 0x10000:
r = struct.pack("<BH", 253, l)
elif l < 0x100000000:
r = struct.pack("<BI", 254, l)
else:
r = struct.pack("<BQ", 255, l)
return r
def deser_compact_size(f):
nit = struct.unpack("<B", f.read(1))[0]
if nit == 253:
nit = struct.unpack("<H", f.read(2))[0]
elif nit == 254:
nit = struct.unpack("<I", f.read(4))[0]
elif nit == 255:
nit = struct.unpack("<Q", f.read(8))[0]
return nit
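# Quick illustration of the CompactSize encoding handled above (not part of
# the original file):
#   ser_compact_size(252)   == b'\xfc'
#   ser_compact_size(253)   == b'\xfd\xfd\x00'
#   ser_compact_size(2**16) == b'\xfe\x00\x00\x01\x00'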
def deser_string(f):
nit = deser_compact_size(f)
return f.read(nit)
def ser_string(s):
return ser_compact_size(len(s)) + s
def deser_uint256(f):
r = 0
for i in range(8):
t = struct.unpack("<I", f.read(4))[0]
r += t << (i * 32)
return r
def ser_uint256(u):
rs = b""
for i in range(8):
rs += struct.pack("<I", u & 0xFFFFFFFF)
u >>= 32
return rs
def uint256_from_str(s):
r = 0
t = struct.unpack("<IIIIIIII", s[:32])
for i in range(8):
r += t[i] << (i * 32)
return r
def uint256_from_compact(c):
nbytes = (c >> 24) & 0xFF
v = (c & 0xFFFFFF) << (8 * (nbytes - 3))
return v
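# Example (illustrative): the regtest target 0x207fffff expands to
#   0x7fffff << (8 * (0x20 - 3)), i.e. 0x7fffff followed by 29 zero bytes.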
def deser_vector(f, c):
nit = deser_compact_size(f)
r = []
for i in range(nit):
t = c()
t.deserialize(f)
r.append(t)
return r
# ser_function_name: Allow for an alternate serialization function on the
# entries in the vector (we use this for serializing the vector of transactions
# for a witness block).
def ser_vector(l, ser_function_name=None):
r = ser_compact_size(len(l))
for i in l:
if ser_function_name:
r += getattr(i, ser_function_name)()
else:
r += i.serialize()
return r
def deser_uint256_vector(f):
nit = deser_compact_size(f)
r = []
for i in range(nit):
t = deser_uint256(f)
r.append(t)
return r
def ser_uint256_vector(l):
r = ser_compact_size(len(l))
for i in l:
r += ser_uint256(i)
return r
def deser_string_vector(f):
nit = deser_compact_size(f)
r = []
for i in range(nit):
t = deser_string(f)
r.append(t)
return r
def ser_string_vector(l):
r = ser_compact_size(len(l))
for sv in l:
r += ser_string(sv)
return r
def deser_int_vector(f):
nit = deser_compact_size(f)
r = []
for i in range(nit):
t = struct.unpack("<i", f.read(4))[0]
r.append(t)
return r
def ser_int_vector(l):
r = ser_compact_size(len(l))
for i in l:
r += struct.pack("<i", i)
return r
# Deserialize from a hex string representation (eg from RPC)
def FromHex(obj, hex_string):
obj.deserialize(BytesIO(hex_str_to_bytes(hex_string)))
return obj
# Convert a binary-serializable object to hex (eg for submission via RPC)
def ToHex(obj):
return bytes_to_hex_str(obj.serialize())
# Objects that map to mincoind objects, which can be serialized/deserialized
class CAddress(object):
def __init__(self):
self.nServices = 1
self.pchReserved = b"\x00" * 10 + b"\xff" * 2
self.ip = "0.0.0.0"
self.port = 0
def deserialize(self, f):
self.nServices = struct.unpack("<Q", f.read(8))[0]
self.pchReserved = f.read(12)
self.ip = socket.inet_ntoa(f.read(4))
self.port = struct.unpack(">H", f.read(2))[0]
def serialize(self):
r = b""
r += struct.pack("<Q", self.nServices)
r += self.pchReserved
r += socket.inet_aton(self.ip)
r += struct.pack(">H", self.port)
return r
def __repr__(self):
return "CAddress(nServices=%i ip=%s port=%i)" % (self.nServices,
self.ip, self.port)
MSG_WITNESS_FLAG = 1<<30
class CInv(object):
typemap = {
0: "Error",
1: "TX",
2: "Block",
1|MSG_WITNESS_FLAG: "WitnessTx",
2|MSG_WITNESS_FLAG : "WitnessBlock",
4: "CompactBlock"
}
def __init__(self, t=0, h=0):
self.type = t
self.hash = h
def deserialize(self, f):
self.type = struct.unpack("<i", f.read(4))[0]
self.hash = deser_uint256(f)
def serialize(self):
r = b""
r += struct.pack("<i", self.type)
r += ser_uint256(self.hash)
return r
def __repr__(self):
return "CInv(type=%s hash=%064x)" \
% (self.typemap[self.type], self.hash)
class CBlockLocator(object):
def __init__(self):
self.nVersion = MY_VERSION
self.vHave = []
def deserialize(self, f):
self.nVersion = struct.unpack("<i", f.read(4))[0]
self.vHave = deser_uint256_vector(f)
def serialize(self):
r = b""
r += struct.pack("<i", self.nVersion)
r += ser_uint256_vector(self.vHave)
return r
def __repr__(self):
return "CBlockLocator(nVersion=%i vHave=%s)" \
% (self.nVersion, repr(self.vHave))
class COutPoint(object):
def __init__(self, hash=0, n=0):
self.hash = hash
self.n = n
def deserialize(self, f):
self.hash = deser_uint256(f)
self.n = struct.unpack("<I", f.read(4))[0]
def serialize(self):
r = b""
r += ser_uint256(self.hash)
r += struct.pack("<I", self.n)
return r
def __repr__(self):
return "COutPoint(hash=%064x n=%i)" % (self.hash, self.n)
class CTxIn(object):
def __init__(self, outpoint=None, scriptSig=b"", nSequence=0):
if outpoint is None:
self.prevout = COutPoint()
else:
self.prevout = outpoint
self.scriptSig = scriptSig
self.nSequence = nSequence
def deserialize(self, f):
self.prevout = COutPoint()
self.prevout.deserialize(f)
self.scriptSig = deser_string(f)
self.nSequence = struct.unpack("<I", f.read(4))[0]
def serialize(self):
r = b""
r += self.prevout.serialize()
r += ser_string(self.scriptSig)
r += struct.pack("<I", self.nSequence)
return r
def __repr__(self):
return "CTxIn(prevout=%s scriptSig=%s nSequence=%i)" \
% (repr(self.prevout), bytes_to_hex_str(self.scriptSig),
self.nSequence)
class CTxOut(object):
def __init__(self, nValue=0, scriptPubKey=b""):
self.nValue = nValue
self.scriptPubKey = scriptPubKey
def deserialize(self, f):
self.nValue = struct.unpack("<q", f.read(8))[0]
self.scriptPubKey = deser_string(f)
def serialize(self):
r = b""
r += struct.pack("<q", self.nValue)
r += ser_string(self.scriptPubKey)
return r
def __repr__(self):
return "CTxOut(nValue=%i.%08i scriptPubKey=%s)" \
% (self.nValue // COIN, self.nValue % COIN,
bytes_to_hex_str(self.scriptPubKey))
class CScriptWitness(object):
def __init__(self):
# stack is a vector of strings
self.stack = []
def __repr__(self):
return "CScriptWitness(%s)" % \
(",".join([bytes_to_hex_str(x) for x in self.stack]))
def is_null(self):
if self.stack:
return False
return True
class CTxInWitness(object):
def __init__(self):
self.scriptWitness = CScriptWitness()
def deserialize(self, f):
self.scriptWitness.stack = deser_string_vector(f)
def serialize(self):
return ser_string_vector(self.scriptWitness.stack)
def __repr__(self):
return repr(self.scriptWitness)
def is_null(self):
return self.scriptWitness.is_null()
class CTxWitness(object):
def __init__(self):
self.vtxinwit = []
def deserialize(self, f):
for i in range(len(self.vtxinwit)):
self.vtxinwit[i].deserialize(f)
def serialize(self):
r = b""
# This is different than the usual vector serialization --
# we omit the length of the vector, which is required to be
# the same length as the transaction's vin vector.
for x in self.vtxinwit:
r += x.serialize()
return r
def __repr__(self):
return "CTxWitness(%s)" % \
(';'.join([repr(x) for x in self.vtxinwit]))
def is_null(self):
for x in self.vtxinwit:
if not x.is_null():
return False
return True
class CTransaction(object):
def __init__(self, tx=None):
if tx is None:
self.nVersion = 1
self.vin = []
self.vout = []
self.wit = CTxWitness()
self.nLockTime = 0
self.sha256 = None
self.hash = None
else:
self.nVersion = tx.nVersion
self.vin = copy.deepcopy(tx.vin)
self.vout = copy.deepcopy(tx.vout)
self.nLockTime = tx.nLockTime
self.sha256 = tx.sha256
self.hash = tx.hash
self.wit = copy.deepcopy(tx.wit)
def deserialize(self, f):
self.nVersion = struct.unpack("<i", f.read(4))[0]
self.vin = deser_vector(f, CTxIn)
flags = 0
if len(self.vin) == 0:
flags = struct.unpack("<B", f.read(1))[0]
# Not sure why flags can't be zero, but this
# matches the implementation in mincoind
if (flags != 0):
self.vin = deser_vector(f, CTxIn)
self.vout = deser_vector(f, CTxOut)
else:
self.vout = deser_vector(f, CTxOut)
if flags != 0:
self.wit.vtxinwit = [CTxInWitness() for i in range(len(self.vin))]
self.wit.deserialize(f)
self.nLockTime = struct.unpack("<I", f.read(4))[0]
self.sha256 = None
self.hash = None
def serialize_without_witness(self):
r = b""
r += struct.pack("<i", self.nVersion)
r += ser_vector(self.vin)
r += ser_vector(self.vout)
r += struct.pack("<I", self.nLockTime)
return r
# Only serialize with witness when explicitly called for
def serialize_with_witness(self):
flags = 0
if not self.wit.is_null():
flags |= 1
r = b""
r += struct.pack("<i", self.nVersion)
if flags:
dummy = []
r += ser_vector(dummy)
r += struct.pack("<B", flags)
r += ser_vector(self.vin)
r += ser_vector(self.vout)
if flags & 1:
if (len(self.wit.vtxinwit) != len(self.vin)):
# vtxinwit must have the same length as vin
self.wit.vtxinwit = self.wit.vtxinwit[:len(self.vin)]
for i in range(len(self.wit.vtxinwit), len(self.vin)):
self.wit.vtxinwit.append(CTxInWitness())
r += self.wit.serialize()
r += struct.pack("<I", self.nLockTime)
return r
# Regular serialization is without witness -- must explicitly
# call serialize_with_witness to include witness data.
def serialize(self):
return self.serialize_without_witness()
# Recalculate the txid (transaction hash without witness)
def rehash(self):
self.sha256 = None
self.calc_sha256()
# We will only cache the serialization without witness in
# self.sha256 and self.hash -- those are expected to be the txid.
def calc_sha256(self, with_witness=False):
if with_witness:
# Don't cache the result, just return it
return uint256_from_str(hash256(self.serialize_with_witness()))
if self.sha256 is None:
self.sha256 = uint256_from_str(hash256(self.serialize_without_witness()))
self.hash = encode(hash256(self.serialize())[::-1], 'hex_codec').decode('ascii')
def is_valid(self):
self.calc_sha256()
for tout in self.vout:
if tout.nValue < 0 or tout.nValue > 21000000 * COIN:
return False
return True
def __repr__(self):
return "CTransaction(nVersion=%i vin=%s vout=%s wit=%s nLockTime=%i)" \
% (self.nVersion, repr(self.vin), repr(self.vout), repr(self.wit), self.nLockTime)
class CBlockHeader(object):
def __init__(self, header=None):
if header is None:
self.set_null()
else:
self.nVersion = header.nVersion
self.hashPrevBlock = header.hashPrevBlock
self.hashMerkleRoot = header.hashMerkleRoot
self.nTime = header.nTime
self.nBits = header.nBits
self.nNonce = header.nNonce
self.sha256 = header.sha256
self.hash = header.hash
self.scrypt256 = header.scrypt256
self.calc_sha256()
def set_null(self):
self.nVersion = 1
self.hashPrevBlock = 0
self.hashMerkleRoot = 0
self.nTime = 0
self.nBits = 0
self.nNonce = 0
self.sha256 = None
self.hash = None
self.scrypt256 = None
def deserialize(self, f):
self.nVersion = struct.unpack("<i", f.read(4))[0]
self.hashPrevBlock = deser_uint256(f)
self.hashMerkleRoot = deser_uint256(f)
self.nTime = struct.unpack("<I", f.read(4))[0]
self.nBits = struct.unpack("<I", f.read(4))[0]
self.nNonce = struct.unpack("<I", f.read(4))[0]
self.sha256 = None
self.hash = None
self.scrypt256 = None
def serialize(self):
r = b""
r += struct.pack("<i", self.nVersion)
r += ser_uint256(self.hashPrevBlock)
r += ser_uint256(self.hashMerkleRoot)
r += struct.pack("<I", self.nTime)
r += struct.pack("<I", self.nBits)
r += struct.pack("<I", self.nNonce)
return r
def calc_sha256(self):
if self.sha256 is None:
r = b""
r += struct.pack("<i", self.nVersion)
r += ser_uint256(self.hashPrevBlock)
r += ser_uint256(self.hashMerkleRoot)
r += struct.pack("<I", self.nTime)
r += struct.pack("<I", self.nBits)
r += struct.pack("<I", self.nNonce)
self.sha256 = uint256_from_str(hash256(r))
self.hash = encode(hash256(r)[::-1], 'hex_codec').decode('ascii')
self.scrypt256 = uint256_from_str(litecoin_scrypt.getPoWHash(r))
def rehash(self):
self.sha256 = None
self.scrypt256 = None
self.calc_sha256()
return self.sha256
def __repr__(self):
return "CBlockHeader(nVersion=%i hashPrevBlock=%064x hashMerkleRoot=%064x nTime=%s nBits=%08x nNonce=%08x)" \
% (self.nVersion, self.hashPrevBlock, self.hashMerkleRoot,
time.ctime(self.nTime), self.nBits, self.nNonce)
class CBlock(CBlockHeader):
def __init__(self, header=None):
super(CBlock, self).__init__(header)
self.vtx = []
def deserialize(self, f):
super(CBlock, self).deserialize(f)
self.vtx = deser_vector(f, CTransaction)
def serialize(self, with_witness=False):
r = b""
r += super(CBlock, self).serialize()
if with_witness:
r += ser_vector(self.vtx, "serialize_with_witness")
else:
r += ser_vector(self.vtx)
return r
# Calculate the merkle root given a vector of transaction hashes
def get_merkle_root(self, hashes):
while len(hashes) > 1:
newhashes = []
for i in range(0, len(hashes), 2):
i2 = min(i+1, len(hashes)-1)
newhashes.append(hash256(hashes[i] + hashes[i2]))
hashes = newhashes
return uint256_from_str(hashes[0])
def calc_merkle_root(self):
hashes = []
for tx in self.vtx:
tx.calc_sha256()
hashes.append(ser_uint256(tx.sha256))
return self.get_merkle_root(hashes)
def calc_witness_merkle_root(self):
# For witness root purposes, the hash of the
# coinbase, with witness, is defined to be 0...0
hashes = [ser_uint256(0)]
for tx in self.vtx[1:]:
# Calculate the hashes with witness data
hashes.append(ser_uint256(tx.calc_sha256(True)))
return self.get_merkle_root(hashes)
def is_valid(self):
self.calc_sha256()
target = uint256_from_compact(self.nBits)
if self.scrypt256 > target:
return False
for tx in self.vtx:
if not tx.is_valid():
return False
if self.calc_merkle_root() != self.hashMerkleRoot:
return False
return True
def solve(self):
self.rehash()
target = uint256_from_compact(self.nBits)
while self.scrypt256 > target:
self.nNonce += 1
self.rehash()
def __repr__(self):
return "CBlock(nVersion=%i hashPrevBlock=%064x hashMerkleRoot=%064x nTime=%s nBits=%08x nNonce=%08x vtx=%s)" \
% (self.nVersion, self.hashPrevBlock, self.hashMerkleRoot,
time.ctime(self.nTime), self.nBits, self.nNonce, repr(self.vtx))
class CUnsignedAlert(object):
def __init__(self):
self.nVersion = 1
self.nRelayUntil = 0
self.nExpiration = 0
self.nID = 0
self.nCancel = 0
self.setCancel = []
self.nMinVer = 0
self.nMaxVer = 0
self.setSubVer = []
self.nPriority = 0
self.strComment = b""
self.strStatusBar = b""
self.strReserved = b""
def deserialize(self, f):
self.nVersion = struct.unpack("<i", f.read(4))[0]
self.nRelayUntil = struct.unpack("<q", f.read(8))[0]
self.nExpiration = struct.unpack("<q", f.read(8))[0]
self.nID = struct.unpack("<i", f.read(4))[0]
self.nCancel = struct.unpack("<i", f.read(4))[0]
self.setCancel = deser_int_vector(f)
self.nMinVer = struct.unpack("<i", f.read(4))[0]
self.nMaxVer = struct.unpack("<i", f.read(4))[0]
self.setSubVer = deser_string_vector(f)
self.nPriority = struct.unpack("<i", f.read(4))[0]
self.strComment = deser_string(f)
self.strStatusBar = deser_string(f)
self.strReserved = deser_string(f)
def serialize(self):
r = b""
r += struct.pack("<i", self.nVersion)
r += struct.pack("<q", self.nRelayUntil)
r += struct.pack("<q", self.nExpiration)
r += struct.pack("<i", self.nID)
r += struct.pack("<i", self.nCancel)
r += ser_int_vector(self.setCancel)
r += struct.pack("<i", self.nMinVer)
r += struct.pack("<i", self.nMaxVer)
r += ser_string_vector(self.setSubVer)
r += struct.pack("<i", self.nPriority)
r += ser_string(self.strComment)
r += ser_string(self.strStatusBar)
r += ser_string(self.strReserved)
return r
def __repr__(self):
return "CUnsignedAlert(nVersion %d, nRelayUntil %d, nExpiration %d, nID %d, nCancel %d, nMinVer %d, nMaxVer %d, nPriority %d, strComment %s, strStatusBar %s, strReserved %s)" \
% (self.nVersion, self.nRelayUntil, self.nExpiration, self.nID,
self.nCancel, self.nMinVer, self.nMaxVer, self.nPriority,
self.strComment, self.strStatusBar, self.strReserved)
class CAlert(object):
def __init__(self):
self.vchMsg = b""
self.vchSig = b""
def deserialize(self, f):
self.vchMsg = deser_string(f)
self.vchSig = deser_string(f)
def serialize(self):
r = b""
r += ser_string(self.vchMsg)
r += ser_string(self.vchSig)
return r
def __repr__(self):
return "CAlert(vchMsg.sz %d, vchSig.sz %d)" \
% (len(self.vchMsg), len(self.vchSig))
class PrefilledTransaction(object):
def __init__(self, index=0, tx = None):
self.index = index
self.tx = tx
def deserialize(self, f):
self.index = deser_compact_size(f)
self.tx = CTransaction()
self.tx.deserialize(f)
def serialize(self, with_witness=False):
r = b""
r += ser_compact_size(self.index)
if with_witness:
r += self.tx.serialize_with_witness()
else:
r += self.tx.serialize_without_witness()
return r
def serialize_with_witness(self):
return self.serialize(with_witness=True)
def __repr__(self):
return "PrefilledTransaction(index=%d, tx=%s)" % (self.index, repr(self.tx))
# This is what we send on the wire, in a cmpctblock message.
class P2PHeaderAndShortIDs(object):
def __init__(self):
self.header = CBlockHeader()
self.nonce = 0
self.shortids_length = 0
self.shortids = []
self.prefilled_txn_length = 0
self.prefilled_txn = []
def deserialize(self, f):
self.header.deserialize(f)
self.nonce = struct.unpack("<Q", f.read(8))[0]
self.shortids_length = deser_compact_size(f)
for i in range(self.shortids_length):
# shortids are defined to be 6 bytes in the spec, so append
# two zero bytes and read it in as an 8-byte number
self.shortids.append(struct.unpack("<Q", f.read(6) + b'\x00\x00')[0])
self.prefilled_txn = deser_vector(f, PrefilledTransaction)
self.prefilled_txn_length = len(self.prefilled_txn)
# When using version 2 compact blocks, we must serialize with_witness.
def serialize(self, with_witness=False):
r = b""
r += self.header.serialize()
r += struct.pack("<Q", self.nonce)
r += ser_compact_size(self.shortids_length)
for x in self.shortids:
# We only want the first 6 bytes
r += struct.pack("<Q", x)[0:6]
if with_witness:
r += ser_vector(self.prefilled_txn, "serialize_with_witness")
else:
r += ser_vector(self.prefilled_txn)
return r
def __repr__(self):
return "P2PHeaderAndShortIDs(header=%s, nonce=%d, shortids_length=%d, shortids=%s, prefilled_txn_length=%d, prefilledtxn=%s" % (repr(self.header), self.nonce, self.shortids_length, repr(self.shortids), self.prefilled_txn_length, repr(self.prefilled_txn))
# P2P version of the above that will use witness serialization (for compact
# block version 2)
class P2PHeaderAndShortWitnessIDs(P2PHeaderAndShortIDs):
def serialize(self):
return super(P2PHeaderAndShortWitnessIDs, self).serialize(with_witness=True)
# Calculate the BIP 152-compact blocks shortid for a given transaction hash
def calculate_shortid(k0, k1, tx_hash):
expected_shortid = siphash256(k0, k1, tx_hash)
expected_shortid &= 0x0000ffffffffffff
return expected_shortid
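# Illustrative use: the siphash keys come from the block header plus nonce
# (see HeaderAndShortIDs.get_siphash_keys below); version 2 compact blocks hash
# the wtxid rather than the txid:
#   [k0, k1] = header_and_shortids.get_siphash_keys()
#   shortid = calculate_shortid(k0, k1, tx.calc_sha256(with_witness=True))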
# This version gets rid of the array lengths, and reinterprets the differential
# encoding into indices that can be used for lookup.
class HeaderAndShortIDs(object):
def __init__(self, p2pheaders_and_shortids = None):
self.header = CBlockHeader()
self.nonce = 0
self.shortids = []
self.prefilled_txn = []
self.use_witness = False
if p2pheaders_and_shortids != None:
self.header = p2pheaders_and_shortids.header
self.nonce = p2pheaders_and_shortids.nonce
self.shortids = p2pheaders_and_shortids.shortids
last_index = -1
for x in p2pheaders_and_shortids.prefilled_txn:
self.prefilled_txn.append(PrefilledTransaction(x.index + last_index + 1, x.tx))
last_index = self.prefilled_txn[-1].index
def to_p2p(self):
if self.use_witness:
ret = P2PHeaderAndShortWitnessIDs()
else:
ret = P2PHeaderAndShortIDs()
ret.header = self.header
ret.nonce = self.nonce
ret.shortids_length = len(self.shortids)
ret.shortids = self.shortids
ret.prefilled_txn_length = len(self.prefilled_txn)
ret.prefilled_txn = []
last_index = -1
for x in self.prefilled_txn:
ret.prefilled_txn.append(PrefilledTransaction(x.index - last_index - 1, x.tx))
last_index = x.index
return ret
def get_siphash_keys(self):
header_nonce = self.header.serialize()
header_nonce += struct.pack("<Q", self.nonce)
hash_header_nonce_as_str = sha256(header_nonce)
key0 = struct.unpack("<Q", hash_header_nonce_as_str[0:8])[0]
key1 = struct.unpack("<Q", hash_header_nonce_as_str[8:16])[0]
return [ key0, key1 ]
# Version 2 compact blocks use wtxid in shortids (rather than txid)
def initialize_from_block(self, block, nonce=0, prefill_list = [0], use_witness = False):
self.header = CBlockHeader(block)
self.nonce = nonce
self.prefilled_txn = [ PrefilledTransaction(i, block.vtx[i]) for i in prefill_list ]
self.shortids = []
self.use_witness = use_witness
[k0, k1] = self.get_siphash_keys()
for i in range(len(block.vtx)):
if i not in prefill_list:
tx_hash = block.vtx[i].sha256
if use_witness:
tx_hash = block.vtx[i].calc_sha256(with_witness=True)
self.shortids.append(calculate_shortid(k0, k1, tx_hash))
def __repr__(self):
return "HeaderAndShortIDs(header=%s, nonce=%d, shortids=%s, prefilledtxn=%s" % (repr(self.header), self.nonce, repr(self.shortids), repr(self.prefilled_txn))
class BlockTransactionsRequest(object):
def __init__(self, blockhash=0, indexes = None):
self.blockhash = blockhash
self.indexes = indexes if indexes != None else []
def deserialize(self, f):
self.blockhash = deser_uint256(f)
indexes_length = deser_compact_size(f)
for i in range(indexes_length):
self.indexes.append(deser_compact_size(f))
def serialize(self):
r = b""
r += ser_uint256(self.blockhash)
r += ser_compact_size(len(self.indexes))
for x in self.indexes:
r += ser_compact_size(x)
return r
# helper to set the differentially encoded indexes from absolute ones
def from_absolute(self, absolute_indexes):
self.indexes = []
last_index = -1
for x in absolute_indexes:
self.indexes.append(x-last_index-1)
last_index = x
def to_absolute(self):
absolute_indexes = []
last_index = -1
for x in self.indexes:
absolute_indexes.append(x+last_index+1)
last_index = absolute_indexes[-1]
return absolute_indexes
def __repr__(self):
return "BlockTransactionsRequest(hash=%064x indexes=%s)" % (self.blockhash, repr(self.indexes))
class BlockTransactions(object):
def __init__(self, blockhash=0, transactions = None):
self.blockhash = blockhash
self.transactions = transactions if transactions != None else []
def deserialize(self, f):
self.blockhash = deser_uint256(f)
self.transactions = deser_vector(f, CTransaction)
def serialize(self, with_witness=False):
r = b""
r += ser_uint256(self.blockhash)
if with_witness:
r += ser_vector(self.transactions, "serialize_with_witness")
else:
r += ser_vector(self.transactions)
return r
def __repr__(self):
return "BlockTransactions(hash=%064x transactions=%s)" % (self.blockhash, repr(self.transactions))
# Objects that correspond to messages on the wire
class msg_version(object):
command = b"version"
def __init__(self):
self.nVersion = MY_VERSION
self.nServices = 1
self.nTime = int(time.time())
self.addrTo = CAddress()
self.addrFrom = CAddress()
self.nNonce = random.getrandbits(64)
self.strSubVer = MY_SUBVERSION
self.nStartingHeight = -1
self.nRelay = MY_RELAY
def deserialize(self, f):
self.nVersion = struct.unpack("<i", f.read(4))[0]
if self.nVersion == 10300:
self.nVersion = 300
self.nServices = struct.unpack("<Q", f.read(8))[0]
self.nTime = struct.unpack("<q", f.read(8))[0]
self.addrTo = CAddress()
self.addrTo.deserialize(f)
if self.nVersion >= 106:
self.addrFrom = CAddress()
self.addrFrom.deserialize(f)
self.nNonce = struct.unpack("<Q", f.read(8))[0]
self.strSubVer = deser_string(f)
else:
self.addrFrom = None
self.nNonce = None
self.strSubVer = None
self.nStartingHeight = None
if self.nVersion >= 209:
self.nStartingHeight = struct.unpack("<i", f.read(4))[0]
else:
self.nStartingHeight = None
if self.nVersion >= 70001:
# Relay field is optional for version 70001 onwards
try:
self.nRelay = struct.unpack("<b", f.read(1))[0]
except:
self.nRelay = 0
else:
self.nRelay = 0
def serialize(self):
r = b""
r += struct.pack("<i", self.nVersion)
r += struct.pack("<Q", self.nServices)
r += struct.pack("<q", self.nTime)
r += self.addrTo.serialize()
r += self.addrFrom.serialize()
r += struct.pack("<Q", self.nNonce)
r += ser_string(self.strSubVer)
r += struct.pack("<i", self.nStartingHeight)
r += struct.pack("<b", self.nRelay)
return r
def __repr__(self):
return 'msg_version(nVersion=%i nServices=%i nTime=%s addrTo=%s addrFrom=%s nNonce=0x%016X strSubVer=%s nStartingHeight=%i nRelay=%i)' \
% (self.nVersion, self.nServices, time.ctime(self.nTime),
repr(self.addrTo), repr(self.addrFrom), self.nNonce,
self.strSubVer, self.nStartingHeight, self.nRelay)
class msg_verack(object):
command = b"verack"
def __init__(self):
pass
def deserialize(self, f):
pass
def serialize(self):
return b""
def __repr__(self):
return "msg_verack()"
class msg_addr(object):
command = b"addr"
def __init__(self):
self.addrs = []
def deserialize(self, f):
self.addrs = deser_vector(f, CAddress)
def serialize(self):
return ser_vector(self.addrs)
def __repr__(self):
return "msg_addr(addrs=%s)" % (repr(self.addrs))
class msg_alert(object):
command = b"alert"
def __init__(self):
self.alert = CAlert()
def deserialize(self, f):
self.alert = CAlert()
self.alert.deserialize(f)
def serialize(self):
r = b""
r += self.alert.serialize()
return r
def __repr__(self):
return "msg_alert(alert=%s)" % (repr(self.alert), )
class msg_inv(object):
command = b"inv"
def __init__(self, inv=None):
if inv is None:
self.inv = []
else:
self.inv = inv
def deserialize(self, f):
self.inv = deser_vector(f, CInv)
def serialize(self):
return ser_vector(self.inv)
def __repr__(self):
return "msg_inv(inv=%s)" % (repr(self.inv))
class msg_getdata(object):
command = b"getdata"
def __init__(self, inv=None):
self.inv = inv if inv != None else []
def deserialize(self, f):
self.inv = deser_vector(f, CInv)
def serialize(self):
return ser_vector(self.inv)
def __repr__(self):
return "msg_getdata(inv=%s)" % (repr(self.inv))
class msg_getblocks(object):
command = b"getblocks"
def __init__(self):
self.locator = CBlockLocator()
self.hashstop = 0
def deserialize(self, f):
self.locator = CBlockLocator()
self.locator.deserialize(f)
self.hashstop = deser_uint256(f)
def serialize(self):
r = b""
r += self.locator.serialize()
r += ser_uint256(self.hashstop)
return r
def __repr__(self):
return "msg_getblocks(locator=%s hashstop=%064x)" \
% (repr(self.locator), self.hashstop)
class msg_tx(object):
command = b"tx"
def __init__(self, tx=CTransaction()):
self.tx = tx
def deserialize(self, f):
self.tx.deserialize(f)
def serialize(self):
return self.tx.serialize_without_witness()
def __repr__(self):
return "msg_tx(tx=%s)" % (repr(self.tx))
class msg_witness_tx(msg_tx):
def serialize(self):
return self.tx.serialize_with_witness()
class msg_block(object):
command = b"block"
def __init__(self, block=None):
if block is None:
self.block = CBlock()
else:
self.block = block
def deserialize(self, f):
self.block.deserialize(f)
def serialize(self):
return self.block.serialize()
def __repr__(self):
return "msg_block(block=%s)" % (repr(self.block))
# for cases where a user needs tighter control over what is sent over the wire
# note that the user must supply the name of the command, and the data
class msg_generic(object):
def __init__(self, command, data=None):
self.command = command
self.data = data
def serialize(self):
return self.data
def __repr__(self):
return "msg_generic()"
class msg_witness_block(msg_block):
def serialize(self):
r = self.block.serialize(with_witness=True)
return r
class msg_getaddr(object):
command = b"getaddr"
def __init__(self):
pass
def deserialize(self, f):
pass
def serialize(self):
return b""
def __repr__(self):
return "msg_getaddr()"
class msg_ping_prebip31(object):
command = b"ping"
def __init__(self):
pass
def deserialize(self, f):
pass
def serialize(self):
return b""
def __repr__(self):
return "msg_ping() (pre-bip31)"
class msg_ping(object):
command = b"ping"
def __init__(self, nonce=0):
self.nonce = nonce
def deserialize(self, f):
self.nonce = struct.unpack("<Q", f.read(8))[0]
def serialize(self):
r = b""
r += struct.pack("<Q", self.nonce)
return r
def __repr__(self):
return "msg_ping(nonce=%08x)" % self.nonce
class msg_pong(object):
command = b"pong"
def __init__(self, nonce=0):
self.nonce = nonce
def deserialize(self, f):
self.nonce = struct.unpack("<Q", f.read(8))[0]
def serialize(self):
r = b""
r += struct.pack("<Q", self.nonce)
return r
def __repr__(self):
return "msg_pong(nonce=%08x)" % self.nonce
class msg_mempool(object):
command = b"mempool"
def __init__(self):
pass
def deserialize(self, f):
pass
def serialize(self):
return b""
def __repr__(self):
return "msg_mempool()"
class msg_sendheaders(object):
command = b"sendheaders"
def __init__(self):
pass
def deserialize(self, f):
pass
def serialize(self):
return b""
def __repr__(self):
return "msg_sendheaders()"
# getheaders message has
# number of entries
# vector of hashes
# hash_stop (hash of last desired block header, 0 to get as many as possible)
class msg_getheaders(object):
command = b"getheaders"
def __init__(self):
self.locator = CBlockLocator()
self.hashstop = 0
def deserialize(self, f):
self.locator = CBlockLocator()
self.locator.deserialize(f)
self.hashstop = deser_uint256(f)
def serialize(self):
r = b""
r += self.locator.serialize()
r += ser_uint256(self.hashstop)
return r
def __repr__(self):
return "msg_getheaders(locator=%s, stop=%064x)" \
% (repr(self.locator), self.hashstop)
# headers message has
# <count> <vector of block headers>
class msg_headers(object):
command = b"headers"
def __init__(self):
self.headers = []
def deserialize(self, f):
# comment in mincoind indicates these should be deserialized as blocks
blocks = deser_vector(f, CBlock)
for x in blocks:
self.headers.append(CBlockHeader(x))
def serialize(self):
blocks = [CBlock(x) for x in self.headers]
return ser_vector(blocks)
def __repr__(self):
return "msg_headers(headers=%s)" % repr(self.headers)
class msg_reject(object):
command = b"reject"
REJECT_MALFORMED = 1
def __init__(self):
self.message = b""
self.code = 0
self.reason = b""
self.data = 0
def deserialize(self, f):
self.message = deser_string(f)
self.code = struct.unpack("<B", f.read(1))[0]
self.reason = deser_string(f)
if (self.code != self.REJECT_MALFORMED and
(self.message == b"block" or self.message == b"tx")):
self.data = deser_uint256(f)
def serialize(self):
r = ser_string(self.message)
r += struct.pack("<B", self.code)
r += ser_string(self.reason)
if (self.code != self.REJECT_MALFORMED and
(self.message == b"block" or self.message == b"tx")):
r += ser_uint256(self.data)
return r
def __repr__(self):
return "msg_reject: %s %d %s [%064x]" \
% (self.message, self.code, self.reason, self.data)
# Helper function
def wait_until(predicate, *, attempts=float('inf'), timeout=float('inf')):
attempt = 0
elapsed = 0
while attempt < attempts and elapsed < timeout:
with mininode_lock:
if predicate():
return True
attempt += 1
elapsed += 0.05
time.sleep(0.05)
return False
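# Illustrative use: poll callback state until a condition holds or the timeout lapses.
# The predicate runs under mininode_lock, so it can safely read NodeConnCB fields:
#   assert wait_until(lambda: test_node.verack_received, timeout=10)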
class msg_feefilter(object):
command = b"feefilter"
def __init__(self, feerate=0):
self.feerate = feerate
def deserialize(self, f):
self.feerate = struct.unpack("<Q", f.read(8))[0]
def serialize(self):
r = b""
r += struct.pack("<Q", self.feerate)
return r
def __repr__(self):
return "msg_feefilter(feerate=%08x)" % self.feerate
class msg_sendcmpct(object):
command = b"sendcmpct"
def __init__(self):
self.announce = False
self.version = 1
def deserialize(self, f):
self.announce = struct.unpack("<?", f.read(1))[0]
self.version = struct.unpack("<Q", f.read(8))[0]
def serialize(self):
r = b""
r += struct.pack("<?", self.announce)
r += struct.pack("<Q", self.version)
return r
def __repr__(self):
return "msg_sendcmpct(announce=%s, version=%lu)" % (self.announce, self.version)
class msg_cmpctblock(object):
command = b"cmpctblock"
def __init__(self, header_and_shortids = None):
self.header_and_shortids = header_and_shortids
def deserialize(self, f):
self.header_and_shortids = P2PHeaderAndShortIDs()
self.header_and_shortids.deserialize(f)
def serialize(self):
r = b""
r += self.header_and_shortids.serialize()
return r
def __repr__(self):
return "msg_cmpctblock(HeaderAndShortIDs=%s)" % repr(self.header_and_shortids)
class msg_getblocktxn(object):
command = b"getblocktxn"
def __init__(self):
self.block_txn_request = None
def deserialize(self, f):
self.block_txn_request = BlockTransactionsRequest()
self.block_txn_request.deserialize(f)
def serialize(self):
r = b""
r += self.block_txn_request.serialize()
return r
def __repr__(self):
return "msg_getblocktxn(block_txn_request=%s)" % (repr(self.block_txn_request))
class msg_blocktxn(object):
command = b"blocktxn"
def __init__(self):
self.block_transactions = BlockTransactions()
def deserialize(self, f):
self.block_transactions.deserialize(f)
def serialize(self):
r = b""
r += self.block_transactions.serialize()
return r
def __repr__(self):
return "msg_blocktxn(block_transactions=%s)" % (repr(self.block_transactions))
class msg_witness_blocktxn(msg_blocktxn):
def serialize(self):
r = b""
r += self.block_transactions.serialize(with_witness=True)
return r
# This is what a callback should look like for NodeConn
# Reimplement the on_* functions to provide handling for events
class NodeConnCB(object):
def __init__(self):
self.verack_received = False
# deliver_sleep_time is helpful for debugging race conditions in p2p
# tests; it causes message delivery to sleep for the specified time
# before acquiring the global lock and delivering the next message.
self.deliver_sleep_time = None
# Remember the services our peer has advertised
self.peer_services = None
def set_deliver_sleep_time(self, value):
with mininode_lock:
self.deliver_sleep_time = value
def get_deliver_sleep_time(self):
with mininode_lock:
return self.deliver_sleep_time
# Spin until verack message is received from the node.
# Tests may want to use this as a signal that the test can begin.
# This can be called from the testing thread, so it needs to acquire the
# global lock.
def wait_for_verack(self):
while True:
with mininode_lock:
if self.verack_received:
return
time.sleep(0.05)
def deliver(self, conn, message):
deliver_sleep = self.get_deliver_sleep_time()
if deliver_sleep is not None:
time.sleep(deliver_sleep)
with mininode_lock:
try:
getattr(self, 'on_' + message.command.decode('ascii'))(conn, message)
except:
print("ERROR delivering %s (%s)" % (repr(message),
sys.exc_info()[0]))
def on_version(self, conn, message):
if message.nVersion >= 209:
conn.send_message(msg_verack())
conn.ver_send = min(MY_VERSION, message.nVersion)
if message.nVersion < 209:
conn.ver_recv = conn.ver_send
conn.nServices = message.nServices
def on_verack(self, conn, message):
conn.ver_recv = conn.ver_send
self.verack_received = True
def on_inv(self, conn, message):
want = msg_getdata()
for i in message.inv:
if i.type != 0:
want.inv.append(i)
if len(want.inv):
conn.send_message(want)
def on_addr(self, conn, message): pass
def on_alert(self, conn, message): pass
def on_getdata(self, conn, message): pass
def on_getblocks(self, conn, message): pass
def on_tx(self, conn, message): pass
def on_block(self, conn, message): pass
def on_getaddr(self, conn, message): pass
def on_headers(self, conn, message): pass
def on_getheaders(self, conn, message): pass
def on_ping(self, conn, message):
if conn.ver_send > BIP0031_VERSION:
conn.send_message(msg_pong(message.nonce))
def on_reject(self, conn, message): pass
def on_open(self, conn): pass
def on_close(self, conn): pass
def on_mempool(self, conn): pass
def on_pong(self, conn, message): pass
def on_feefilter(self, conn, message): pass
def on_sendheaders(self, conn, message): pass
def on_sendcmpct(self, conn, message): pass
def on_cmpctblock(self, conn, message): pass
def on_getblocktxn(self, conn, message): pass
def on_blocktxn(self, conn, message): pass
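# Illustrative minimal subclass (names are example-only): override just the events
# the test cares about and let the rest fall through to the no-op handlers above.
#   class ExampleTestNode(NodeConnCB):
#       def __init__(self):
#           NodeConnCB.__init__(self)
#           self.last_block = None
#       def on_block(self, conn, message):
#           message.block.calc_sha256()
#           self.last_block = message.block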
# More useful callbacks and functions for NodeConnCB's which have a single NodeConn
class SingleNodeConnCB(NodeConnCB):
def __init__(self):
NodeConnCB.__init__(self)
self.connection = None
self.ping_counter = 1
self.last_pong = msg_pong()
def add_connection(self, conn):
self.connection = conn
# Wrapper for the NodeConn's send_message function
def send_message(self, message):
self.connection.send_message(message)
def send_and_ping(self, message):
self.send_message(message)
self.sync_with_ping()
def on_pong(self, conn, message):
self.last_pong = message
# Sync up with the node
def sync_with_ping(self, timeout=30):
def received_pong():
return (self.last_pong.nonce == self.ping_counter)
self.send_message(msg_ping(nonce=self.ping_counter))
success = wait_until(received_pong, timeout=timeout)
self.ping_counter += 1
return success
# The actual NodeConn class
# This class provides an interface for a p2p connection to a specified node
class NodeConn(asyncore.dispatcher):
messagemap = {
b"version": msg_version,
b"verack": msg_verack,
b"addr": msg_addr,
b"alert": msg_alert,
b"inv": msg_inv,
b"getdata": msg_getdata,
b"getblocks": msg_getblocks,
b"tx": msg_tx,
b"block": msg_block,
b"getaddr": msg_getaddr,
b"ping": msg_ping,
b"pong": msg_pong,
b"headers": msg_headers,
b"getheaders": msg_getheaders,
b"reject": msg_reject,
b"mempool": msg_mempool,
b"feefilter": msg_feefilter,
b"sendheaders": msg_sendheaders,
b"sendcmpct": msg_sendcmpct,
b"cmpctblock": msg_cmpctblock,
b"getblocktxn": msg_getblocktxn,
b"blocktxn": msg_blocktxn
}
MAGIC_BYTES = {
"mainnet": b"\x63\x42\x21\x2c", # mainnet
"testnet4": b"\x80\x80\xd8\xe9", # testnet4
"regtest": b"\x80\xf3\xf8\xe2", # regtest
}
def __init__(self, dstaddr, dstport, rpc, callback, net="regtest", services=NODE_NETWORK, send_version=True):
asyncore.dispatcher.__init__(self, map=mininode_socket_map)
self.log = logging.getLogger("NodeConn(%s:%d)" % (dstaddr, dstport))
self.dstaddr = dstaddr
self.dstport = dstport
self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
self.sendbuf = b""
self.recvbuf = b""
self.ver_send = 209
self.ver_recv = 209
self.last_sent = 0
self.state = "connecting"
self.network = net
self.cb = callback
self.disconnect = False
self.nServices = 0
if send_version:
# stuff version msg into sendbuf
vt = msg_version()
vt.nServices = services
vt.addrTo.ip = self.dstaddr
vt.addrTo.port = self.dstport
vt.addrFrom.ip = "0.0.0.0"
vt.addrFrom.port = 0
self.send_message(vt, True)
print('MiniNode: Connecting to Mincoin Node IP # ' + dstaddr + ':' \
+ str(dstport))
try:
self.connect((dstaddr, dstport))
except:
self.handle_close()
self.rpc = rpc
def show_debug_msg(self, msg):
self.log.debug(msg)
def handle_connect(self):
if self.state != "connected":
self.show_debug_msg("MiniNode: Connected & Listening: \n")
self.state = "connected"
self.cb.on_open(self)
def handle_close(self):
self.show_debug_msg("MiniNode: Closing Connection to %s:%d... "
% (self.dstaddr, self.dstport))
self.state = "closed"
self.recvbuf = b""
self.sendbuf = b""
try:
self.close()
except:
pass
self.cb.on_close(self)
def handle_read(self):
try:
t = self.recv(8192)
if len(t) > 0:
self.recvbuf += t
self.got_data()
except:
pass
def readable(self):
return True
def writable(self):
with mininode_lock:
pre_connection = self.state == "connecting"
length = len(self.sendbuf)
return (length > 0 or pre_connection)
def handle_write(self):
with mininode_lock:
# asyncore does not expose socket connection, only the first read/write
# event, thus we must check connection manually here to know when we
# actually connect
if self.state == "connecting":
self.handle_connect()
if not self.writable():
return
try:
sent = self.send(self.sendbuf)
except:
self.handle_close()
return
self.sendbuf = self.sendbuf[sent:]
def got_data(self):
try:
while True:
if len(self.recvbuf) < 4:
return
if self.recvbuf[:4] != self.MAGIC_BYTES[self.network]:
raise ValueError("got garbage %s" % repr(self.recvbuf))
if self.ver_recv < 209:
if len(self.recvbuf) < 4 + 12 + 4:
return
command = self.recvbuf[4:4+12].split(b"\x00", 1)[0]
msglen = struct.unpack("<i", self.recvbuf[4+12:4+12+4])[0]
checksum = None
if len(self.recvbuf) < 4 + 12 + 4 + msglen:
return
msg = self.recvbuf[4+12+4:4+12+4+msglen]
self.recvbuf = self.recvbuf[4+12+4+msglen:]
else:
if len(self.recvbuf) < 4 + 12 + 4 + 4:
return
command = self.recvbuf[4:4+12].split(b"\x00", 1)[0]
msglen = struct.unpack("<i", self.recvbuf[4+12:4+12+4])[0]
checksum = self.recvbuf[4+12+4:4+12+4+4]
if len(self.recvbuf) < 4 + 12 + 4 + 4 + msglen:
return
msg = self.recvbuf[4+12+4+4:4+12+4+4+msglen]
th = sha256(msg)
h = sha256(th)
if checksum != h[:4]:
raise ValueError("got bad checksum " + repr(self.recvbuf))
self.recvbuf = self.recvbuf[4+12+4+4+msglen:]
if command in self.messagemap:
f = BytesIO(msg)
t = self.messagemap[command]()
t.deserialize(f)
self.got_message(t)
else:
self.show_debug_msg("Unknown command: '" + command + "' " +
repr(msg))
except Exception as e:
print('got_data:', repr(e))
# import traceback
# traceback.print_tb(sys.exc_info()[2])
def send_message(self, message, pushbuf=False):
if self.state != "connected" and not pushbuf:
raise IOError('Not connected, no pushbuf')
self.show_debug_msg("Send %s" % repr(message))
command = message.command
data = message.serialize()
tmsg = self.MAGIC_BYTES[self.network]
tmsg += command
tmsg += b"\x00" * (12 - len(command))
tmsg += struct.pack("<I", len(data))
if self.ver_send >= 209:
th = sha256(data)
h = sha256(th)
tmsg += h[:4]
tmsg += data
with mininode_lock:
self.sendbuf += tmsg
self.last_sent = time.time()
def got_message(self, message):
if message.command == b"version":
if message.nVersion <= BIP0031_VERSION:
self.messagemap[b'ping'] = msg_ping_prebip31
if self.last_sent + 30 * 60 < time.time():
self.send_message(self.messagemap[b'ping']())
self.show_debug_msg("Recv %s" % repr(message))
self.cb.deliver(self, message)
def disconnect_node(self):
self.disconnect = True
class NetworkThread(Thread):
def run(self):
while mininode_socket_map:
# We check for whether to disconnect outside of the asyncore
            # loop to work around the behavior of asyncore when using
# select
disconnected = []
for fd, obj in mininode_socket_map.items():
if obj.disconnect:
disconnected.append(obj)
[ obj.handle_close() for obj in disconnected ]
asyncore.loop(0.1, use_poll=True, map=mininode_socket_map, count=1)
# An exception we can raise if we detect a potential disconnect
# (p2p or rpc) before the test is complete
class EarlyDisconnectError(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
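# Illustrative end-to-end wiring of this framework in a test (the port, rpc_client
# and block_hash names are placeholders supplied by the surrounding test harness):
#   test_node = SingleNodeConnCB()
#   conn = NodeConn('127.0.0.1', 18444, rpc_client, test_node, net="regtest")
#   test_node.add_connection(conn)
#   NetworkThread().start()        # drives the asyncore loop for all NodeConn objects
#   test_node.wait_for_verack()    # handshake complete; test logic can begin
#   test_node.send_and_ping(msg_inv([CInv(2, block_hash)]))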
| xieta/mincoin | qa/rpc-tests/test_framework/mininode.py | Python | mit | 55,509 | 0.00191 |
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime
import json
import command_processor
import http_server
import spectator_client
def millis_to_time(millis):
"""Convert milliseconds to a time string."""
return datetime.fromtimestamp(millis / 1000).isoformat('T') + 'Z'
def strip_non_html_params(options):
"""Return a copy of options with only those that are query parameters.
This is to propagate options in web response URLs.
"""
params = {}
for key in ['tagNameRegex', 'tagValueRegex', 'metricNameRegex']:
if key in options:
params[key] = options[key]
return params
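# Illustrative behaviour: only the regex filter options survive into generated links.
#   strip_non_html_params({'tagNameRegex': 'controller.*', 'services': 'all'})
#     == {'tagNameRegex': 'controller.*'}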
class BaseSpectatorCommandHandler(command_processor.CommandHandler):
def make_spectator_client(self, options):
return spectator_client.SpectatorClient(options)
def add_argparser(self, subparsers):
parser = super(BaseSpectatorCommandHandler, self).add_argparser(subparsers)
parser.add_argument('--by', default='service',
help='Organize by "service" or by "metric" name.')
spectator_client.SpectatorClient.add_standard_parser_arguments(parser)
return parser
def _get_data_map(self, catalog, options):
restrict_services = options.get('services', None)
if restrict_services:
catalog = {service: config
for service, config in catalog.items()
if service in restrict_services.split(',')}
spectator = self.make_spectator_client(options)
by = options.get('by', 'service')
if by == 'service':
data_map = spectator.scan_by_service(catalog, params=options)
else:
data_map = spectator.scan_by_type(catalog, params=options)
return data_map
class DumpMetricsHandler(BaseSpectatorCommandHandler):
def process_commandline_request(self, options):
catalog = spectator_client.get_source_catalog(options)
data_map = self._get_data_map(catalog, options)
json_text = json.JSONEncoder(indent=2).encode(data_map)
self.output(options, json_text)
def process_web_request(self, request, path, params, fragment):
options = dict(command_processor.get_global_options())
options.update(params)
catalog = spectator_client.get_source_catalog(options)
param_services = params.get('services', 'all').split(',')
if param_services == ['all']:
restricted_catalog = catalog
else:
restricted_catalog = {key: value
for key, value in catalog.items()
if key in param_services}
data_map = self._get_data_map(restricted_catalog, options)
body = json.JSONEncoder(indent=2).encode(data_map)
request.respond(200, {'ContentType': 'application/json'}, body)
class ExploreCustomDescriptorsHandler(BaseSpectatorCommandHandler):
"""Show all the current descriptors in use, and who is using them."""
def __get_type_and_tag_map_and_active_services(self, catalog, options):
spectator = self.make_spectator_client(options)
type_map = spectator.scan_by_type(catalog, params=options)
service_tag_map, active_services = self.to_service_tag_map(type_map)
return type_map, service_tag_map, active_services
def process_commandline_request(self, options):
catalog = spectator_client.get_source_catalog(options)
type_map, service_tag_map, active_services = (
self.__get_type_and_tag_map_and_active_services(
catalog, options))
params = strip_non_html_params(options)
html = self.to_html(type_map, service_tag_map, active_services, params)
html_doc = http_server.build_html_document(
html, title='Metric Usage')
self.output(options, html_doc)
def process_web_request(self, request, path, params, fragment):
options = dict(command_processor.get_global_options())
options.update(params)
catalog = spectator_client.get_source_catalog(options)
type_map, service_tag_map, active_services = (
self.__get_type_and_tag_map_and_active_services(catalog, options))
params = strip_non_html_params(options)
html = self.to_html(type_map, service_tag_map, active_services, params)
html_doc = http_server.build_html_document(
html, title='Metric Usage')
request.respond(200, {'ContentType': 'text/html'}, html_doc)
@staticmethod
def to_service_tag_map(type_map):
service_tag_map = {}
active_services = set()
def process_endpoint_values_helper(key, service, values):
if not isinstance(values, dict):
return
tagged_data = values.get('values', [])
for tagged_point in tagged_data:
tag_map = {tag['key']: tag['value']
for tag in tagged_point.get('tags')}
if not tag_map:
tag_map = {None: None}
if key not in service_tag_map:
service_tag_map[key] = {service: [tag_map]}
else:
service_map = service_tag_map[key]
if service in service_map:
service_map[service].append(tag_map)
else:
service_map[service] = [tag_map]
for key, entry in sorted(type_map.items()):
# pylint: disable=bad-indentation
for service, value_list in sorted(entry.items()):
active_services.add(service)
for value in value_list:
process_endpoint_values_helper(key, service, value)
return service_tag_map, active_services
@staticmethod
def to_tag_service_map(columns, service_tag_map):
tag_service_map = {}
for service, tags in service_tag_map.items():
service_index = columns[service]
for tag_group in tags:
for tag_name, tag_value in tag_group.items():
if tag_name not in tag_service_map:
tag_service_map[tag_name] = [set() for ignore in columns]
tag_service_map[tag_name][service_index].add(tag_value)
return tag_service_map
def to_html(self, type_map, service_tag_map, active_services, params=None):
header_html = ['<tr>', '<th>Metric</th>', '<th>Label</th>']
columns = {}
for service_name in sorted(active_services):
columns[service_name] = len(columns)
header_html.append('<th><A href="/show?services={0}">{0}</A></th>'.format(
service_name))
header_html.append('</tr>')
html = ['<table border=1>']
html.extend(header_html)
for type_name, service_tag_map in sorted(service_tag_map.items()):
tag_service_map = self.to_tag_service_map(columns, service_tag_map)
num_labels = len(tag_service_map)
row_html = ['<tr>']
row_span = ' rowspan={0}'.format(num_labels) if num_labels > 1 else ''
query_params = dict(params or {})
query_params['meterNameRegex'] = type_name
metric_url = '/show{0}'.format(self.params_to_query(query_params))
row_html.append(
'<td{row_span}><A href="{url}">{type_name}</A></td>'.format(
row_span=row_span, url=metric_url, type_name=type_name))
for label_name, service_values in tag_service_map.items():
if label_name is None:
row_html.append('<td></td>')
else:
row_html.append(
'<td><A href="/explore?tagNameRegex={0}">{0}</A></td>'.format(
label_name))
for value_set in service_values:
if value_set == set([None]):
row_html.append('<td>n/a</td>')
else:
row_html.append(
'<td>{0}</td>'.format(', '.join(
['<A href="/explore?tagValueRegex={v}">{v}</A>'.format(
v=value)
for value in sorted(value_set)])))
row_html.append('</tr>')
html.append(''.join(row_html))
row_html = ['<tr>'] # prepare for next row if needed
html.append('</table>')
return '\n'.join(html)
class TagValue(object):
def __init__(self, tag):
self.key = tag['key']
self.value = tag['value']
def __hash__(self):
return hash((self.key, self.value))
def __eq__(self, value):
return self.key == value.key and self.value == value.value
def __repr__(self):
return self.__str__()
def __str__(self):
return '{0}={1}'.format(self.key, self.value)
def as_html(self):
return '<code><b>{0}</b>={1}</code>'.format(self.key, self.value)
class ShowCurrentMetricsHandler(BaseSpectatorCommandHandler):
"""Show all the current metric values."""
def process_commandline_request(self, options):
catalog = spectator_client.get_source_catalog(options)
data_map = self._get_data_map(catalog, options)
by = options.get('by', 'service')
if by == 'service':
content_data = self.service_map_to_text(data_map, params=options)
else:
content_data = self.type_map_to_text(data_map, params=options)
self.output(options, content_data)
def process_web_request(self, request, path, params, fragment):
options = dict(command_processor.get_global_options())
options.update(params)
catalog = spectator_client.get_source_catalog(options)
data_map = self._get_data_map(catalog, options)
if self.accepts_content_type(request, 'text/html'):
content_type = 'text/html'
by_service = self.service_map_to_html
by_type = self.type_map_to_html
else:
content_type = 'text/plain'
by_service = self.service_map_to_text
by_type = self.type_map_to_text
by = options.get('by', 'service')
if by == 'service':
content_data = by_service(data_map, params=params)
else:
content_data = by_type(data_map, params=params)
if content_type == 'text/html':
body = http_server.build_html_document(
content_data, title='Current Metrics')
else:
body = content_data
request.respond(200, {'ContentType': content_type}, body)
def all_tagged_values(self, value_list):
all_values = []
for data in value_list:
tags = [TagValue(tag) for tag in data.get('tags', [])]
all_values.append((tags, data['values']))
return all_values
def data_points_to_td(self, data_points):
if len(data_points) == 1:
point = data_points[0]
return '<td>{time}</td><td>{value}</td>'.format(
time=millis_to_time(point['t']), value=point['v'])
td_html = '<td colspan=2><table>'
for point in data_points:
td_html += '<tr><td>{time}</td><td>{value}</td></tr>'.format(
time=millis_to_time(point['t']),
value=point['v'])
td_html += '</tr></table></td>'
return td_html
def data_points_to_text(self, data_points):
text = []
for point in data_points:
text.append('{time} {value}'.format(
time=millis_to_time(point['t']),
value=point['v']))
return ', '.join(text)
def service_map_to_text(self, service_map, params=None):
lines = []
def process_metrics_helper(metrics):
for key, value in metrics.items():
tagged_values = self.all_tagged_values(value.get('values'))
parts = ['Service "{0}"'.format(service)]
parts.append(' {0}'.format(key))
for one in tagged_values:
tag_list = one[0]
tag_text = ', '.join([str(elem) for elem in tag_list])
time_values = self.data_points_to_text(one[1])
parts.append(' Tags={0}'.format(tag_text))
parts.append(' Values={0}'.format(time_values))
lines.append('\n'.join(parts))
for service, entry_list in sorted(service_map.items()):
for entry in entry_list or []:
process_metrics_helper(entry.get('metrics', {}))
return '\n\n'.join(lines)
def service_map_to_html(self, service_map, params=None):
column_headers_html = ('<tr><th>Service</th><th>Key</th><th>Tags</th>'
'<th>Timestamp</th><th>Value</th></tr>')
result = ['<table>',
'<tr><th>Service</th><th>Metric</th>'
'<th>Timestamp</th><th>Values</th><th>Labels</th></tr>']
def process_metrics_helper(metrics):
for key, value in metrics.items():
# pylint: disable=bad-indentation
tagged_values = self.all_tagged_values(value.get('values'))
service_url = '/show{0}'.format(
self.params_to_query({'services': service}))
metric_url = '/show{0}'.format(
self.params_to_query({'meterNameRegex': key}))
html = (
'<tr>'
'<th rowspan={rowspan}><A href="{service_url}">{service}</A></th>'
'<th rowspan={rowspan}><A href="{metric_url}">{key}</A></th>'
.format(rowspan=len(tagged_values),
service_url=service_url,
service=service,
metric_url=metric_url,
key=key))
for one in tagged_values:
tag_list = one[0]
tag_html = '<br/>'.join([elem.as_html() for elem in tag_list])
time_value_td = self.data_points_to_td(one[1])
html += '{time_value_td}<td>{tag_list}</td></tr>'.format(
time_value_td=time_value_td, tag_list=tag_html)
result.append(html)
html = '<tr>'
for service, entry_list in sorted(service_map.items()):
for entry in entry_list or []:
process_metrics_helper(entry.get('metrics', {}))
result.append('</table>')
return '\n'.join(result)
def type_map_to_text(self, type_map, params=None):
lines = []
def process_values_helper(values):
tagged_values = self.all_tagged_values(values)
for tag_value in tagged_values:
text_key = ', '.join([str(tag) for tag in tag_value[0]])
tag_to_service_values[text_key] = (service, tag_value[1])
for key, entry in sorted(type_map.items()):
tag_to_service_values = {}
for service, value_list in sorted(entry.items()):
for value in value_list:
process_values_helper(value.get('values'))
parts = ['Metric "{0}"'.format(key)]
for tags_text, values in sorted(tag_to_service_values.items()):
parts.append(' Service "{0}"'.format(values[0]))
parts.append(' Value: {0}'.format(
self.data_points_to_text(values[1])))
parts.append(' Tags: {0}'.format(tags_text))
lines.append('\n'.join(parts))
return '\n\n'.join(lines)
def type_map_to_html(self, type_map, params=None):
"""Helper function to render descriptor usage into text."""
column_headers_html = ('<tr><th>Key</th><th>Timestamp</th><th>Value</th>'
'<th>Service</th><th>Tags</th></tr>')
row_html = []
def process_values_helper(values):
tagged_values = self.all_tagged_values(values)
for tag_value in tagged_values:
html_key = '<br/>'.join([tag.as_html() for tag in tag_value[0]])
tag_to_service_values[html_key] = (service, tag_value[1])
for key, entry in sorted(type_map.items()):
tag_to_service_values = {}
for service, value_list in sorted(entry.items()):
for value in value_list or []:
process_values_helper(value.get('values'))
row_html.append('<tr><td rowspan={rowspan}><b>{key}</b></td>'.format(
rowspan=len(tag_to_service_values), key=key))
sep = ''
for tags_html, values in sorted(tag_to_service_values.items()):
time_value_td = self.data_points_to_td(values[1])
row_html.append('{sep}{time_value_td}'
'<td><i>{service}</i></td><td>{tags}</td></tr>'
.format(sep=sep, time_value_td=time_value_td,
service=values[0], tags=tags_html))
sep = '<tr>'
return '<table>\n{header}\n{rows}\n</table>'.format(
header=column_headers_html, rows='\n'.join(row_html))
def add_handlers(handler_list, subparsers):
command_handlers = [
ShowCurrentMetricsHandler(
'/show', 'show', 'Show current metric JSON for all Spinnaker.'),
DumpMetricsHandler(
'/dump', 'dump',
'Show current raw metric JSON from all the servers.'),
ExploreCustomDescriptorsHandler(
'/explore', 'explore',
'Explore metric type usage across Spinnaker microservices.')
]
for handler in command_handlers:
handler.add_argparser(subparsers)
handler_list.append(handler)
| okoye/spinnaker-monitoring | spinnaker-monitoring-daemon/spinnaker-monitoring/spectator_handlers.py | Python | apache-2.0 | 16,780 | 0.007867 |
#
# Copyright (c) 2016, Oracle and/or its affiliates. All rights reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
"""
binlog_purge test for the multi-source (ms) replication topology and BUG#22543517:
running mysqlbinlogpurge against a second master added to the slave's replication
channels.
"""
import replicate_ms
from mysql.utilities.exception import MUTLibError
_CHANGE_MASTER = ("CHANGE MASTER TO MASTER_HOST = 'localhost', "
"MASTER_USER = 'rpl', MASTER_PASSWORD = 'rpl', "
"MASTER_PORT = {0}, MASTER_AUTO_POSITION=1 "
"FOR CHANNEL 'master-{1}'")
def flush_server_logs_(server, times=5):
"""Flush logs on a server
    server[in]     the server instance on which to flush the logs
times[in] number of times to flush the logs.
"""
# Flush master binary log
server.exec_query("SET sql_log_bin = 0")
for _ in range(times):
server.exec_query("FLUSH LOCAL BINARY LOGS")
server.exec_query("SET sql_log_bin = 1")
class test(replicate_ms.test):
"""test binlog purge Utility
This test runs the mysqlbinlogpurge utility on a known topology.
"""
master_datadir = None
slaves = None
mask_ports = []
def check_prerequisites(self):
if not self.servers.get_server(0).check_version_compat(5, 7, 6):
raise MUTLibError("Test requires server version 5.7.6 or later")
return self.check_num_servers(1)
def setup(self):
self.res_fname = "result.txt"
res = super(test, self).setup()
if not res:
return False
# Setup multiple channels for slave
m1_dict = self.get_connection_values(self.server2)
m2_dict = self.get_connection_values(self.server3)
for master in [self.server2, self.server3]:
master.exec_query("SET SQL_LOG_BIN= 0")
master.exec_query("GRANT REPLICATION SLAVE ON *.* TO 'rpl'@'{0}' "
"IDENTIFIED BY 'rpl'".format(self.server1.host))
master.exec_query("SET SQL_LOG_BIN= 1")
self.server1.exec_query("SET GLOBAL relay_log_info_repository = "
"'TABLE'")
self.server1.exec_query(_CHANGE_MASTER.format(m1_dict[3], 1))
self.server1.exec_query(_CHANGE_MASTER.format(m2_dict[3], 2))
self.server1.exec_query("START SLAVE")
return True
def run(self):
test_num = 0
master1_conn = self.build_connection_string(self.server2).strip(' ')
master2_conn = self.build_connection_string(self.server3).strip(' ')
cmd_str = "mysqlbinlogpurge.py --master={0} ".format(master1_conn)
cmd_opts = ("--discover-slaves={0} --dry-run "
"".format(master1_conn.split('@')[0]))
test_num += 1
comment = ("Test case {0} - mysqlbinlogpurge: with discover "
"and verbose options - master 1".format(test_num))
cmds = ("{0} {1} {2} -vv"
"").format(cmd_str, cmd_opts, "binlog_purge{0}.log".format(1))
res = self.run_test_case(0, cmds, comment)
if not res:
raise MUTLibError("{0}: failed".format(comment))
flush_server_logs_(self.server1)
cmd_str = "mysqlbinlogpurge.py --master={0} ".format(master2_conn)
test_num += 1
comment = ("Test case {0} - mysqlbinlogpurge: with discover "
"and verbose options - master 2".format(test_num))
cmds = ("{0} {1} {2} -vv"
"").format(cmd_str, cmd_opts, "binlog_purge{0}.log".format(2))
res = self.run_test_case(0, cmds, comment)
if not res:
raise MUTLibError("{0}: failed".format(comment))
flush_server_logs_(self.server1)
super(test, self).reset_ms_topology()
return True
def get_result(self):
# If run method executes successfully without throwing any exceptions,
# then test was successful
return True, None
def record(self):
# Not a comparative test
return True
def cleanup(self):
return super(test, self).cleanup()
| mysql/mysql-utilities | mysql-test/suite/replication/t/binlog_purge_ms.py | Python | gpl-2.0 | 4,696 | 0 |
# -*- coding: utf-8 -*-
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('main', '0006_updater_updaterevent'),
('fanfunding', '0002_auto_20160416_0621'),
]
operations = [
migrations.RenameField(
model_name='fanfundingevent',
old_name='ffu',
new_name='updater',
),
migrations.RemoveField(
model_name='fanfundingevent',
name='funding_id',
),
migrations.RemoveField(
model_name='fanfundingevent',
name='id',
),
migrations.RemoveField(
model_name='fanfundingupdate',
name='failure_count',
),
migrations.RemoveField(
model_name='fanfundingupdate',
name='id',
),
migrations.RemoveField(
model_name='fanfundingupdate',
name='last_failure',
),
migrations.RemoveField(
model_name='fanfundingupdate',
name='last_failure_message',
),
migrations.RemoveField(
model_name='fanfundingupdate',
name='last_update',
),
migrations.AddField(
model_name='fanfundingevent',
name='updaterevent_ptr',
field=models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, default=1, serialize=False, to='main.UpdaterEvent'),
preserve_default=False,
),
migrations.AddField(
model_name='fanfundingupdate',
name='updater_ptr',
field=models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, default=1, serialize=False, to='main.Updater'),
preserve_default=False,
),
]
| google/mirandum | alerts/fanfunding/migrations/0003_auto_20160416_2023.py | Python | apache-2.0 | 2,470 | 0.002429 |
# Copyright 2011 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""GSoCStudentInfo updating MapReduce."""
import logging
from mapreduce import operation
# MapReduce requires these models to have been imported.
# pylint: disable=unused-import
from soc.models.user import User
from soc.modules.gsoc.models.profile import GSoCStudentInfo
from soc.modules.gsoc.models.proposal import GSoCProposal
from soc.modules.gsoc.models.project import GSoCProject
# pylint: enable=unused-import
def process(student_info):
logging.debug("Converting student_info '%r'", student_info.key())
proposals = GSoCProposal.all().ancestor(student_info.parent_key()).fetch(1000)
projects = GSoCProject.all().ancestor(student_info.parent_key()).fetch(1000)
proposals = [i for i in proposals if i.status != 'withdrawn']
projects = [i for i in projects if i.status != 'withdrawn']
nr_proposals = len(proposals)
nr_projects = len(projects)
orgs = [GSoCProject.org.get_value_for_datastore(i) for i in projects]
student_info.number_of_proposals = nr_proposals
student_info.number_of_projects = nr_projects
student_info.project_for_orgs = orgs
yield operation.db.Put(student_info)
yield operation.counters.Increment("student_infos_converted")
yield operation.counters.Increment("proposals_counted", delta=nr_proposals)
yield operation.counters.Increment("projects_counted", delta=nr_projects)
| rhyolight/nupic.son | app/soc/mapreduce/convert_student_info.py | Python | apache-2.0 | 1,918 | 0.008342 |
#!/usr/bin/python
from __future__ import print_function
from guild.actor import Actor, actor_method, process_method, late_bind
class Dog(Actor):
@actor_method # Input - triggered by data coming in
def woof(self):
print("Woof", self)
@process_method # Process - triggered each time it's run
def process(self):
#print(" ", end="")
pass
@late_bind # Output
def produce(self):
pass
class Shitzu(Dog):
def __init__(self):
self.count = 0
        super(Shitzu, self).__init__()
@process_method
def process(self):
self.count += 1
print("I don't go meow", self.count)
if self.count >= 20:
self.stop()
return False
if __name__ == "__main__":
import time
dog = Dog()
shitzu = Shitzu()
dog.start()
shitzu.start()
dog.woof()
shitzu.woof()
time.sleep(0.1)
shitzu.join()
time.sleep(0.1)
dog.stop()
dog.join()
| sparkslabs/guild | examples/dogs_go_woof_actors.py | Python | apache-2.0 | 988 | 0.001012 |
import sys
from gnucash import *
from gnucash import _sw_app_utils
from gnucash import _sw_core_utils
from gnucash._sw_core_utils import gnc_prefs_is_extra_enabled, gnc_prefs_is_debugging_enabled
from gi import require_version
require_version('Gtk', '3.0')
from gi.repository import Gtk
import os
sys.path.append(os.path.dirname(__file__))
# output file location if gnucash has been started with
# gnucash --extra
if gnc_prefs_is_extra_enabled():
print("Python shell init file: %s" % (__file__))
print("\n" + "The following string should appear translated in your preferred language:" + "\n")
print("\n" + _("Welcome to GnuCash") +"\n")
# Importing the console class causes SIGTTOU to be thrown if GnuCash is
# started in the background. This causes a hang if it is not handled,
# so ignore it for the duration
import signal
old_sigttou = signal.signal(signal.SIGTTOU, signal.SIG_IGN)
import pycons.console as cons
# Restore the SIGTTOU handler
signal.signal(signal.SIGTTOU, old_sigttou)
# output debug information if gnucash has been started with
# gnucash --debug --extra
if gnc_prefs_is_extra_enabled() and gnc_prefs_is_debugging_enabled():
print("Hello from python!\n")
print("sys.modules.keys(): ", sys.modules.keys(), "\n")
print("dir(_sw_app_utils): ", dir(_sw_app_utils), "\n")
#session = app_utils.gnc_get_current_session()
#root account can later on be accessed by session.get_book().get_root_account()
#print("test", dir(root), root.__class__)
print("dir(gnucash_core_c): ", dir(gnucash_core_c))
#acct = Account(instance = root)
#print("test3", dir(acct))
#print(acct.GetName())
#print(acct.GetBalance())
#print(acct.GetSplitList())
#print("test2", dir(gnucash.gnucash_core_c))
class Console (cons.Console):
""" GTK python console """
def __init__(self, argv=[], shelltype='python', banner=[],
filename=None, size=100, user_local_ns=None, user_global_ns=None):
cons.Console.__init__(self, argv, shelltype, banner, filename, size,
user_local_ns=user_local_ns, user_global_ns=user_global_ns)
self.buffer.create_tag('center',
justification=Gtk.Justification.CENTER,
font='Mono 4')
self.figures = []
self.callbacks = []
self.last_figure = None
self.active_canvas = None
self.view.connect ('key-press-event', self.key_press_event)
self.view.connect ('button-press-event', self.button_press_event)
self.view.connect ('scroll-event', self.scroll_event)
def key_press_event (self, widget, event):
""" Handle key press event """
if self.active_canvas:
self.active_canvas.emit ('key-press-event', event)
return True
return cons.Console.key_press_event (self, widget, event)
def scroll_event (self, widget, event):
""" Scroll event """
if self.active_canvas:
return True
return False
def button_press_event (self, widget, event):
""" Button press event """
return self.refresh()
def quit_event (self, widget, event):
""" Event handler for closing of console window """
return self.quit()
def refresh (self):
""" Refresh drawing """
for fig in self.figures:
figure, canvas, anchor = fig
canvas.draw()
return False
def quit (self):
""" quit """
self.write("\n" + _("Have a nice day!") + "\n")
return super(Console, self).quit()
# Change this to "if True:" to switch on a python console at gnucash
# startup:
# shelltype can either be "python" or "ipython" (the latter is not yet fully functional)
if False:
shelltype = "python"
if shelltype=="python":
shelltypeName = "Python"
else:
shelltypeName = "IPython"
banner_style = 'title'
# TRANSLATORS: %s is either Python or IPython
banner = _("Welcome to GnuCash %s Shell") % shelltypeName
console = Console(argv = [], shelltype = shelltype, banner = [[banner, banner_style]], size = 100)
window = Gtk.Window(type = Gtk.WindowType.TOPLEVEL)
window.set_position(Gtk.WindowPosition.CENTER)
window.set_default_size(800,600)
window.set_border_width(0)
console = Console(argv = [], shelltype = shelltype, banner = [[banner, banner_style]],
size = 100, user_local_ns=locals(), user_global_ns=globals())
window.connect('destroy-event', console.quit_event)
window.connect('delete-event', console.quit_event)
window.add (console)
window.show_all()
console.grab_focus()
| muehlburger/gnucash | gnucash/python/init.py | Python | gpl-2.0 | 4,709 | 0.014865 |
# -*- coding:utf-8 -*-
import math
import web
import tx
def GetAdsPagination(assetid,page) :
html = ''
html = html + '<div name="pages" align="center">\n'
if assetid != None :
count = web.collection_ads.find({"asset":assetid}).count()
else :
count = web.collection_ads.find({"asset":{"$ne":"0"}}).count()
if count == 0 :
return ''
pages = count / web.ADS_PER_PAGE
if count % web.ADS_PER_PAGE != 0 :
pages = pages + 1
if page <= 4 :
displaystart = 1
else :
if page - 4 > 1 :
displaystart = page - 4
else :
displaystart = 1
if page >= pages - 4 and pages > 9 :
displaystart = pages - 9
displayend = pages
else :
if pages <= 9 :
displayend = pages
else :
displayend = displaystart + 9
if assetid != None :
html = html + '<a href="/address/' + assetid + '/page/' + str(1) + '"><<</a> '
else :
html = html + '<a href="/address/page/' + str(1) + '"><<</a> '
for i in range(displaystart,displayend+1) :
if i != page :
if assetid != None :
html = html + '<a href="/address/' + assetid + '/page/' + str(i) + '">' + str(i) + '</a> '
else :
html = html + '<a href="/address/page/' + str(i) + '">' + str(i) + '</a> '
else :
html = html + str(i) + ' '
if assetid != None :
html = html + '<a href="/address/' + assetid + '/page/' + str(pages) + '">>></a> '
else :
html = html + '<a href="/address/page/' + str(pages) + '">>></a> '
html = html + '<br/>\n'
html = html + '</div>\n'
return html
def GetAddressInternal(assetid,page,listnum) :
if page <= 0 :
return 'page index begin: 1'
start = (page-1) * listnum
html = ''
html = html + '<div class="container">\n'
html = html + '<table width="80%" border="0" cellpadding="3" cellspacing="0" align="center">'
html = html + '<tr align="left">'
html = html + '<th>'+ _("Address") +'</th><th>'+ _("AdsAsset") +'</th><th>'+ _("Value") +'</th><th>'+ _("Transaction Counts") +'</th><th>'+ _("Last Transaction Time") +'</th><th>'+ _("First Transaction Time") +'</th>' + '<br/>'
html = html + '</tr>'
if assetid != None :
results = web.collection_ads.find({"asset":assetid}).sort("last_tx_time",-1).limit(listnum).skip(start)
else :
results = web.collection_ads.find({"asset":{"$ne":"0"}}).sort("last_tx_time",-1).limit(listnum).skip(start)
if results :
for result in results :
html = html + '<tr>'
html = html + '<td>' + '<a href="/address/' + result['address'] + '">' + result['address'] + '</a></td>'
html = html + '<td>' + web.GetAssetName(result['asset']) + '</td>'
html = html + '<td>' + str(result['value']) + '</td>'
html = html + '<td>' + str(len(result['txid_list'])) + '</td>'
html = html + '<td>' + web.GetLocalTime(result['last_tx_time']) + '</td>'
html = html + '<td>' + web.GetLocalTime(result['first_tx_time']) + '</td>'
html = html + '</tr>'
html = html + '</table>\n'
html = html + '</div>\n'
return html
def GetAddressPage(assetid,page) :
html = web.GetHeader("address")
html = html + '<div name="address" align="center">\n'
html = html + '<br/><br/>\n'
html = html + '<h2>'+ _("Address Information") +'</h2>\n'
html = html + '<div class="container">\n'
count = web.collection_txs.find({"type":"RegisterTransaction"}).count()
results = web.collection_txs.find({"type":"RegisterTransaction"}).sort("height",1)
row = int(math.ceil(count / 4))
r = 0
for i in range(0, row+1) :
html = html + '<div class="row">\n'
html = html + '<div class="column column-20"></div>\n'
for j in range(0,4) :
if i==0 and j==0 :
if assetid == None :
html = html + '<div class="column column-15"><a href="/address/"><b>[' + _('All Asset') + ']</b></a></div>\n'
else :
html = html + '<div class="column column-15"><a href="/address/">[' + _('All Asset') + ']</a></div>\n'
continue
if r >= count :
html = html + '<div class="column column-15"></div>\n'
elif assetid == results[r]['txid']:
html = html + '<div class="column column-15"><a href="/address/' + results[r]['txid'] + '"><b>[' + web.GetAssetNameByAsset(results[r]['asset']) + ']</b></a></div>\n'
else :
html = html + '<div class="column column-15"><a href="/address/' + results[r]['txid'] + '">[' + web.GetAssetNameByAsset(results[r]['asset']) + ']</a></div>\n'
r = r + 1
html = html + '<div class="column column-20"></div>\n'
html = html + '</div>\n'
html = html + '</div>\n'
html = html + '<br/>\n'
if assetid != None :
html = html + '<h4>- '+ web.GetAssetName(assetid) +' -</h4>\n'
Pagination = GetAdsPagination(assetid,page)
html = html + Pagination
html = html + GetAddressInternal(assetid,page,web.ADS_PER_PAGE)
html = html + '<br/>\n'
html = html + Pagination
html = html + '</div>\n'
html = html + web.GetFooter()
return html
def GetAddressPagination(address_all,page,listnum) :
html = ''
html = html + '<div name="pages" align="center">\n'
count = len(address_all['txid_list'])
pages = count / listnum
if count % listnum != 0 :
pages = pages + 1
if page <= 4 :
displaystart = 1
else :
if page - 4 > 1 :
displaystart = page - 4
else :
displaystart = 1
if page >= pages - 4 and pages > 9 :
displaystart = pages - 9
displayend = pages
else :
if pages <= 9 :
displayend = pages
else :
displayend = displaystart + 9
ads = address_all['address']
html = html + '<a href="/address/' + ads + '/page/' + str(1) + '"><<</a> '
for i in range(displaystart,displayend+1) :
if i != page :
html = html + '<a href="/address/' + ads + '/page/' + str(i) + '">' + str(i) + '</a> '
else :
html = html + str(i) + ' '
html = html + '<a href="/address/' + ads + '/page/' + str(pages) + '">>></a> '
html = html + '<br/>\n'
html = html + '</div>\n'
return html
def GetAddressResultInternal(address_all,page,listnum) :
html = ''
nstart = (page-1) * listnum
i = -1
for txid in address_all['txid_list'] :
i = i + 1
if i < nstart :
continue
if i >= (nstart + listnum) :
break
tx_result = web.collection_txs.find_one({"txid":txid['txid']})
html = html + tx.GetTxResultInternal(tx_result,address_all['address'])
html = html + '<hr/>\n'
return html
def GetAddressResult(asset_address,address_all,page) :
html = ''
html = html + '<div class="container">\n'
address = asset_address[0]['address']
html = html + '<div class="row">\n'
html = html + '<div class="column column-15"><b>'+ _("Address") +'</b></div><div class="column"><b>' + address + '</b></div>\n'
html = html + '</div>\n'
ncount = 0
results = {}
for result in asset_address :
html = html + '<div class="row">\n'
html = html + '<div class="column column-15"><b>'+ _("Asset") +'</b></div><div class="column">' + str(result['value']) + ' <b>' + web.GetAssetName(result['asset']) + '</b></div>\n'
html = html + '</div>\n'
results[ncount] = result
ncount = ncount + 1
html = html + '<div class="row">\n'
html = html + '<div class="column column-15"><b>'+ _("First Transaction Time") +'</b></div><div class="column">' + web.GetLocalTime(address_all['first_tx_time']) + '</div>\n'
html = html + '</div>\n'
html = html + '<div class="row">\n'
html = html + '<div class="column column-15"><b>'+ _("Last Transaction Time") +'</b></div><div class="column">' + web.GetLocalTime(address_all['last_tx_time']) + '</div>\n'
html = html + '</div>\n'
html = html + '<div class="row">\n'
html = html + '<div class="column column-15"><b>'+ _("Transaction Nums") +'</b></div><div class="column">' + str(len(address_all['txid_list'])) + '</div>\n'
html = html + '</div>\n'
html = html + '</div>\n'
html = html + '<hr/>\n'
#########################################################################
# list all asset
html = html + '<div class="container">\n'
row = int(math.ceil(ncount / 4))
r = 0
for i in range(0, row+1) :
html = html + '<div class="row">\n'
html = html + '<div class="column column-20"></div>\n'
for j in range(0,4) :
if i==0 and j==0 :
if address_all['asset'] == "0" :
html = html + '<div class="column column-15"><a href="/address/' + address_all['address'] + '"><b>[' + _('All Asset') + ']</b></a></div>\n'
else :
html = html + '<div class="column column-15"><a href="/address/' + address_all['address'] + '">[' + _('All Asset') + ']</a></div>\n'
continue
if r >= ncount :
html = html + '<div class="column column-15"></div>\n'
elif address_all['asset'] == results[r]['asset']:
html = html + '<div class="column column-15"><a href="/address/' + address_all['address'] + '/' + results[r]['asset'] + '"><b>[' + web.GetAssetName(results[r]['asset']) + ']</b></a></div>\n'
else :
html = html + '<div class="column column-15"><a href="/address/' + address_all['address'] + '/' + results[r]['asset'] + '">[' + web.GetAssetName(results[r]['asset']) + ']</a></div>\n'
r = r + 1
html = html + '<div class="column column-20"></div>\n'
html = html + '</div>\n'
html = html + '</div>\n'
html = html + '<hr/>\n'
#########################################################################
Pagination = GetAddressPagination(address_all,page,web.ADS_PER_PAGE)
html = html + Pagination
html = html + GetAddressResultInternal(address_all,page,web.ADS_PER_PAGE)
html = html + '<br/>\n'
html = html + Pagination
return html
def GetAdsByAddressPagesInternal(address,assetid,page) :
html = web.GetHeader("address")
#asset = "c56f33fc6ecfcd0c225c4ab356fee59390af8560be0e930faebe74a6daff7c9b"
if assetid == None :
asset_adddress = web.collection_ads.find({"asset":{"$ne":"0"},"address":address}).sort("first_tx_time",1)
address_all = web.collection_ads.find_one({"asset":"0","address":address})
if asset_adddress and address_all:
html = html + GetAddressResult(asset_adddress,address_all,page)
else :
html = html + _("Address Not Found!")
else :
asset_adddress = web.collection_ads.find({"asset":{"$ne":"0"},"address":address}).sort("first_tx_time",1)
address_all = web.collection_ads.find_one({"asset":assetid,"address":address})
if asset_adddress and address_all:
html = html + GetAddressResult(asset_adddress,address_all,page)
else :
html = html + _("Asset or Address Not Found!")
html = html + web.GetFooter()
return html | antchain/antchain.org | web/ads.py | Python | mit | 10,223 | 0.043138 |
'''Utility methods
'''
import logging
import os
import numpy as np
import torch
__author__ = 'R Devon Hjelm'
__author_email__ = 'erroneus@gmail.com'
logger = logging.getLogger('cortex.util')
try:
_, _columns = os.popen('stty size', 'r').read().split()
_columns = int(_columns)
except ValueError:
_columns = 1
def print_section(s):
'''For printing sections to scripts nicely.
Args:
s (str): string of section
'''
h = s + ('-' * (_columns - len(s)))
print(h)
def update_dict_of_lists(d_to_update, **d):
    '''Updates a dict of lists with kwargs.
Args:
d_to_update (dict): dictionary of lists.
**d: keyword arguments to append.
'''
for k, v in d.items():
if isinstance(v, dict):
if k not in d_to_update.keys():
d_to_update[k] = {}
update_dict_of_lists(d_to_update[k], **v)
elif k in d_to_update.keys():
d_to_update[k].append(v)
else:
d_to_update[k] = [v]
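# Illustrative usage of update_dict_of_lists (hypothetical values): repeated
# calls accumulate per-key histories and recurse into nested dicts, e.g.
#   d = {}
#   update_dict_of_lists(d, loss=0.5, metrics=dict(acc=0.90))
#   update_dict_of_lists(d, loss=0.4, metrics=dict(acc=0.92))
#   # d == {'loss': [0.5, 0.4], 'metrics': {'acc': [0.90, 0.92]}}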
def bad_values(d):
failed = {}
for k, v in d.items():
if isinstance(v, dict):
v_ = bad_values(v)
if v_:
failed[k] = v_
else:
if isinstance(v, (list, tuple)):
v_ = []
for v__ in v:
if isinstance(v__, torch.Tensor):
v_.append(v__.item())
else:
v_.append(v__)
v_ = np.array(v_).sum()
elif isinstance(v, torch.Tensor):
v_ = v.item()
else:
v_ = v
if np.isnan(v_) or np.isinf(v_):
failed[k] = v_
if len(failed) == 0:
return False
return failed
def convert_to_numpy(o):
if isinstance(o, torch.Tensor):
o = o.data.cpu().numpy()
if len(o.shape) == 1 and o.shape[0] == 1:
o = o[0]
elif isinstance(o, (torch.cuda.FloatTensor, torch.cuda.LongTensor)):
o = o.cpu().numpy()
elif isinstance(o, list):
for i in range(len(o)):
o[i] = convert_to_numpy(o[i])
elif isinstance(o, tuple):
o_ = tuple()
for i in range(len(o)):
o_ = o_ + (convert_to_numpy(o[i]),)
o = o_
elif isinstance(o, dict):
for k in o.keys():
o[k] = convert_to_numpy(o[k])
return o
def compute_tsne(X, perplexity=40, n_iter=300, init='pca'):
from sklearn.manifold import TSNE
tsne = TSNE(2, perplexity=perplexity, n_iter=n_iter, init=init)
points = X.tolist()
return tsne.fit_transform(points)
| rdevon/cortex | cortex/_lib/utils.py | Python | bsd-3-clause | 2,635 | 0 |
###################################################################################
#
# Copyright (c) 2017-2019 MuK IT GmbH.
#
# This file is part of MuK Documents View
# (see https://mukit.at).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###################################################################################
from . import main
from . import backend
| muk-it/muk_dms | muk_dms_view/controllers/__init__.py | Python | lgpl-3.0 | 1,019 | 0.003925 |
#!/usr/bin/env python
'''
ansible module for zabbix triggers
'''
# vim: expandtab:tabstop=4:shiftwidth=4
#
# Zabbix trigger ansible module
#
#
# Copyright 2015 Red Hat Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This is in place because each module looks similar to each other.
# These need duplicate code as their behavior is very similar
# but different for each zabbix class.
# pylint: disable=duplicate-code
# pylint: disable=import-error
from openshift_tools.monitoring.zbxapi import ZabbixAPI, ZabbixConnection
def exists(content, key='result'):
    ''' Check whether key exists in content and content[key] is non-empty.
'''
if not content.has_key(key):
return False
if not content[key]:
return False
return True
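# Illustrative behaviour of exists() (hypothetical payloads):
#   exists({'result': [{'triggerid': '14062'}]}) -> True
#   exists({'result': []})                       -> False
#   exists({'error': {'code': -32602}})          -> False  (no 'result' key)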
def get_priority(priority):
''' determine priority
'''
prior = 0
if 'info' in priority:
prior = 1
elif 'warn' in priority:
prior = 2
elif 'avg' == priority or 'ave' in priority:
prior = 3
elif 'high' in priority:
prior = 4
elif 'dis' in priority:
prior = 5
return prior
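# Mapping applied by get_priority (hypothetical inputs): 'info' -> 1,
# 'warning' -> 2, 'avg'/'average' -> 3, 'high' -> 4, 'disaster' -> 5,
# anything unrecognised -> 0 (not classified).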
def get_deps(zapi, deps):
''' get trigger dependencies
'''
results = []
for desc in deps:
content = zapi.get_content('trigger',
'get',
{'filter': {'description': desc},
'expandExpression': True,
'selectDependencies': 'triggerid',
})
if content.has_key('result'):
results.append({'triggerid': content['result'][0]['triggerid']})
return results
def get_trigger_status(inc_status):
''' Determine the trigger's status
0 is enabled
1 is disabled
'''
r_status = 0
if inc_status == 'disabled':
r_status = 1
return r_status
def get_template_id(zapi, template_name):
'''
get related templates
'''
template_ids = []
app_ids = {}
# Fetch templates by name
content = zapi.get_content('template',
'get',
{'search': {'host': template_name},
'selectApplications': ['applicationid', 'name']})
if content.has_key('result'):
template_ids.append(content['result'][0]['templateid'])
for app in content['result'][0]['applications']:
app_ids[app['name']] = app['applicationid']
return template_ids, app_ids
def main():
'''
Create a trigger in zabbix
Example:
"params": {
"description": "Processor load is too high on {HOST.NAME}",
"expression": "{Linux server:system.cpu.load[percpu,avg1].last()}>5",
"dependencies": [
{
"triggerid": "14062"
}
]
},
'''
module = AnsibleModule(
argument_spec=dict(
zbx_server=dict(default='https://localhost/zabbix/api_jsonrpc.php', type='str'),
zbx_user=dict(default=os.environ.get('ZABBIX_USER', None), type='str'),
zbx_password=dict(default=os.environ.get('ZABBIX_PASSWORD', None), type='str'),
zbx_debug=dict(default=False, type='bool'),
expression=dict(default=None, type='str'),
name=dict(default=None, type='str'),
description=dict(default=None, type='str'),
dependencies=dict(default=[], type='list'),
priority=dict(default='avg', type='str'),
url=dict(default=None, type='str'),
status=dict(default=None, type='str'),
state=dict(default='present', type='str'),
template_name=dict(default=None, type='str'),
hostgroup_name=dict(default=None, type='str'),
query_type=dict(default='filter', choices=['filter', 'search'], type='str'),
),
#supports_check_mode=True
)
zapi = ZabbixAPI(ZabbixConnection(module.params['zbx_server'],
module.params['zbx_user'],
module.params['zbx_password'],
module.params['zbx_debug']))
#Set the instance and the template for the rest of the calls
zbx_class_name = 'trigger'
idname = "triggerid"
state = module.params['state']
tname = module.params['name']
templateid = None
if module.params['template_name']:
templateid, _ = get_template_id(zapi, module.params['template_name'])
content = zapi.get_content(zbx_class_name,
'get',
{module.params['query_type']: {'description': tname},
'expandExpression': True,
'selectDependencies': 'triggerid',
'templateids': templateid,
'group': module.params['hostgroup_name'],
})
# Get
if state == 'list':
module.exit_json(changed=False, results=content['result'], state="list")
# Delete
if state == 'absent':
if not exists(content):
module.exit_json(changed=False, state="absent")
content = zapi.get_content(zbx_class_name, 'delete', [content['result'][0][idname]])
module.exit_json(changed=True, results=content['result'], state="absent")
# Create and Update
if state == 'present':
params = {'description': tname,
'comments': module.params['description'],
'expression': module.params['expression'],
'dependencies': get_deps(zapi, module.params['dependencies']),
'priority': get_priority(module.params['priority']),
'url': module.params['url'],
'status': get_trigger_status(module.params['status']),
}
# Remove any None valued params
_ = [params.pop(key, None) for key in params.keys() if params[key] is None]
        ########
        # CREATE
        ########
if not exists(content):
# if we didn't find it, create it
content = zapi.get_content(zbx_class_name, 'create', params)
if content.has_key('error'):
module.exit_json(failed=True, changed=True, results=content['error'], state="present")
module.exit_json(changed=True, results=content['result'], state='present')
########
# UPDATE
########
differences = {}
zab_results = content['result'][0]
for key, value in params.items():
if zab_results[key] != value and zab_results[key] != str(value):
differences[key] = value
if not differences:
module.exit_json(changed=False, results=zab_results, state="present")
# We have differences and need to update
differences[idname] = zab_results[idname]
content = zapi.get_content(zbx_class_name, 'update', differences)
module.exit_json(changed=True, results=content['result'], state="present")
module.exit_json(failed=True,
changed=False,
results='Unknown state passed. %s' % state,
state="unknown")
# pylint: disable=redefined-builtin, unused-wildcard-import, wildcard-import, locally-disabled
# import module snippets. This are required
from ansible.module_utils.basic import *
main()
| menren/openshift-ansible | roles/lib_zabbix/library/zbx_trigger.py | Python | apache-2.0 | 8,027 | 0.00436 |
import pandas as pd
from datetime import date, timedelta
import time
import numpy as np
import re
import psycopg2
import ConfigParser
import argparse
from sqlalchemy import create_engine
import random
import sql
parser = argparse.ArgumentParser()
parser.add_argument('-cf','--contract_file',help='Contract data file')
parser.add_argument('-if','--invest_file',help='Labelled data file')
parser.add_argument('-a','--amounts',action='store_true',default=False,help='Calculate aggregated amount features')
parser.add_argument('-dist','--dist',action='store_true',default=True,help='Calculate distribution features')
parser.add_argument('-dom','--dom',action='store_true',default=False,help='Calculate dominance features')
parser.add_argument('-y','--num_years',default=0,help='Time periods in years')
parser.add_argument('-cat','--categ',default=['major_sector'],nargs='*',help='Categoricals to use')
parser.add_argument('-id','--table_id',default=time.strftime("%Y%m%d"),help='ID for SQL tables')
parser.add_argument('-lim','--contract_num_lim',default=5000,type=int,help='Maximum number of rows to use')
args = parser.parse_args()
def connect():
"""Connect to database"""
    #read password from config file
    config = ConfigParser.RawConfigParser()
    config.read('config')
    password = config.get('SQL','password')
    #open connection with database
con = psycopg2.connect(host="localhost",user='dssg',password=password,dbname="world_bank")
return con
def snake_case(name):
"""Clean entity name strings"""
remove_list = ['llc','ltd','llc','ltd','co','corporation','srl','nv','limited','pvtltd']
remove = '|'.join(remove_list)
regex = re.compile(r'\b('+remove+r')\b', flags=re.IGNORECASE)
try:
s1 = name.lower()
s1 = s1.replace('.','')
s1 = regex.sub("", s1)
s1 = s1.strip()
s1 = re.sub(' +','_',s1)
s1 = re.sub('-','_',s1)
s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', s1)
s1 = s1.replace('*','')
s1 = s1.replace('(','')
s1 = s1.replace(')','')
s1 = s1.replace('"','')
s1 = s1.replace(',','')
s1 = s1.replace('#','')
s1 = s1.replace(':','_')
s1 = s1.replace('&','_')
s1 = s1.replace('\'','')
s1 = s1.replace('/','_')
s1 = re.sub('_+','_',s1)
except:
s1 = ''
return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower()
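# snake_case() normalises supplier/entity names for matching: it lowercases,
# strips punctuation and common company suffixes (llc, ltd, co, ...), and joins
# the remaining words with underscores, e.g. (hypothetical input)
# "ACME Trading Ltd." -> "acme_trading".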
def reformat(data,column,inplace=False,shorten=False):
if inplace:
data[column] = data[column].map(lambda x: snake_case(x))
else:
data[column + '_reformat'] = data[column].map(lambda x: snake_case(x))
if shorten:
data[column] = [re.sub(r'and', '', x).replace('__','_') for x in data[column]]
data[column] = [re.sub(r'[aeiou]', '', x) for x in data[column]]
return data
def binarize(data,fields):
dummies = pd.get_dummies(data[fields]).astype('int64')
dummies.columns = ['_'.join(('is',fields,col,'ct')) for col in dummies.columns]
data = data.merge(dummies,left_index=True,right_index=True,how='left')
return data
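# Illustrative effect of binarize() (hypothetical category values): for
# fields='sect' with values 'energy' and 'health', 0/1 dummy columns named
# is_sect_energy_ct and is_sect_health_ct are appended to the frame.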
def conditional_amounts(data):
for col in data.columns:
if 'is' in col and 'total' not in col and 'cum' not in col and 'percent' not in col and 'dominance' not in col:
data[re.sub('_ct$','',col) + '_amt'] = data[col]*data['amount_standardized']
return data
def distribution(data,field,amount=False):
cols_to_use = []
for col in data.columns:
if 'is' in col and 'cum' in col and field in col and 'total' not in col and 'percent' not in col and 'dominance' not in col:
if amount and 'amt' in col:
cols_to_use.append(col)
elif not amount and not 'amt' in col:
cols_to_use.append(col)
subset = data[cols_to_use]
dist = subset.apply(lambda x: 100.0*x/x.sum(), axis=1)
dist.columns = [col + '_percent' for col in dist.columns]
return dist
def count_previous_contracts(data,days=0,amount = True, count = False):
"""Count number of data entries in the past n days from each entry"""
def sum_func(column):
def inner_func(t):
if days == 0:
min_date_lim = 0
else:
min_date_lim = t - timedelta(days)
total = data.ix[(min_date_lim < data['contract_signing_date']) & (data['contract_signing_date'] <= t),[column,'amount_standardized']]
if amount:
total_sum = ((total[column] != 0)*total['amount_standardized']).cumsum()
else:
total_sum = total[column].cumsum()
return total_sum
return inner_func
data = data.sort('contract_signing_date')
count = 0
for col in data.columns:
if 'is' in col and 'total' not in col and 'cum' not in col and 'full' not in col and 'year' not in col:
func = sum_func(col)
result_temp = data[['contract_signing_date']].apply(func)
result_temp = pd.DataFrame(result_temp)
result_temp.columns = [col + '_cum']
if count == 0:
result = result_temp
else:
result = result.merge(result_temp,left_index=True,right_index=True,how='left')
count += 1
data = data.merge(result,left_index=True,right_index=True,how='left')
return data
def dominance(data,field,not_field=[]):
col_list = []
for col in data.columns:
if 'is' in col and 'cum' in col and field in col and 'total' not in col and 'percent' not in col and 'dominance' not in col:
col_list.append(col+'_dominance')
data[col + '_dominance'] = data[col]/data[col + '_total']
data.replace([np.inf, -np.inf], np.nan,inplace=True)
data[col + '_dominance'] = data[col + '_dominance'].fillna(0)
return data
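# Dominance is the supplier's cumulative activity in a category divided by the
# market-wide cumulative total for that category (hypothetical columns:
# is_sect_energy_ct_cum / is_sect_energy_ct_cum_total); inf/NaN from empty
# totals are reset to 0.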
def rank(data,col_base,no=[]):
"""Rank the values in a set of fields to create anonymous ranking fields
e.g. first_major_sector_percent, second_major_sector_percent, ..."""
#find matching columns
col_list = []
for col in data.columns:
match = True
for base in col_base:
if base not in col:
match = False
if match:
col_list.append(col)
data_sub = data[col_list]
#sort the columns by value
data_array = np.array(data_sub)
data_array.sort(axis=1)
data_array = np.fliplr(data_array)
#create data frame with column names
df = pd.DataFrame(data_array,index=data.index,columns=['_'.join(('_'.join(col_base),str(i + 1))) for i in range(len(col_list))])
return df
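# Illustrative usage of rank() (hypothetical column names): with columns such
# as is_sect_energy_ct_cum_percent and is_sect_health_ct_cum_percent,
# rank(data, col_base=['sect', 'percent', 'ct']) returns columns
# sect_percent_ct_1, sect_percent_ct_2, ... holding each row's values sorted
# in descending order, so the features no longer name a specific sector.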
def get_engine():
config = ConfigParser.RawConfigParser()
config.read('config')
password = config.get('SQL','password')
engine = create_engine(r'postgresql://dssg:' + password + '@localhost/world_bank')
return engine
def write_sql_query(fields,table_name,years=0,amount=False,total=False,table_name2=''):
if table_name2 == '':
table_name2 = table_name
sql_base = 'SELECT st1.supplier_reformat,st1.contract_signing_date, st1.amount_standardized,st1.unique_id'
for field in fields:
if not total:
sql_base += ',\nSUM(st2."' + field + '") AS "' + field + '_cum"'
else:
sql_base += ',\nSUM(st2."' + field + '") AS "' + field + '_cum_total"'
sql_base += '\nFROM\n'
sql_base += table_name + ' AS st1\n'
sql_base += 'INNER JOIN\n'
sql_base += table_name2 + ' AS st2\n'
sql_base += 'ON\n'
sql_base += 'st2.contract_signing_date <= st1.contract_signing_date'
if years != 0:
sql_base += ' AND\n st2.contract_signing_date >= st1.contract_signing_date::date - ' + str(years*365)
if not total:
sql_base += ' AND\n st2.supplier_reformat = st1.supplier_reformat'
sql_base += '\nGROUP BY st1.contract_signing_date, st1.amount_standardized, st1.supplier_reformat, st1.unique_id\n'
sql_base += 'ORDER BY st1.contract_signing_date'
sql_base += ';'
return sql_base
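# Shape of the generated SQL (schematic; table/field names are hypothetical):
# write_sql_query(['is_sect_energy_ct'], 'labelled_tbl', table_name2='full_tbl')
# joins labelled_tbl (st1) to full_tbl (st2) on
# st2.contract_signing_date <= st1.contract_signing_date (plus supplier
# equality unless total=True), summing st2."is_sect_energy_ct" per st1
# contract as "is_sect_energy_ct_cum" (or "..._cum_total" when total=True).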
def fix_duplicate_columns(data):
cols_fixed = []
for col in data.columns:
pattern_y = re.compile('.*_y')
pattern_x = re.compile('.*_x')
if pattern_y.match(col):
data.drop(col,axis=1,inplace=True)
elif pattern_x.match(col):
cols_fixed.append(col[:-2])
else:
cols_fixed.append(col)
data.columns = cols_fixed
return data
def setup_binary_fields(contracts,amounts,categories):
print 'Generating binary fields...'
start = time.time()
boolean_fields = []
for field in categories:
# boolean_fields.append([])
print ' ' + field + '...',
contracts = binarize(contracts,field)
for col in contracts.columns:
if 'is' in col and field in col and len(categories) != 2:
if not amounts:
boolean_fields.append(col)
else:
boolean_fields.append(re.sub('_ct$','',col) + '_amt')
print time.time() - start, 's elapsed'
if len(categories) == 2:
print 'Generating combined binary fields...'
start = time.time()
# boolean_fields.append([])
for cat1 in contracts[categories[0]].unique():
for cat2 in contracts[categories[1]].unique():
if ( (contracts[categories[0]] == cat1) & (contracts[categories[1]] == cat2)).sum() > 0:
col_name = '_'.join(('is',categories[0],categories[1],cat1,cat2 ,'ct'))
contracts[col_name] = (contracts[categories[0]] == cat1) & (contracts[categories[1]] == cat2)
contracts[col_name] = contracts[col_name].astype('int64')
if not amounts:
boolean_fields.append(col_name)
if amounts:
boolean_fields.append(re.sub('_ct$','',col_name) + '_amt')
print time.time() - start, 's elapsed'
print 'Boolean fields: ',len(boolean_fields)
print 'Conditional amounts...'
if amounts:
contracts = conditional_amounts(contracts)
print time.time() - start, 's elapsed'
return contracts,boolean_fields
def drop_duplicate_cols(contracts):
cols_fixed = []
for col in contracts.columns:
pattern_y = re.compile('.*_y')
pattern_x = re.compile('.*_x')
if pattern_y.match(col):
print 'dropping ' + col
contracts.drop(col,axis=1,inplace=True)
elif pattern_x.match(col):
print 'keeping ' + col,col[:-2]
cols_fixed.append(col[:-2])
else:
cols_fixed.append(col)
contracts.columns = cols_fixed
col_list = []
for i,col in enumerate(contracts.columns):
if col not in col_list:
col_list.append(col)
else:
col_list.append(col + '2')
contracts.columns = col_list
return contracts
def cleaning(contracts,categories):
"""Drop duplicate column names, reformat names, """
drop_duplicate_cols(contracts)
contracts = reformat(contracts,'supplier')
contracts = reformat(contracts,'country',inplace=True)
contracts = reformat(contracts,'region',inplace=True,shorten=True)
contracts['major_sector'][contracts['major_sector'].str.contains("\(H\)")] = 'Other'
contracts['major_sector'][contracts['major_sector'].str.contains("X")] = 'Other'
contracts['major_sector'][contracts['major_sector'].str.contains("Not assigned")] = 'Other'
contracts['prc_ctg'] = contracts['procurement_category']
contracts['prc_typ'] = contracts['procurement_type']
contracts = reformat(contracts,'major_sector',inplace=True,shorten=True)
contracts = reformat(contracts,'prc_ctg',inplace=True,shorten=True)
contracts = reformat(contracts,'prc_typ',inplace=True,shorten=True)
contracts['ctry'] = contracts['country']
contracts['rgn'] = contracts['region']
contracts['sect'] = contracts['major_sector']
#interesting columns
contracts = contracts[['supplier_reformat','supplier','contract_signing_date',
'amount_standardized','wb_contract_number','unique_id'] + categories]
contracts = contracts[contracts['amount_standardized'].notnull()]
contracts['amount_standardized'] = contracts['amount_standardized'].astype('int64')
#convert date to datetime
contracts['contract_signing_date'] = pd.to_datetime(contracts['contract_signing_date'])
return contracts
def main():
print 'Connecting to database...',
start = time.time()
engine = get_engine()
con = engine.connect()
print time.time() - start,'s elapsed'
print 'Reading data...',
start = time.time()
contracts = pd.read_csv(args.contract_file)
# contracts = pd.read_csv('/mnt/data/world-bank/joinedcontracts_features_phase4_resolved.csv')
# labelled_contracts = pd.read_csv('/mnt/data/world-bank/joinedcontracts_features_phase4_supplier_features_labelled_resolved.csv')
labelled_contracts = pd.read_csv(args.invest_file)
print time.time() - start, 's elapsed'
print labelled_contracts.shape
if len(labelled_contracts.index) > args.contract_num_lim:
labelled_contracts.sort(['contract_signing_date'],inplace=True)
labelled_contracts = labelled_contracts.head(args.contract_num_lim)
print labelled_contracts.shape
contracts['unique_id'] = contracts.index
labelled_contracts['unique_id'] = labelled_contracts.index
labelled_contracts.to_sql(args.invest_file.split('/')[-1].split('.')[0] + '_' + args.table_id,engine,if_exists='replace')
#drop duplicate column names
contracts = drop_duplicate_cols(contracts)
labelled_contracts = drop_duplicate_cols(labelled_contracts)
#make sure labelled contracts are included in contracts (Should be true anyway)
contracts = pd.concat([contracts,labelled_contracts[contracts.columns]])
contracts.drop_duplicates(inplace=True,cols=['supplier','wb_contract_number','major_sector','amount_standardized'])
amounts = args.amounts
dist_bool = args.dist
dom_bool = args.dom
categories = args.categ
dt = args.num_years
supplier_list = labelled_contracts['supplier'].unique()
if dist_bool:
#we don't care about the overall distribution so limit ourselves to labelled suppliers
print len(contracts.index)
contracts = contracts[contracts['supplier'].isin(supplier_list)]
print len(contracts.index)
if dom_bool:
#only need total counts for fields present in labelled data
for categ in categories:
print len(contracts.index)
categ_list = labelled_contracts[categ].unique()
contracts = contracts[contracts[categ].isin(categ_list)]
print len(contracts.index)
categs_temp = []
for categ in categories:
if categ == 'major_sector':
categ = 'sect'
if categ == 'country':
categ = 'ctry'
if categ == 'region':
categ = 'rgn'
if categ == 'procurement_category':
categ = 'prc_ctg'
if categ == 'procurement_type':
categ = 'prc_typ'
categs_temp.append(categ)
categories = categs_temp
#clean data and create dummy boolean fields
contracts = cleaning(contracts,categories)
labelled_contracts = cleaning(labelled_contracts,categories)
contracts,boolean_fields = setup_binary_fields(contracts,amounts,categories)
labelled_contracts,boolean_fields_labelled = setup_binary_fields(labelled_contracts,amounts,categories)
start_cols = labelled_contracts.columns
print 'Num years: ', dt
field = '_'.join(categories)
field_list = boolean_fields
field_list_labelled = boolean_fields_labelled
field_list = [val for val in boolean_fields_labelled if val in set(boolean_fields)]
if True:
# for field_list,field_list_labelled in zip(boolean_fields,boolean_fields_labelled):
table_name = 'contracts_w_booleans_' + args.table_id
if amounts:
table_name = '_'.join((table_name,'amt',field))
else:
table_name = '_'.join((table_name,field))
result = con.execute("SELECT table_name FROM information_schema.tables ORDER BY table_name;")
result = list(result.fetchall())
tables = [r[0] for r in result]
if True:
print 'Running full table'
print 'Writing to database...'
start = time.time()
contracts_boolean_fields = contracts[['supplier_reformat','contract_signing_date',
'amount_standardized','unique_id'] + field_list]
con.execute('DROP TABLE IF EXISTS ' + table_name + ';')
print len(contracts_boolean_fields.index)
for q in range((len(contracts_boolean_fields.index) / 5000) + 1):
subset = contracts_boolean_fields.iloc[q*5000:min((q+1)*5000,len(contracts_boolean_fields.index))]
print q, subset.shape
if (q==0):
subset.to_sql(table_name,engine,if_exists='replace')
else:
subset.to_sql(table_name,engine,if_exists='append')
print 'Writing to database...',
table_name2 = 'contracts_w_booleans_lab_' + args.table_id
if amounts:
table_name2 = '_'.join((table_name2,'amt',field))
else:
table_name2 = '_'.join((table_name2,field))
start = time.time()
contracts_boolean_fields_labelled = labelled_contracts[['supplier_reformat','contract_signing_date',
'amount_standardized','unique_id']
+ field_list]
con.execute('DROP TABLE IF EXISTS ' + table_name2 + ';')
contracts_boolean_fields_labelled.to_sql(table_name2, engine)
print time.time() - start,'s elapsed'
total_agg = [False]
if dom_bool:
total_agg.append(True)
for tagg in total_agg:
print 'Running SQL statement...',tagg,
start = time.time()
sql_statement = write_sql_query(field_list,
table_name2,
total=tagg,
table_name2=table_name)
result = con.execute(sql_statement)
print result
sql_results = pd.DataFrame(result.fetchall())
sql_results.columns = result.keys()
for col in sql_results.columns:
if 'ct_cum' in col or 'amt_cum' in col:
sql_results[col] = sql_results[col].astype(float)
print labelled_contracts.shape
labelled_contracts = labelled_contracts.merge(sql_results,
on=['supplier_reformat',
'contract_signing_date',
'amount_standardized',
'unique_id'],
how='left')
print labelled_contracts.shape
print time.time() - start,'s elapsed'
print 'Generating supplier specific counts...'
start = time.time()
print ' ' + field + '...'
labelled_contracts = labelled_contracts.sort(['supplier','contract_signing_date'])
if dist_bool:
print ' distribution...',
start = time.time()
dist = distribution(labelled_contracts,field,amount=amounts)
labelled_contracts = labelled_contracts.merge(dist,left_index=True,right_index=True,how='left')
print time.time() - start, 's elapsed'
if dom_bool:
print ' dominance...',
start = time.time()
labelled_contracts = dominance(labelled_contracts,field)
print time.time() - start, 's elapsed'
    #drop temporary fields
for col in labelled_contracts.columns:
if '_total' in col:
labelled_contracts.drop(col,axis=1,inplace=True)
print 'Creating anonymous ranking features...'
start = time.time()
if dist_bool:
if not amounts:
print field
anonymous_dist = rank(labelled_contracts,col_base=[field,'percent','ct'])
else:
anonymous_dist = rank(labelled_contracts,col_base=[field,'percent','amt'])
labelled_contracts = labelled_contracts.merge(anonymous_dist,left_index=True,right_index=True)
print time.time() - start, 's elapsed'
cols_added = labelled_contracts.columns.difference(start_cols).tolist()
dt_name = 'full'
if int(dt) != 0:
dt_name = str(dt) + 'years'
cols_renamed = []
for col in cols_added:
cols_renamed.append(col + '_' + dt_name)
dictionary = dict(zip(cols_added, cols_renamed))
labelled_contracts.rename(columns=dictionary,inplace=True)
labelled_contracts = labelled_contracts.sort(['supplier','contract_signing_date'])
    # boolean_fields_labelled is already a flat list of column names
    booleans = list(boolean_fields_labelled)
contracts_to_write = labelled_contracts[labelled_contracts.columns - booleans]
contracts_to_write.columns = [col.replace('country','cty') for col in contracts_to_write.columns]
contracts_to_write.columns = [col.replace('percent','pct') for col in contracts_to_write.columns]
contracts_to_write.columns = [col.replace('major_sector','sect') for col in contracts_to_write.columns]
contracts_to_write.columns = [col.replace('dominance','dom') for col in contracts_to_write.columns]
contracts_to_write.columns = [col.replace('amount','amt') for col in contracts_to_write.columns]
contracts_to_write.columns = [col.replace('years','yr') for col in contracts_to_write.columns]
contracts_to_write.columns = [col.lower() for col in contracts_to_write.columns]
contracts_to_write = contracts_to_write.fillna(0)
zero_cols = contracts_to_write.apply(lambda x: np.all(x==0))
for col,value in zip(zero_cols.index,zero_cols):
if value:
contracts_to_write.drop(col,axis=1,inplace=True)
if amounts:
agg_types = ['amt_cum_pct','pct_amt']
else:
agg_types = ['ct_cum_pct','pct_ct']
already_used = ['unique_id','supplier_reformat','supplier',
'wb_contract_number','sect','region','ctry',
'contract_signing_date','amt_standardized']
for agg_type in agg_types:
final_cols = ['unique_id','supplier_reformat','supplier',
'wb_contract_number','contract_signing_date',
'amt_standardized'] + categories
for col in contracts_to_write.columns:
if agg_type in col and col not in already_used:
already_used.append(col)
final_cols.append(col)
to_write_subset = contracts_to_write[final_cols]
output_name = '_'.join(('cntrcts_splr_ftr_set_' + args.table_id,field,agg_type))
if dist_bool:
output_name += '_dist'
if dom_bool:
output_name += '_dominance'
output_name += '_' + dt_name
# output_name += '_test2'
con.execute('DROP TABLE IF EXISTS ' + output_name + ';')
to_write_subset.to_sql(output_name,engine)
print labelled_contracts.shape
print contracts.shape
if __name__ == "__main__":
main()
| eredmiles/GeneralDataScienceToolsDSSG2015 | WorldBank2015/Code/data_pipeline_src/supplier_feature_gen.py | Python | mit | 24,730 | 0.016781 |
# coding: utf-8
"""
Swaggy Jenkins
Jenkins API clients generated from Swagger / Open API specification # noqa: E501
The version of the OpenAPI document: 1.1.2-pre.0
Contact: blah@cliffano.com
Generated by: https://openapi-generator.tech
"""
from openapi_client.api_client import ApiClient
from openapi_client.api.remote_access_api_endpoints.get_computer import GetComputer
from openapi_client.api.remote_access_api_endpoints.get_jenkins import GetJenkins
from openapi_client.api.remote_access_api_endpoints.get_job import GetJob
from openapi_client.api.remote_access_api_endpoints.get_job_config import GetJobConfig
from openapi_client.api.remote_access_api_endpoints.get_job_last_build import GetJobLastBuild
from openapi_client.api.remote_access_api_endpoints.get_job_progressive_text import GetJobProgressiveText
from openapi_client.api.remote_access_api_endpoints.get_queue import GetQueue
from openapi_client.api.remote_access_api_endpoints.get_queue_item import GetQueueItem
from openapi_client.api.remote_access_api_endpoints.get_view import GetView
from openapi_client.api.remote_access_api_endpoints.get_view_config import GetViewConfig
from openapi_client.api.remote_access_api_endpoints.head_jenkins import HeadJenkins
from openapi_client.api.remote_access_api_endpoints.post_create_item import PostCreateItem
from openapi_client.api.remote_access_api_endpoints.post_create_view import PostCreateView
from openapi_client.api.remote_access_api_endpoints.post_job_build import PostJobBuild
from openapi_client.api.remote_access_api_endpoints.post_job_config import PostJobConfig
from openapi_client.api.remote_access_api_endpoints.post_job_delete import PostJobDelete
from openapi_client.api.remote_access_api_endpoints.post_job_disable import PostJobDisable
from openapi_client.api.remote_access_api_endpoints.post_job_enable import PostJobEnable
from openapi_client.api.remote_access_api_endpoints.post_job_last_build_stop import PostJobLastBuildStop
from openapi_client.api.remote_access_api_endpoints.post_view_config import PostViewConfig
class RemoteAccessApi(
GetComputer,
GetJenkins,
GetJob,
GetJobConfig,
GetJobLastBuild,
GetJobProgressiveText,
GetQueue,
GetQueueItem,
GetView,
GetViewConfig,
HeadJenkins,
PostCreateItem,
PostCreateView,
PostJobBuild,
PostJobConfig,
PostJobDelete,
PostJobDisable,
PostJobEnable,
PostJobLastBuildStop,
PostViewConfig,
ApiClient,
):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
pass
| cliffano/swaggy-jenkins | clients/python-experimental/generated/openapi_client/api/remote_access_api.py | Python | mit | 2,651 | 0.006413 |
# solve cliff-walking task with Q-Learning, very similar to SARSA
# original example problem from the book, introduction for reinforcement learning
# Author: Wenbin Li
# numeric backend
import pygame
from pygame.locals import *
import numpy as np
grid_size = 100
n_row = 4
n_col = 12
state = np.zeros((n_row * grid_size, n_col * grid_size))
step_size = 0.5
epsilon = 0.1 # parameter for epislon-greedy
N_actions = 4 # number of actions {left,up,right,down}
N_episodes = 600 # number of episodes
# as suggested by the book, reach optimality by 8000 time steps
# rewards of -1 until the goal state is reached
# -100 for entering cliff region and instantly return to starting position
# specify goal location
goal_r = 3
goal_c = 11
# specify start location
start_r = 3
start_c = 0
# initialize state-action value function
q = np.zeros((n_row,n_col,N_actions)) # num_row by num_col by num_states
# Note: Q(terminal-state,.) = 0
# undiscounted and episodic task
n_steps = 0
n_episodes = 0
# epsilon-greedy strategy
def ep_greedy(epsilon,num_actions,q,i,j):
roll = np.random.uniform(0,1)
# epsilon-greedy strategy
if roll < epsilon: # exploration
a = np.random.randint(0,num_actions)
else: # exploitation
a = np.argmax(q[i,j,:])
return a
# translate action into state-change
def action2state(i,j,a):
# Note: coordintate system start from the upper-left corner and
# right/downwards are the positive direction
if a == 0: # to left
i_next = i
j_next = j - 1
elif a == 1: # upwards
i_next = i - 1
j_next = j
elif a == 2: # to right
i_next = i
j_next = j + 1
else: # downwards
i_next = i + 1
j_next = j
return i_next,j_next
# Q-Learning method (off-policy TD control)
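# The update rule used in the loop below, with discount gamma = 1:
#   Q(S,A) <- Q(S,A) + step_size * [R + max_a Q(S',a) - Q(S,A)]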
while n_episodes < N_episodes:
# begin of an episode
i = start_r
j = start_c
# end of an episode
n_episodes += 1
print "episode ",str(n_episodes),"..."
while True:
n_steps += 1
# print " step ",str(n_steps),"..."
# choose A from S using policy derived from Q (epsilon-greedy)
a = ep_greedy(epsilon,N_actions,q,i,j)
        # translate action into state-change
i_next,j_next = action2state(i,j,a)
# update the state-action value function with Sarsa/Q-Learning of choice
# state transitions end in the goal state
# state should be in the range of the gridworld
if i_next == goal_r and j_next == goal_c: # reach the goal position
# q[i,j] = q[i,j] + step_size * (-1 + 0 - q[i,j]) #the Q(terminal,.) = 0
q[i,j,a] = q[i,j,a] + step_size * (-1 + 0 - q[i,j,a]) #the Q(terminal,.) = 0
# Note, transition from noterminal to terminal also gets reward of -1 in this case
break
# different reward/consequence when entering the cliff region
        elif i_next == 3 and j_next > 0 and j_next < n_col - 1:  # cliff cells are (3,1)..(3,10)
i_next = start_r
j_next = start_c
r = -100
elif i_next < 0 or i_next > n_row -1:
i_next = i
r = -1
elif j_next < 0 or j_next > n_col - 1:
j_next = j
r = -1
else:
r = -1
# a_next = ep_greedy(epsilon,N_actions,q,i_next,j_next)
q[i,j,a] = q[i,j,a] + step_size * (r + max(q[i_next,j_next,:]) - q[i,j,a])
i = i_next
j = j_next
# visualize the solution/GUI-backend
# plot the gridworld as background
# mark the start/goal cells and the cliff region
pygame.init()
pygame.display.set_mode((n_col * grid_size,n_row * grid_size))
pygame.display.set_caption('Cliff Walking')
screen = pygame.display.get_surface()
surface = pygame.Surface(screen.get_size())
bg = pygame.Surface(screen.get_size())
# draw background, with mark on start/end states & cliff region
def draw_bg(surface,n_row,n_col,grid_size,start_r,start_c,goal_r,goal_c):
for i in range(n_col):
for j in range(n_row):
x = i * grid_size
y = j * grid_size
coords = pygame.Rect(x,y,grid_size,grid_size)
pygame.draw.rect(surface,(255,255,255),coords,1)
# draw start state
pygame.draw.circle(surface,(192,192,192),(start_c * grid_size + grid_size/2,
start_r * grid_size + grid_size/2),grid_size/4)
# draw goal state
pygame.draw.circle(surface,(102,204,0),(goal_c * grid_size + grid_size/2,
goal_r * grid_size + grid_size/2),grid_size/4)
# draw cliff region
x = 1 * grid_size
y = 3 * grid_size
coords = pygame.Rect(x,y,grid_size*10,grid_size)
pygame.draw.rect(surface,(192,192,192),coords)
# use state-action function to find one-step optimal policy
def step_q(q,s_r,s_c,n_row,n_col):
print "state-action value:"
print q[s_r,s_c,:]
a = np.argmax(q[s_r,s_c,:]) # greedy only
# display debug
if a == 0:
print "move left"
elif a == 1:
print "move upward"
elif a == 2:
print "move right"
else:
print "move downwards"
s_r_next,s_c_next = action2state(s_r,s_c,a)
    # define rules, especially when the agent enters the cliff region
    if s_r_next == 3 and s_c_next > 0 and s_c_next < n_col - 1:
s_r_next = start_r
s_c_next = start_c
# in theory, the produced optimal policy should not enter this branch
elif s_r_next < 0 or s_r_next > n_row -1:
s_r_next = s_r
elif s_c_next < 0 or s_c_next > n_col - 1:
s_c_next = s_c
return s_r_next,s_c_next
s_r = start_r
s_c = start_c
while True:
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
# draw gridworld background
draw_bg(bg,n_row,n_col,grid_size,start_r,start_c,goal_r,goal_c)
screen.blit(bg,(0,0))
# draw the state of the agent, i.e. the path (start --> end) as the foreground
surface.fill((0,0,0))
# use state-action function to find a optimal policy
# in the loop, should provide a step function
#print (s_r,s_c)
s_r_next,s_c_next = step_q(q,s_r,s_c,n_row,n_col)
#print (s_r_next,s_c_next)
if s_r_next != goal_r or s_c_next != goal_c:
pygame.draw.circle(surface,(255,255,255),(s_c_next * grid_size + grid_size/2,
s_r_next * grid_size + grid_size/2),grid_size/4)
bg.blit(surface,(0,0))
pygame.display.flip() # update
pygame.time.delay(1000)
s_r,s_c = s_r_next,s_c_next # update coordinate
| wenbinli/rl | cliffWalk_QL.py | Python | mit | 6,866 | 0.026507 |
from mock import patch
from nose.tools import eq_
from test_utils import TestCase
from mozillians.funfacts.helpers import random_funfact
from mozillians.funfacts.tests import FunFactFactory
class HelperTests(TestCase):
@patch('mozillians.funfacts.helpers.FunFact.objects')
    def test_helper_calls_random(self, funfact_mock):
        """Calling the helper should query the mocked FunFact manager."""
        random_funfact()
        funfact_mock.random.assert_called_with()
def test_helper_returns_none(self):
"""Test helper returns None when no published FunFacts."""
FunFactFactory.create()
eq_(random_funfact(), None)
| glogiotatidis/mozillians-new | mozillians/funfacts/tests/test_helpers.py | Python | bsd-3-clause | 563 | 0 |
z = len
u = x = y = r = 0
def v():
global u
if z(r) < 3:
if y < 1:
c, n = r
else:
n, c = r
s = 0
else:
l, c, n = r
s = l[x - 1] + l[x] + (l[x+1] if x == z(l) else 0)
u = s + {x : sum(c[x - 1:x+2] + n[x - 1:x + 2]) - c[x],
0 : n[0] + n[1] + c[1],
z(c) - 1 : n[x - 1] + n[x] + c[x - 1]
}[x]
def d(w):
global r, x, y
a = list.append
m = [[int(i) for i in l.strip()] for l in open(w)]
n = range(1, z(m))
r = m[0:2]
v()
e = [{u:0,2: r[0][0], 3: 1}[u]]
for x in n:
v()
a(e, {u:0,2: r[1][x], 3: 1}[u])
o = [e]
for y in n:
r = m[y - 1:y + 2]
x = 0
v()
e = [{u:0,2: r[1][1], 3: 1}[u]]
for x in n:
v()
a(e, {u:0,2: r[1][x], 3: 1}[u])
a(o, e)
f = open(w, 'w')
f.write('\n'.join(''.join(map(str, q)) for q in o))
| eeue56/code-golf | game-of-life/game_of_life.py | Python | bsd-3-clause | 957 | 0.018809 |
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import ldap
import logging
from ldap.filter import filter_format
import openerp.exceptions
from openerp import tools
from openerp.osv import fields, osv
from openerp import SUPERUSER_ID
from openerp.modules.registry import RegistryManager
_logger = logging.getLogger(__name__)
class CompanyLDAP(osv.osv):
_name = 'res.company.ldap'
_order = 'sequence'
_rec_name = 'ldap_server'
def get_ldap_dicts(self, cr, ids=None):
"""
Retrieve res_company_ldap resources from the database in dictionary
format.
:param list ids: Valid ids of model res_company_ldap. If not \
specified, process all resources (unlike other ORM methods).
:return: ldap configurations
:rtype: list of dictionaries
"""
if ids:
id_clause = 'AND id IN (%s)'
args = [tuple(ids)]
else:
id_clause = ''
args = []
cr.execute("""
SELECT id, company, ldap_server, ldap_server_port, ldap_binddn,
ldap_password, ldap_filter, ldap_base, "user", create_user,
ldap_tls
FROM res_company_ldap
WHERE ldap_server != '' """ + id_clause + """ ORDER BY sequence
""", args)
return cr.dictfetchall()
def connect(self, conf):
"""
Connect to an LDAP server specified by an ldap
configuration dictionary.
:param dict conf: LDAP configuration
:return: an LDAP object
"""
uri = 'ldap://%s:%d' % (conf['ldap_server'],
conf['ldap_server_port'])
connection = ldap.initialize(uri)
if conf['ldap_tls']:
connection.start_tls_s()
return connection
def authenticate(self, conf, login, password):
"""
Authenticate a user against the specified LDAP server.
In order to prevent an unintended 'unauthenticated authentication',
which is an anonymous bind with a valid dn and a blank password,
        check for empty passwords explicitly (:rfc:`4513#section-6.3.1`)
:param dict conf: LDAP configuration
:param login: username
:param password: Password for the LDAP user
:return: LDAP entry of authenticated user or False
:rtype: dictionary of attributes
"""
if not password:
return False
entry = False
filter = filter_format(conf['ldap_filter'], (login,))
try:
results = self.query(conf, filter)
# Get rid of (None, attrs) for searchResultReference replies
results = [i for i in results if i[0]]
if results and len(results) == 1:
dn = results[0][0]
conn = self.connect(conf)
conn.simple_bind_s(dn, password.encode('utf-8'))
conn.unbind()
entry = results[0]
except ldap.INVALID_CREDENTIALS:
return False
except ldap.LDAPError, e:
_logger.error('An LDAP exception occurred: %s', e)
return entry
def query(self, conf, filter, retrieve_attributes=None):
"""
Query an LDAP server with the filter argument and scope subtree.
Allow for all authentication methods of the simple authentication
method:
- authenticated bind (non-empty binddn + valid password)
- anonymous bind (empty binddn + empty password)
- unauthenticated authentication (non-empty binddn + empty password)
.. seealso::
:rfc:`4513#section-5.1` - LDAP: Simple Authentication Method.
:param dict conf: LDAP configuration
:param filter: valid LDAP filter
:param list retrieve_attributes: LDAP attributes to be retrieved. \
If not specified, return all attributes.
:return: ldap entries
:rtype: list of tuples (dn, attrs)
"""
results = []
try:
conn = self.connect(conf)
conn.simple_bind_s(conf['ldap_binddn'] or '',
conf['ldap_password'].encode('utf-8') or '')
results = conn.search_st(conf['ldap_base'], ldap.SCOPE_SUBTREE,
filter, retrieve_attributes, timeout=60)
conn.unbind()
except ldap.INVALID_CREDENTIALS:
_logger.error('LDAP bind failed.')
except ldap.LDAPError, e:
_logger.error('An LDAP exception occurred: %s', e)
return results
def map_ldap_attributes(self, cr, uid, conf, login, ldap_entry):
"""
Compose values for a new resource of model res_users,
based upon the retrieved ldap entry and the LDAP settings.
:param dict conf: LDAP configuration
:param login: the new user's login
:param tuple ldap_entry: single LDAP result (dn, attrs)
:return: parameters for a new resource of model res_users
:rtype: dict
"""
values = { 'name': ldap_entry[1]['cn'][0],
'login': login,
'company_id': conf['company']
}
return values
def get_or_create_user(self, cr, uid, conf, login, ldap_entry,
context=None):
"""
Retrieve an active resource of model res_users with the specified
login. Create the user if it is not initially found.
:param dict conf: LDAP configuration
:param login: the user's login
:param tuple ldap_entry: single LDAP result (dn, attrs)
:return: res_users id
:rtype: int
"""
user_id = False
login = tools.ustr(login.lower().strip())
cr.execute("SELECT id, active FROM res_users WHERE lower(login)=%s", (login,))
res = cr.fetchone()
if res:
if res[1]:
user_id = res[0]
elif conf['create_user']:
_logger.debug("Creating new Odoo user \"%s\" from LDAP" % login)
user_obj = self.pool['res.users']
values = self.map_ldap_attributes(cr, uid, conf, login, ldap_entry)
if conf['user']:
values['active'] = True
user_id = user_obj.copy(cr, SUPERUSER_ID, conf['user'],
default=values)
else:
user_id = user_obj.create(cr, SUPERUSER_ID, values)
return user_id
_columns = {
'sequence': fields.integer('Sequence'),
'company': fields.many2one('res.company', 'Company', required=True,
ondelete='cascade'),
'ldap_server': fields.char('LDAP Server address', required=True),
'ldap_server_port': fields.integer('LDAP Server port', required=True),
'ldap_binddn': fields.char('LDAP binddn',
help=("The user account on the LDAP server that is used to query "
"the directory. Leave empty to connect anonymously.")),
'ldap_password': fields.char('LDAP password',
help=("The password of the user account on the LDAP server that is "
"used to query the directory.")),
'ldap_filter': fields.char('LDAP filter', required=True),
'ldap_base': fields.char('LDAP base', required=True),
'user': fields.many2one('res.users', 'Template User',
help="User to copy when creating new users"),
'create_user': fields.boolean('Create user',
help="Automatically create local user accounts for new users authenticating via LDAP"),
'ldap_tls': fields.boolean('Use TLS',
help="Request secure TLS/SSL encryption when connecting to the LDAP server. "
"This option requires a server with STARTTLS enabled, "
"otherwise all authentication attempts will fail."),
}
_defaults = {
'ldap_server': '127.0.0.1',
'ldap_server_port': 389,
'sequence': 10,
'create_user': True,
}
class res_company(osv.osv):
_inherit = "res.company"
_columns = {
'ldaps': fields.one2many(
'res.company.ldap', 'company', 'LDAP Parameters', copy=True, groups="base.group_system"),
}
class users(osv.osv):
_inherit = "res.users"
def _login(self, db, login, password):
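        # Try the standard Odoo credential check first; fall back to the
        # configured LDAP servers only when it fails and the login does not
        # already exist as a local user.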
user_id = super(users, self)._login(db, login, password)
if user_id:
return user_id
registry = RegistryManager.get(db)
with registry.cursor() as cr:
cr.execute("SELECT id FROM res_users WHERE lower(login)=%s", (login,))
res = cr.fetchone()
if res:
return False
ldap_obj = registry.get('res.company.ldap')
for conf in ldap_obj.get_ldap_dicts(cr):
entry = ldap_obj.authenticate(conf, login, password)
if entry:
user_id = ldap_obj.get_or_create_user(
cr, SUPERUSER_ID, conf, login, entry)
if user_id:
break
return user_id
def check_credentials(self, cr, uid, password):
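        # Accept the password if any configured LDAP server can bind with it;
        # otherwise re-raise the AccessDenied from the standard check.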
try:
super(users, self).check_credentials(cr, uid, password)
except openerp.exceptions.AccessDenied:
cr.execute('SELECT login FROM res_users WHERE id=%s AND active=TRUE',
(int(uid),))
res = cr.fetchone()
if res:
ldap_obj = self.pool['res.company.ldap']
for conf in ldap_obj.get_ldap_dicts(cr):
if ldap_obj.authenticate(conf, res[0], password):
return
raise
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| Jgarcia-IAS/Fidelizacion_odoo | openerp/addons/auth_ldap/users_ldap.py | Python | agpl-3.0 | 10,747 | 0.002885 |
# -*- coding: utf-8 -*-
#
# TSgettoolbox documentation build configuration file, created by
# sphinx-quickstart on Mon Jun 10 23:11:56 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.doctest",
"sphinx.ext.todo",
"sphinx.ext.coverage",
"sphinx.ext.mathjax",
"sphinx.ext.autosummary",
"sphinxcontrib.programoutput",
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# The suffix of source filenames.
source_suffix = ".rst"
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = "index"
# General information about the project.
project = "TSgettoolbox"
copyright = "2013, Tim Cera"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = open("../VERSION", "r").readline().strip()
# The full version, including alpha/beta/rc tags.
release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
today_fmt = "%Y-%m-%d"
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ["_build"]
# The reST default role (used for this markup: `text`) to use for all documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = "pyramid"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
html_show_sphinx = False
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = "TSgettoolboxdoc"
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
("index", "TSgettoolbox.tex", "TSgettoolbox Documentation", "Tim Cera", "manual")
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [("index", "tsgettoolbox", "TSgettoolbox Documentation", ["Tim Cera"], 1)]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(
"index",
"tsgettoolbox",
"TSgettoolbox Documentation",
"Tim Cera",
"tsgettoolbox",
"One line description of project.",
"Miscellaneous",
)
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
| timcera/tsgettoolbox | docs/conf.py | Python | bsd-3-clause | 8,064 | 0.001736 |
# -*- coding: utf-8 -*-
from typing import Optional, Text
from mock import MagicMock, patch
from zerver.lib.test_classes import WebhookTestCase
from zerver.lib.webhooks.git import COMMITS_LIMIT
class GogsHookTests(WebhookTestCase):
STREAM_NAME = 'commits'
URL_TEMPLATE = "/api/v1/external/gogs?&api_key={api_key}"
FIXTURE_DIR_NAME = 'gogs'
def test_push(self) -> None:
expected_subject = u"try-git / master"
expected_message = u"""john [pushed](http://localhost:3000/john/try-git/compare/479e6b772b7fba19412457483f50b201286d0103...d8fce16c72a2ff56a5afc8a08645a6ce45491794) 1 commit to branch master. Commits by John (1).
* Webhook Test ([d8fce16](http://localhost:3000/john/try-git/commit/d8fce16c72a2ff56a5afc8a08645a6ce45491794))"""
self.send_and_test_stream_message('push', expected_subject, expected_message, HTTP_X_GOGS_EVENT='push')
def test_push_multiple_committers(self) -> None:
commit_info = u'* Webhook Test ([d8fce16](http://localhost:3000/john/try-git/commit/d8fce16c72a2ff56a5afc8a08645a6ce45491794))\n'
expected_subject = u"try-git / master"
expected_message = u"""john [pushed](http://localhost:3000/john/try-git/compare/479e6b772b7fba19412457483f50b201286d0103...d8fce16c72a2ff56a5afc8a08645a6ce45491794) 2 commits to branch master. Commits by Benjamin (1) and John (1).\n\n{}* Webhook Test ([d8fce16](http://localhost:3000/john/try-git/commit/d8fce16c72a2ff56a5afc8a08645a6ce45491794))""".format(commit_info)
self.send_and_test_stream_message('push_commits_multiple_committers', expected_subject, expected_message, HTTP_X_GOGS_EVENT='push')
def test_push_multiple_committers_filtered_by_branches(self) -> None:
self.url = self.build_webhook_url(branches='master,development')
commit_info = u'* Webhook Test ([d8fce16](http://localhost:3000/john/try-git/commit/d8fce16c72a2ff56a5afc8a08645a6ce45491794))\n'
expected_subject = u"try-git / master"
expected_message = u"""john [pushed](http://localhost:3000/john/try-git/compare/479e6b772b7fba19412457483f50b201286d0103...d8fce16c72a2ff56a5afc8a08645a6ce45491794) 2 commits to branch master. Commits by Benjamin (1) and John (1).\n\n{}* Webhook Test ([d8fce16](http://localhost:3000/john/try-git/commit/d8fce16c72a2ff56a5afc8a08645a6ce45491794))""".format(commit_info)
self.send_and_test_stream_message('push_commits_multiple_committers', expected_subject, expected_message, HTTP_X_GOGS_EVENT='push')
def test_push_filtered_by_branches(self) -> None:
self.url = self.build_webhook_url(branches='master,development')
expected_subject = u"try-git / master"
expected_message = u"""john [pushed](http://localhost:3000/john/try-git/compare/479e6b772b7fba19412457483f50b201286d0103...d8fce16c72a2ff56a5afc8a08645a6ce45491794) 1 commit to branch master. Commits by John (1).
* Webhook Test ([d8fce16](http://localhost:3000/john/try-git/commit/d8fce16c72a2ff56a5afc8a08645a6ce45491794))"""
self.send_and_test_stream_message('push', expected_subject, expected_message, HTTP_X_GOGS_EVENT='push')
def test_push_commits_more_than_limits(self) -> None:
expected_subject = u"try-git / master"
commits_info = "* Webhook Test ([d8fce16](http://localhost:3000/john/try-git/commit/d8fce16c72a2ff56a5afc8a08645a6ce45491794))\n"
expected_message = u"john [pushed](http://localhost:3000/john/try-git/compare/479e6b772b7fba19412457483f50b201286d0103...d8fce16c72a2ff56a5afc8a08645a6ce45491794) 30 commits to branch master. Commits by John (30).\n\n{}[and {} more commit(s)]".format(
commits_info * COMMITS_LIMIT,
30 - COMMITS_LIMIT
)
self.send_and_test_stream_message('push_commits_more_than_limits', expected_subject, expected_message, HTTP_X_GOGS_EVENT='push')
def test_push_commits_more_than_limits_filtered_by_branches(self) -> None:
self.url = self.build_webhook_url(branches='master,development')
expected_subject = u"try-git / master"
commits_info = "* Webhook Test ([d8fce16](http://localhost:3000/john/try-git/commit/d8fce16c72a2ff56a5afc8a08645a6ce45491794))\n"
expected_message = u"john [pushed](http://localhost:3000/john/try-git/compare/479e6b772b7fba19412457483f50b201286d0103...d8fce16c72a2ff56a5afc8a08645a6ce45491794) 30 commits to branch master. Commits by John (30).\n\n{}[and {} more commit(s)]".format(
commits_info * COMMITS_LIMIT,
30 - COMMITS_LIMIT
)
self.send_and_test_stream_message('push_commits_more_than_limits', expected_subject, expected_message, HTTP_X_GOGS_EVENT='push')
def test_new_branch(self) -> None:
expected_subject = u"try-git / my_feature"
expected_message = u"john created [my_feature](http://localhost:3000/john/try-git/src/my_feature) branch"
self.send_and_test_stream_message('branch', expected_subject, expected_message, HTTP_X_GOGS_EVENT='create')
def test_pull_request_opened(self) -> None:
expected_subject = u"try-git / PR #1 Title Text for Pull Request"
expected_message = u"""john opened [PR #1](http://localhost:3000/john/try-git/pulls/1)
from `feature` to `master`"""
self.send_and_test_stream_message('pull_request_opened', expected_subject, expected_message, HTTP_X_GOGS_EVENT='pull_request')
def test_pull_request_closed(self) -> None:
expected_subject = u"try-git / PR #1 Title Text for Pull Request"
expected_message = u"""john closed [PR #1](http://localhost:3000/john/try-git/pulls/1)
from `feature` to `master`"""
self.send_and_test_stream_message('pull_request_closed', expected_subject, expected_message, HTTP_X_GOGS_EVENT='pull_request')
def test_pull_request_merged(self) -> None:
expected_subject = u"try-git / PR #2 Title Text for Pull Request"
expected_message = u"""john merged [PR #2](http://localhost:3000/john/try-git/pulls/2)
from `feature` to `master`"""
self.send_and_test_stream_message('pull_request_merged', expected_subject, expected_message, HTTP_X_GOGS_EVENT='pull_request')
@patch('zerver.webhooks.gogs.view.check_send_stream_message')
def test_push_filtered_by_branches_ignore(self, check_send_stream_message_mock: MagicMock) -> None:
self.url = self.build_webhook_url(branches='changes,development')
payload = self.get_body('push')
result = self.client_post(self.url, payload, HTTP_X_GOGS_EVENT='push',
content_type="application/json")
self.assertFalse(check_send_stream_message_mock.called)
self.assert_json_success(result)
@patch('zerver.webhooks.gogs.view.check_send_stream_message')
def test_push_commits_more_than_limits_filtered_by_branches_ignore(
self, check_send_stream_message_mock):
# type: (MagicMock) -> None
self.url = self.build_webhook_url(branches='changes,development')
payload = self.get_body('push_commits_more_than_limits')
result = self.client_post(self.url, payload, HTTP_X_GOGS_EVENT='push',
content_type="application/json")
self.assertFalse(check_send_stream_message_mock.called)
self.assert_json_success(result)
@patch('zerver.webhooks.gogs.view.check_send_stream_message')
def test_push_multiple_committers_filtered_by_branches_ignore(
self, check_send_stream_message_mock):
# type: (MagicMock) -> None
self.url = self.build_webhook_url(branches='changes,development')
payload = self.get_body('push_commits_multiple_committers')
result = self.client_post(self.url, payload, HTTP_X_GOGS_EVENT='push',
content_type="application/json")
self.assertFalse(check_send_stream_message_mock.called)
self.assert_json_success(result)
| mahim97/zulip | zerver/webhooks/gogs/tests.py | Python | apache-2.0 | 7,893 | 0.003547 |
"""Tests for views of tracker application."""
from django.contrib.auth import get_user_model
from django.core.urlresolvers import reverse
from django.test import TestCase
from tracker.models import (Expenditure, Purse)
User = get_user_model()
class HomeTest(TestCase):
"""Test home view."""
def setUp(self):
self.url = reverse('tracker:home')
def test_get(self):
"""Get home view."""
response = self.client.get(self.url)
self.assertEqual(response.status_code, 200)
def create_user(**kwargs):
"""Create a user."""
u = User.objects.create_user(**kwargs)
u.save()
return u
def create_purse(user=None, **kwargs):
"""Create a purse.
If user is not None, add it to the created purse.
"""
p = Purse.objects.create(**kwargs)
p.save()
if user is not None:
p.users.add(user)
return p
def create_expenditure(**kwargs):
"""Create an expenditure."""
e = Expenditure.objects.create(**kwargs)
e.save()
return e
class ExpenditureAddTest(TestCase):
"""Test expenditure add view."""
def setUp(self):
self.url = reverse('tracker:add')
def test_get_non_authentified(self):
"""Get page while no user is authentified."""
response = self.client.get(self.url)
self.assertEqual(response.status_code, 302)
url = '/tracker/login?next=/tracker/expenditures/add/'
self.assertEqual(response.url, url)
def test_get_authentified_without_purse(self):
"""Get page while user is authentified but has no purse.
"""
credentials = {'username': 'username',
'password': 'password'}
create_user(**credentials)
self.client.login(**credentials)
response = self.client.get(self.url)
expected_url = '/tracker/purses/create/'
self.assertRedirects(response, expected_url)
def test_get_authentified_without_default_purse(self):
"""Get page while user is authentified but has no default purse."""
credentials = {'username': 'username',
'password': 'password'}
u = create_user(**credentials)
self.client.login(**credentials)
create_purse(u)
response = self.client.get(self.url)
self.assertEqual(response.status_code, 200)
# self.assertEqual(u.default_purse, p)
# TODO Check messages
def test_post(self):
"""Get page then post."""
credentials = {'username': 'username',
'password': 'password'}
u = create_user(**credentials)
self.client.login(**credentials)
p = create_purse(u)
u.default_purse = p
u.save()
response = self.client.get(self.url)
self.assertEqual(response.status_code, 200)
token = response.cookies['csrftoken'].value
data = {'amount': 100,
'date': '24/05/2014',
'description': 'expenditure description',
'occurrences': '1',
'csrftoken': token}
response = self.client.post(self.url, data)
self.assertEqual(response.status_code, 302)
url = '/tracker/expenditures/'
self.assertEqual(response.url, url)
self.assertEqual(u.expenditure_set.count(), 1)
def test_post_and_save_other(self):
"""Get page then post and save other."""
credentials = {'username': 'username',
'password': 'password'}
u = create_user(**credentials)
self.client.login(**credentials)
p = create_purse(u)
u.default_purse = p
u.save()
response = self.client.get(self.url)
self.assertEqual(response.status_code, 200)
token = response.cookies['csrftoken'].value
data = {'amount': 300,
'date': '25/05/2014',
'description': 'other expenditure description',
'occurrences': '1',
'save_other': True,
'csrftoken': token}
response = self.client.post(self.url, data)
self.assertEqual(response.status_code, 302)
url = self.url + '?date=2014-05-25'
self.assertEqual(response.url, url)
self.assertEqual(u.expenditure_set.count(), 1)
def test_post_with_multiple_occurence(self):
"""Get page then post to create multiple expenditures."""
credentials = {'username': 'username',
'password': 'password'}
u = create_user(**credentials)
self.client.login(**credentials)
p = create_purse(u)
u.default_purse = p
u.save()
response = self.client.get(self.url)
self.assertEqual(response.status_code, 200)
token = response.cookies['csrftoken'].value
data = {'amount': 100,
'date': '24/05/2014',
'description': 'expenditure description',
'occurrences': '3',
'csrftoken': token}
response = self.client.post(self.url, data)
self.assertEqual(response.status_code, 302)
url = '/tracker/expenditures/'
self.assertEqual(response.url, url)
self.assertEqual(u.expenditure_set.count(), 3)
class ExpenditureDeleteTest(TestCase):
"""Test expenditure delete view."""
def setUp(self):
credentials = {'username': 'username',
'password': 'password'}
u = create_user(**credentials)
p = create_purse(u)
u.default_purse = p
u.save()
e = Expenditure.objects.create(amount=199,
author=u,
purse=p)
self.url = reverse('tracker:delete', kwargs={'pk': e.pk})
def test_get_non_authentified(self):
"""Get page while no user is authentified."""
response = self.client.get(self.url)
self.assertEqual(response.status_code, 302)
expected_url = '/tracker/login?next='
expected_url += self.url
self.assertEqual(response.url, expected_url)
def test_get_authentified(self):
"""Get page then delete resource while user is authentified."""
credentials = {'username': 'username',
'password': 'password'}
self.client.login(**credentials)
response = self.client.get(self.url)
self.assertEqual(response.status_code, 200)
response = self.client.delete(self.url)
self.assertEqual(response.status_code, 302)
self.assertEqual(Expenditure.objects.count(), 0)
self.assertEqual(response.url,
'/tracker/expenditures/')
class ExpenditureUpdateTest(TestCase):
"""Test expenditure update view."""
def setUp(self):
credentials = {'username': 'username',
'password': 'password'}
self.u = create_user(**credentials)
p = create_purse(self.u)
self.u.default_purse = p
self.u.save()
e = Expenditure.objects.create(amount=199,
author=self.u,
purse=p)
self.url = reverse('tracker:update', kwargs={'pk': e.pk})
def test_get_non_authentified(self):
"""Get page while no user is authentified."""
response = self.client.get(self.url)
self.assertEqual(response.status_code, 302)
expected_url = '/tracker/login?next='
expected_url += self.url
self.assertEqual(response.url, expected_url)
def test_get_authentified(self):
"""Get page then update resource while user is authentified."""
credentials = {'username': 'username',
'password': 'password'}
self.client.login(**credentials)
response = self.client.get(self.url)
self.assertEqual(response.status_code, 200)
token = response.cookies['csrftoken'].value
data = {'amount': 100,
'date': '24/05/2014',
'description': 'expenditure description',
'occurrences': '1',
'csrftoken': token}
response = self.client.post(self.url, data)
self.assertEqual(response.status_code, 302)
url = '/tracker/expenditures/'
self.assertEqual(response.url, url)
self.assertEqual(self.u.expenditure_set.count(), 1)
e = self.u.expenditure_set.all()[0]
self.assertEqual(e.amount, 100)
self.assertEqual(e.description, 'expenditure description')
self.assertEqual(len(e.tag_set.all()), 2)
class PurseCreationTest(TestCase):
"""Test purse creation view."""
def setUp(self):
self.url = reverse('tracker:purse_creation')
def test_get_non_authentified(self):
"""Get page while no user is authentified."""
response = self.client.get(self.url)
self.assertEqual(response.status_code, 302)
url = '/tracker/login?next=/tracker/purses/create/'
self.assertEqual(response.url, url)
def test_get_authentified(self):
"""Get page for authentified user."""
credentials = {'username': 'username',
'password': 'password'}
create_user(**credentials)
self.client.login(**credentials)
response = self.client.get(self.url)
self.assertEqual(response.status_code, 200)
def test_post_first_purse(self):
"""Get page then post to create a first purse."""
credentials = {'username': 'username',
'password': 'password'}
u = create_user(**credentials)
self.client.login(**credentials)
response = self.client.get(self.url)
token = response.cookies['csrftoken'].value
data = {'name': 'Tes',
'description': 'The purse description',
'csrftoken': token}
response = self.client.post(self.url, data)
self.assertEqual(response.status_code, 302)
url = '/tracker/expenditures/'
self.assertEqual(response.url, url)
self.assertEqual(u.purse_set.count(), 1)
def test_post(self):
"""Get page then post."""
credentials = {'username': 'username',
'password': 'password'}
u = create_user(**credentials)
create_purse(u)
self.client.login(**credentials)
response = self.client.get(self.url)
token = response.cookies['csrftoken'].value
data = {'name': 'Second purse',
'description': 'The purse description',
'csrftoken': token}
response = self.client.post(self.url, data)
self.assertEqual(response.status_code, 302)
url = '/tracker/purses/'
self.assertEqual(response.url, url)
u = User.objects.get(username='username')
self.assertEqual(u.purse_set.count(), 2)
self.assertEqual(u.default_purse.name, 'Second purse')
class PurseUpdateTest(TestCase):
"""Test purse update view."""
def setUp(self):
self.credentials = {'username': 'username',
'password': 'password'}
u = create_user(**self.credentials)
create_purse(u)
self.url = reverse('tracker:purse_update', kwargs={'pk': u.pk})
def test_get_non_authentified(self):
"""Get page while no user is authentified."""
response = self.client.get(self.url)
self.assertEqual(response.status_code, 302)
url = '/tracker/logout/'
self.assertEqual(response.url, url)
def test_get_authentified(self):
"""Get page for authentified user."""
self.client.login(**self.credentials)
response = self.client.get(self.url)
self.assertEqual(response.status_code, 200)
def test_get_authentified_not_in_purse(self):
"""Get page for authentified user not in purse."""
credentials = {'username': 'other',
'password': 'password'}
create_user(**credentials)
self.client.login(**credentials)
response = self.client.get(self.url)
self.assertEqual(response.status_code, 302)
url = '/tracker/logout/'
self.assertEqual(response.url, url)
def test_post(self):
"""Post a new purse name."""
self.client.login(**self.credentials)
response = self.client.get(self.url)
token = response.cookies['csrftoken'].value
data = {'name': 'New purse name',
'csrftoken': token}
response = self.client.post(self.url, data)
self.assertEqual(response.status_code, 302)
url = '/tracker/purses/'
self.assertEqual(response.url, url)
u = User.objects.get(username='username')
self.assertTrue(u.purse_set.values_list('name', flat=True),
['New purse name'])
class ExpenditureFilteredListTest(TestCase):
"""Test expenditure filtered list."""
def setUp(self):
self.credentials = {'username': 'username',
'password': 'password'}
create_user(**self.credentials)
self.url = reverse('tracker:expenditure-search')
def test_get_non_authentified(self):
"""Get page while no user is authentified."""
response = self.client.get(self.url)
self.assertEqual(response.status_code, 302)
url = '/tracker/login'
url += '?next=/tracker/expenditures/search/'
self.assertEqual(response.url, url)
def test_get_authentified_without_purse(self):
"""Get page while user is authentified but has no purse."""
self.client.login(**self.credentials)
response = self.client.get(self.url)
expected_url = '/tracker/purses/create/'
self.assertRedirects(response, expected_url)
def test_get_authentified_without_default_purse(self):
"""Get page while user is authentified but has no default purse."""
self.client.login(**self.credentials)
u = User.objects.get(username='username')
create_purse(u)
response = self.client.get(self.url)
self.assertEqual(response.status_code, 200)
# self.assertEqual(u.default_purse, p)
# TODO Check messages
def test_get(self):
"""Get page for authentified user with purse."""
self.client.login(**self.credentials)
u = User.objects.get(username='username')
p = create_purse(u)
desc = 'Uniquedesc'
# REMARK First letter must be in uppercase
create_expenditure(**{'amount': 100,
'date': '2014-12-2',
'description': desc,
'author': u,
'purse': p})
response = self.client.get(self.url)
self.assertEqual(response.status_code, 200)
self.assertContains(response, desc)
def test_get_single_keyword(self):
"""Get page for a single filter keyword."""
self.client.login(**self.credentials)
u = User.objects.get(username='username')
p = create_purse(u)
create_expenditure(**{'amount': 100,
'date': '2014-12-2',
'description': 'firstdesc uniqueterm',
'author': u,
'purse': p})
create_expenditure(**{'amount': 100,
'date': '2014-12-2',
'description': 'otherdesc',
'author': u,
'purse': p})
response = self.client.get(self.url + '?filter=firstdesc')
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'uniqueterm')
self.assertNotContains(response, 'otherterm')
def test_get_num_keyword(self):
"""Get page for a single filter keyword convertible to float."""
self.client.login(**self.credentials)
u = User.objects.get(username='username')
p = create_purse(u)
create_expenditure(**{'amount': 120.45,
'date': '2014-12-2',
'description': 'firstdesc uniqueterm',
'author': u,
'purse': p})
create_expenditure(**{'amount': 100,
'date': '2014-12-2',
'description': 'otherdesc',
'author': u,
'purse': p})
create_expenditure(**{'amount': 120.45,
'date': '2014-11-7',
'description': 'lastdesc',
'author': u,
'purse': p})
response = self.client.get(self.url + '?filter=120.45')
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'uniqueterm')
self.assertNotContains(response, 'otherdesc')
self.assertContains(response, 'Lastdesc')
# REMARK Note that descriptions are capitalized
def test_get_multiple_keywords(self):
"""Get page for multiple filter keywords."""
self.client.login(**self.credentials)
u = User.objects.get(username='username')
p = create_purse(u)
create_expenditure(**{'amount': 120.45,
'date': '2014-12-2',
'description': 'firstdesc uniqueterm',
'author': u,
'purse': p})
create_expenditure(**{'amount': 100,
'date': '2014-12-2',
'description': 'otherdesc',
'author': u,
'purse': p})
create_expenditure(**{'amount': 120.45,
'date': '2014-12-7',
'description': 'lastdesc',
'author': u,
'purse': p})
response = self.client.get(self.url + '?filter=120.45+unique')
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'uniqueterm')
self.assertNotContains(response, 'otherterm')
self.assertNotContains(response, 'lastdesc')
class PurseDeletionTest(TestCase):
"""Test purse deletion view."""
def setUp(self):
self.credentials = {'username': 'username',
'password': 'password'}
u = create_user(**self.credentials)
p = create_purse(u)
self.url = reverse('tracker:purse_delete',
kwargs={'pk': p.pk})
def test_get_non_authentified(self):
"""Get page while no user is authentified."""
response = self.client.get(self.url)
self.assertEqual(response.status_code, 302)
url = '/tracker/login?next=/tracker/purses/delete/1/'
self.assertEqual(response.url, url)
def test_get_authentified(self):
"""Get page for authentified user."""
self.client.login(**self.credentials)
response = self.client.get(self.url)
self.assertEqual(response.status_code, 200)
response = self.client.delete(self.url)
self.assertEqual(response.status_code, 302)
self.assertEqual(Purse.objects.count(), 0)
| orontee/porte-monnaie | site/tracker/tests/test_views.py | Python | gpl-3.0 | 19,399 | 0 |
###############################################################################
# Name: pike.py #
# Purpose: Define highlighting/syntax for Pike programming language #
# Author: Cody Precord <cprecord@editra.org> #
# Copyright: (c) 2007 Cody Precord <staff@editra.org> #
# License: wxWindows License #
###############################################################################
"""
FILE: pike.py
@summary: Defines syntax and highlighting settings for the Pike programming
language. Pike is very similar in form to C/CPP so the Cpp lexer is
used to provide the highlighting settings.
"""
__author__ = "Cody Precord <cprecord@editra.org>"
__svnid__ = "$Id: pike.py 55174 2008-08-22 15:12:27Z CJP $"
__revision__ = "$Revision: 55174 $"
#-----------------------------------------------------------------------------#
# Local Imports
import synglob
import cpp
#-----------------------------------------------------------------------------#
#---- Keyword Definitions ----#
PIKE_KW = (0, "goto break return continue case default if else switch while "
"foreach do gauge destruct lambda inherit import typeof catch "
"for inline nomask")
PIKE_TYPE = (1, "private protected public static "
"int string void float mapping array multiset mixed program "
"object function")
PIKE_DOC = tuple(cpp.DOC_KEYWORDS)
#---- End Keyword Definitions ----#
#---- Syntax Style Specs ----#
SYNTAX_ITEMS = list(cpp.SYNTAX_ITEMS)
#---- Extra Properties ----#
# Fetched from cpp module on request
#-----------------------------------------------------------------------------#
#---- Required Module Functions ----#
def Keywords(lang_id=0):
"""Returns Specified Keywords List
@keyword lang_id: used to select specific subset of keywords
"""
if lang_id == synglob.ID_LANG_PIKE:
return [PIKE_KW, PIKE_TYPE, PIKE_DOC]
else:
return list()
def SyntaxSpec(lang_id=0):
"""Syntax Specifications
@keyword lang_id: used for selecting a specific subset of syntax specs
"""
if lang_id == synglob.ID_LANG_PIKE:
return SYNTAX_ITEMS
else:
return list()
def Properties(lang_id=0):
"""Returns a list of Extra Properties to set
@keyword lang_id: used to select a specific set of properties
"""
if lang_id == synglob.ID_LANG_PIKE:
return cpp.Properties(synglob.ID_LANG_CPP)
else:
return list()
def CommentPattern(lang_id=0):
"""Returns a list of characters used to comment a block of code
@keyword lang_id: used to select a specific subset of comment pattern(s)
"""
if lang_id == synglob.ID_LANG_PIKE:
return cpp.CommentPattern(synglob.ID_LANG_CPP)
else:
return list()
#---- End Required Module Functions ----#
AutoIndenter = cpp.AutoIndenter
#---- Syntax Modules Internal Functions ----#
def KeywordString():
"""Returns the specified Keyword String
@note: not used by most modules
"""
return None
#---- End Syntax Modules Internal Functions ----#
| robmcmullen/peppy | editra.in/syntax/pike.py | Python | gpl-2.0 | 3,246 | 0.005237 |
# __init__.py: Yet Another Bayes Net library
# Contact: Jacob Schreiber ( jmschreiber91@gmail.com )
"""
For detailed documentation and examples, see the README.
"""
# Make our dependencies explicit so compiled Cython code won't segfault trying
# to load them.
import networkx, matplotlib.pyplot, scipy
import numpy as np
import os
import pyximport
# Adapted from Cython docs https://github.com/cython/cython/wiki/
# InstallingOnWindows#mingw--numpy--pyximport-at-runtime
if os.name == 'nt':
if 'CPATH' in os.environ:
os.environ['CPATH'] = os.environ['CPATH'] + np.get_include()
else:
os.environ['CPATH'] = np.get_include()
# XXX: we're assuming that MinGW is installed in C:\MinGW (default)
    # NB: raw strings keep the '\b' in the MinGW path from being treated as a backspace escape
    if 'PATH' in os.environ:
        os.environ['PATH'] = os.environ['PATH'] + r';C:\MinGW\bin'
    else:
        os.environ['PATH'] = r'C:\MinGW\bin'
mingw_setup_args = { 'options': { 'build_ext': { 'compiler': 'mingw32' } } }
pyximport.install(setup_args=mingw_setup_args)
elif os.name == 'posix':
if 'CFLAGS' in os.environ:
os.environ['CFLAGS'] = os.environ['CFLAGS'] + ' -I' + np.get_include()
else:
os.environ['CFLAGS'] = ' -I' + np.get_include()
pyximport.install()
from yabn import *
__version__ = '0.1.0' | jmschrei/yabn | yabn/__init__.py | Python | mit | 1,279 | 0.009382 |
#####
import sys
import inspect
from pylons import config
import logging
import zkpylons.lib.helpers as h
from pylons import request, response, session, tmpl_context as c
from zkpylons.lib.helpers import redirect_to
from pylons.util import class_name_from_module_name
from zkpylons.model import meta
from pylons.controllers.util import abort
from zkpylons.lib.base import BaseController, render
from zkpylons.model import URLHash
log = logging.getLogger(__name__)
class SecretHashController(BaseController):
def lookup(self, hash):
c.hash = URLHash.find_by_hash(hash)
if c.hash is None:
abort(404, "Sorry, Invalid Hash.")
return self.transfer(url=c.hash.url)
# as per http://www.mail-archive.com/pylons-discuss@googlegroups.com/msg06643.html
def transfer(controller = None, action = None, url = None, **kwargs):
"""usage:
1. result = transfer(url = "/someurl/someaction")
2. result = transfer(controller = "/controller1/sub_controller2",
action = "test") # kwargs will pass to action.
"""
if (url != None):
route_map = config['routes.map']
match_route= route_map.match(url)
if (match_route == None):
raise(Exception("no route matched url '%s'" % url))
# if
controller = match_route["controller"].replace("/", ".")
action = match_route["action"]
del(match_route["controller"])
del(match_route["action"])
kwargs.update(match_route)
else:
if (controller == None):
route_map = config['routes.map']
match_route = route_map.match("/")
if (match_route == None):
raise(Exception("no route matched url '%s'" % url))
# if
controller = match_route["controller"].replace("/", ".")
if (action == None):
action = match_route["action"]
# if
del(match_route["controller"])
del(match_route["action"])
kwargs.update(match_route)
else:
controller = controller.replace("/", ".")
if (action == None):
action = "index"
# if
# if
# if
full_module_name = config['pylons.package'] + '.controllers.' + controller
__traceback_hide__ = 'before_and_this'
try:
__import__(full_module_name)
except ImportError, e:
raise(NotImplementedError("'%s' not found: %s" % (controller, e)))
# try
module_name = controller.split('.')[-1]
class_name = class_name_from_module_name(module_name) + 'Controller'
controller_class = getattr(sys.modules[full_module_name], class_name)
controller_inst = controller_class()
if (hasattr(controller_inst, action)):
action_method = getattr(controller_inst, action, None)
#if (not isinstance(action_method, types.MethodType)):
# raise(NotImplementedError("action '%s' not found in '%s'" % (action, controller)))
# if
if (hasattr(controller_inst, "__before__")):
before_method = getattr(controller_inst, "__before__", None)
#if (isinstance(before_method, types.MethodType)):
# before_method(action)
# if
# if
action_args_name, action_args, action_kargs, action_defaults = inspect.getargspec(action_method)
del(action_args_name[0])
call_kargs = {}
for k, v in kwargs.iteritems():
if (k in action_args_name):
call_kargs[k] = v
# if
# for
result = action_method(**call_kargs)
if (hasattr(controller_inst, "__after__")):
after_method = getattr(controller_inst, "__after__", None)
#if (isinstance(after_method, types.MethodType)):
# after_method(action)
# if
# if
return(result)
else:
raise(NotImplementedError("action '%s' not found in '%s'" % (action, controller)))
# if
# def
| neillc/zookeepr | zkpylons/controllers/secret_hash.py | Python | gpl-2.0 | 4,319 | 0.005094 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import os
import zipfile
DEV_DATA_PATH = os.path.join(
os.path.dirname(__file__),
'..',
'dev_data',
)
def data_path(*args):
"""
Returns a path to dev data
"""
return os.path.join(DEV_DATA_PATH, *args)
def words100k():
zip_name = data_path('words100k.txt.zip')
zf = zipfile.ZipFile(zip_name)
txt = zf.open(zf.namelist()[0]).read().decode('utf8')
return txt.splitlines()
| kmike/DAWG-Python | tests/utils.py | Python | mit | 479 | 0.004175 |
#!/usr/bin/env python
""" genpyx.py - parse c declarations
(c) 2002, 2003, 2004, 2005 Simon Burton <simon@arrowtheory.com>
Released under GNU LGPL license.
version 0.xx
This is a module of mixin classes for ir.py.
Towards the end of ir.py our global class definitions
are remapped to point to the class definitions in ir.py.
So, for example, when we refer to Node we get ir.Node.
"""
import sys
from datetime import datetime
# XX use this Context class instead of all those kw dicts !! XX
class Context(object):
" just a record (struct) "
def __init__( self, **kw ):
for key, value in kw.items():
setattr( self, key, value )
def __getattr__( self, name ):
return None # ?
def __getitem__( self, name ):
return getattr(self, name)
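# Example (sketch): ctx = Context(indent=0, cprefix="c_") gives ctx.indent == 0,
# while unknown attributes fall through __getattr__ and return None.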
class OStream(object):
def __init__( self, filename=None ):
self.filename = filename
self.tokens = []
self._indent = 0
def put( self, token="" ):
assert type(token) is str
self.tokens.append( token )
def startln( self, token="" ):
assert type(token) is str
self.tokens.append( ' '*self._indent + token )
def putln( self, ln="" ):
assert type(ln) is str
self.tokens.append( ' '*self._indent + ln + '\n')
def endln( self, token="" ):
assert type(token) is str
self.tokens.append( token + '\n')
def indent( self ):
self._indent += 1
def dedent( self ):
self._indent -= 1
assert self._indent >= 0, self._indent
def join( self ):
return ''.join( self.tokens )
def close( self ):
s = ''.join( self.tokens )
f = open( self.filename, 'w' )
f.write(s)
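# Typical use (sketch): out = OStream("wrap.pyx"); out.putln("cdef int x"); out.close()
# writes the accumulated tokens to the named file.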
#
###############################################################################
#
class Node(object):
"""
tree structure
"""
_unique_id = 0
def get_unique_id(cls):
Node._unique_id += 1
return Node._unique_id
get_unique_id = classmethod(get_unique_id)
# XX toks: use a tree of tokens: a list that can be push'ed and pop'ed XX
def pyxstr(self,toks=None,indent=0,**kw):
"""
Build a list of tokens; return the joined tokens string
"""
if toks is None:
toks = []
for x in self:
if isinstance(x,Node):
x.pyxstr(toks, indent, **kw)
else:
toks.insert(0,str(x)+' ')
s = ''.join(toks)
return s
#
#################################################
class Named(object):
"has a .name property"
pass
class BasicType(object):
"float double void char int"
pass
class Qualifier(object):
"register signed unsigned short long const volatile inline"
def pyxstr(self,toks=None,indent=0,**kw):
if toks is None:
toks = []
x = self[0]
if x not in ( 'const','volatile','inline','register'): # ignore these
toks.insert(0,str(x)+' ')
s = ''.join(toks)
return s
class StorageClass(object):
"extern static auto"
def pyxstr(self,toks=None,indent=0,**kw):
return ""
class Ellipses(object):
"..."
pass
class GCCBuiltin(BasicType):
"things with __builtin prefix"
pass
class Identifier(object):
"""
"""
def pyxstr(self,toks=None,indent=0,**kw):
if toks is None:
toks=[]
if self.name:
toks.append( self.name )
return " ".join(toks)
class TypeAlias(object):
"""
typedefed things, eg. size_t
"""
def pyxstr(self,toks=None,indent=0,cprefix="",**kw):
if toks is None:
toks = []
for x in self:
if isinstance(x,Node):
x.pyxstr(toks, indent, cprefix=cprefix, **kw)
else:
s = str(x)+' '
if cprefix:
s = cprefix+s
toks.insert(0,s)
s = ''.join(toks)
return s
class Function(object):
"""
"""
def pyxstr(self,toks,indent=0,**kw):
#print '%s.pyxstr(%s)'%(self,toks)
_toks=[]
assert len(self)
i=0
while isinstance(self[i],Declarator):
if not self[i].is_void():
_toks.append( self[i].pyxstr(indent=indent, **kw) )
i=i+1
toks.append( '(%s)'% ', '.join(_toks) )
while i<len(self):
self[i].pyxstr(toks, indent=indent, **kw)
i=i+1
return " ".join(toks)
class Pointer(object):
"""
"""
def pyxstr(self,toks,indent=0,**kw):
assert len(self)
node=self[0]
toks.insert(0,'*')
if isinstance(node,Function):
toks.insert(0,'(')
toks.append(')')
elif isinstance(node,Array):
toks.insert(0,'(')
toks.append(')')
return Node.pyxstr(self,toks,indent, **kw)
class Array(object):
"""
"""
def pyxstr(self,toks,indent=0,**kw):
if self.size is None:
toks.append('[]')
else:
try:
int(self.size)
toks.append('[%s]'%self.size)
except:
toks.append('[]')
return Node( *self[:-1] ).pyxstr( toks,indent, **kw )
class Tag(object):
" the tag of a Struct, Union or Enum "
pass
class Taged(object):
"Struct, Union or Enum "
pass
class Compound(Taged):
"Struct or Union"
def pyxstr(self,_toks=None,indent=0,cprefix="",shadow_name=True,**kw):
if _toks is None:
_toks=[]
names = kw.get('names',{})
kw['names'] = names
tag_lookup = kw.get('tag_lookup')
if self.tag:
tag=self.tag.name
else:
tag = ''
if isinstance(self,Struct):
descr = 'struct'
elif isinstance(self,Union):
descr = 'union'
_node = names.get(self.tag.name,None)
if ( _node is not None and _node.has_members() ) or \
( _node is not None and not self.has_members() ):
descr = '' # i am not defining myself here
#print "Compound.pyxstr", tag
#print self.deepstr()
if descr:
if cprefix and shadow_name:
tag = '%s%s "%s"'%(cprefix,tag,tag)
elif cprefix:
tag = cprefix+tag
toks = [ descr+' '+tag ] # struct foo
if self.has_members():
toks.append(':\n')
for decl in self[1:]: # XX self.members
toks.append( decl.pyxstr(indent=indent+1, cprefix=cprefix, shadow_name=shadow_name, **kw)+"\n" ) # shadow_name = False ?
#elif not tag_lookup.get( self.tag.name, self ).has_members():
# define empty struct here, it's the best we're gonna get
#pass
else:
if cprefix: # and shadow_name:
tag = cprefix+tag
toks = [ ' '+tag+' ' ] # foo
while toks:
_toks.insert( 0, toks.pop() )
return "".join( _toks )
class Struct(Compound):
"""
"""
pass
class Union(Compound):
"""
"""
pass
class Enum(Taged):
"""
"""
def pyxstr(self,_toks=None,indent=0,cprefix="",shadow_name=True,**kw):
if _toks is None:
_toks=[]
names = kw.get('names',{})
kw['names'] = names
if self.tag:
tag=self.tag.name
else:
tag = ''
_node = names.get(self.tag.name,None)
if ( _node is not None and _node.has_members() ) or \
( _node is not None and not self.has_members() ):
descr = '' # i am not defining myself here
else:
descr = 'enum'
if descr:
#if not names.has_key(self.tag.name):
toks = [ descr+' '+tag ] # enum foo
toks.append(':\n')
idents = [ ident for ident in self.members if ident.name not in names ]
for ident in idents:
if cprefix and shadow_name:
ident = ident.clone()
ident.name = '%s%s "%s"' % ( cprefix, ident.name, ident.name )
#else: assert 0
toks.append( ' '+' '*indent + ident.pyxstr(**kw)+"\n" )
names[ ident.name ] = ident
if not idents:
# empty enum def'n !
#assert 0 # should be handled by parents...
toks.append( ' '+' '*indent + "pass\n" )
else:
toks = [ ' '+tag+' ' ] # foo
while toks:
_toks.insert( 0, toks.pop() )
return "".join( _toks )
class Declarator(object):
def is_pyxnative( self ):
# pyrex handles char* too
# but i don't know if we should make this the default
# sometimes we want to send a NULL, so ... XX
self = self.cbasetype() # WARNING: cbasetype may be cached
if self.is_void():
return False
if self.is_primative():
return True
if self.enum:
return True
#pointer = None
#if self.pointer:
#pointer = self.pointer
#elif self.array:
#pointer = self.array
#if pointer and pointer.spec:
#spec = pointer.spec
#if BasicType("char") in spec and not Qualifier("unsigned") in spec:
# char*, const char*
##print self.deepstr()
#return True
return False
def _pyxstr( self, toks, indent, cprefix, use_cdef, shadow_name, **kw ):
" this is the common part of pyxstr that gets called from both Declarator and Typedef "
names = kw.get('names',{}) # what names have been defined ?
kw['names']=names
for node in self.nodes(): # depth-first
if isinstance(node,Taged):
#print "Declarator.pyxstr", node.cstr()
if not node.tag.name:
node.tag.name = "_anon_%s" % Node.get_unique_id()
_node = names.get(node.tag.name,None)
#tag_lookup = kw.get('tag_lookup')
#other = tag_lookup.get(node.tag.name, node)
#if ((_node is None and (not isinstance(other,Compound) or not other.has_members()))
# or node.has_members()):
if _node is None or node.has_members():
# either i am not defined at all, or this is my _real_ definition
# emit def'n of this node
#if isinstance(self,Typedef):
#toks.append( ' '*indent + 'ctypedef ' + node.pyxstr(indent=indent, cprefix=cprefix, shadow_name=shadow_name, **kw).strip() )
#else:
toks.append( ' '*indent + 'cdef ' + node.pyxstr(indent=indent, cprefix=cprefix, shadow_name=shadow_name, **kw).strip() )
names[ node.tag.name ] = node
elif isinstance(node,GCCBuiltin) and node[0] not in names:
#toks.append( ' '*indent + 'ctypedef long ' + node.pyxstr(indent=indent, **kw).strip() + ' # XX ??' ) # XX ??
toks.append( ' '*indent + 'struct __unknown_builtin ' )
toks.append( ' '*indent + 'ctypedef __unknown_builtin ' + node.pyxstr(indent=indent, **kw).strip() )
names[ node[0] ] = node
for idx, child in enumerate(node):
if type(child)==Array and not child.has_size():
# mutate this mystery array into a pointer XX method: Array.to_pointer()
node[idx] = Pointer()
node[idx].init_from( child ) # warning: shallow init
node[idx].pop() # pop the size element
def pyxstr(self,toks=None,indent=0,cprefix="",use_cdef=True,shadow_name=True,**kw):
" note: i do not check if my name is already in 'names' "
self = self.clone() # <----- NOTE
toks=[]
names = kw.get('names',{}) # what names have been defined ?
kw['names']=names
self._pyxstr( toks, indent, cprefix, use_cdef, shadow_name, **kw )
if self.name and not names.has_key( self.name ):
names[ self.name ] = self
if self.identifier is not None:
comment = ""
if self.name in python_kws:
comment = "#"
if cprefix and use_cdef and shadow_name:
# When we are defining this guy, we refer to it using the pyrex shadow syntax.
self.name = '%s%s "%s" ' % ( cprefix, self.name, self.name )
cdef = 'cdef '
if not use_cdef: cdef = '' # sometimes we don't want the cdef (eg. in a cast)
# this may need shadow_name=False:
toks.append( ' '*indent + comment + cdef + Node.pyxstr(self,indent=indent, cprefix=cprefix, **kw).strip() ) # + "(cprefix=%s)"%cprefix)
#else: i am just a struct def (so i already did that) # huh ?? XX bad comment
return ' \n'.join(toks)
def pyxsym(self, ostream, names=None, tag_lookup=None, cprefix="", modname=None, cobjects=None):
assert self.name is not None, self.deepstr()
ostream.putln( '# ' + self.cstr() )
# This cdef is no good: it does not expose a python object
# and we can't reliably set a global var
#ostream.putln( 'cdef %s %s' % ( self.pyx_adaptor_decl(cobjects), self.name ) ) # _CObject
#ostream.putln( '%s = %s()' % (self.name, self.pyx_adaptor_name(cobjects)) )
#ostream.putln( '%s.p = <void*>&%s' % (self.name, cprefix+self.name) )
## expose a python object:
#ostream.putln( '%s.%s = %s' % (modname,self.name, self.name) )
ostream.putln( '%s = %s( addr = <long>&%s )' % (self.name, self.pyx_adaptor_name(cobjects), cprefix+self.name) )
return ostream
class Typedef(Declarator):
def pyxstr(self,toks=None,indent=0,cprefix="",use_cdef=True,shadow_name=True,**kw): # shadow_name=True
" warning: i do not check if my name is already in 'names' "
assert shadow_name == True
self = self.clone() # <----- NOTE
toks=[]
names = kw.get('names',{}) # what names have been defined ?
kw['names']=names
#if self.tagged and not self.tagged.tag.name:
## "typedef struct {...} foo;" => "typedef struct foo {...} foo;"
## (to be emitted in the node loop below, and suppressed in the final toks.append)
#self.tagged.tag = Tag( self.name ) # this is how pyrex does it: tag.name == self.name
# XX that doesn't work (the resulting c fails to compile) XX
self._pyxstr( toks, indent, cprefix, use_cdef, shadow_name, **kw )
#print self.deepstr()
if self.name and not names.has_key( self.name ):
names[ self.name ] = self
if not (self.tagged and self.name == self.tagged.tag.name):
comment = ""
if self.name in python_kws:
comment = "#"
#if cprefix:
# self.name = '%s%s "%s" ' % ( cprefix, self.name, self.name ) # XX pyrex can't do this
if cprefix: # shadow_name=True
# My c-name gets this prefix. See also TypeAlias.pyxstr(): it also prepends the cprefix.
self.name = '%s%s "%s" ' % ( cprefix, self.name, self.name )
toks.append( ' '*indent + comment + 'ctypedef ' + Node.pyxstr(self,indent=indent, cprefix=cprefix, **kw).strip() )
return ' \n'.join(toks)
class AbstractDeclarator(Declarator):
""" used in Function; may lack an identifier """
def pyxstr(self,toks=None,indent=0,**kw):
if self.name in python_kws:
# Would be better to do this in __init__, but our subclass doesn't call our __init__.
self.name = '_' + self.name
#return ' '*indent + Node.pyxstr(self,toks,indent, **kw).strip()
return Node.pyxstr(self,toks,indent, **kw).strip()
class FieldLength(object):
"""
"""
def pyxstr(self,toks,indent,**kw):
pass
class StructDeclarator(Declarator): # also used in Union
"""
"""
def pyxstr(self,toks=None,indent=0,**kw):
comment = ""
if self.name in python_kws:
comment = "#"
return ' '*indent + comment + Node.pyxstr(self,toks,indent, **kw).strip()
class DeclarationSpecifiers(object):
"""
"""
pass
class TypeSpecifiers(DeclarationSpecifiers):
"""
"""
pass
class Initializer(object):
"""
"""
pass
class Declaration(object):
"""
"""
pass
class ParameterDeclaration(Declaration):
"""
"""
pass
class StructDeclaration(Declaration):
"""
"""
pass
class TransUnit(object):
"""
Top level node.
"""
def pyx_decls(self, filenames, modname, macros = {}, names = {}, func_cb=None, cprefix="", **kw):
# PART 1: emit extern declarations
ostream = OStream()
now = datetime.today()
ostream.putln( now.strftime('# Code generated by pyxelator on %x at %X') + '\n' )
ostream.putln("# PART 1: extern declarations")
for filename in filenames:
ostream.putln( 'cdef extern from "%s":\n pass\n' % filename )
ostream.putln( 'cdef extern from *:' )
file = None # current file
for node in self:
ostream.putln('')
ostream.putln(' # ' + node.cstr() )
assert node.marked
comment = False
if node.name and node.name in names:
comment = True # redeclaration
#ostream.putln( node.deepstr( comment=True ) )
s = node.pyxstr(indent=1, names=names, tag_lookup = self.tag_lookup, cprefix=cprefix, **kw)
if s.split():
if comment:
s = "#"+s.replace( '\n', '\n#' ) + " # redeclaration "
if node.file != file:
file = node.file
#ostream.putln( 'cdef extern from "%s":' % file )
ostream.putln( ' # "%s"' % file )
ostream.putln( s )
ostream.putln('\n')
#s = '\n'.join(toks)
return ostream.join()
# XX warn when we find a python keyword XX
python_kws = """
break continue del def except exec finally pass print raise
return try global assert lambda yield
for while if elif else and in is not or import from """.split()
python_kws = dict( zip( python_kws, (None,)*len(python_kws) ) )
| omco/mpir | yasm/tools/python-yasm/pyxelator/genpyx.py | Python | lgpl-3.0 | 18,513 | 0.022471 |
import logging
from sevenbridges.meta.resource import Resource
from sevenbridges.meta.fields import HrefField, StringField
from sevenbridges.meta.transformer import Transform
logger = logging.getLogger(__name__)
class User(Resource):
"""
Central resource for managing users.
"""
_URL = {
'me': '/user',
'get': '/users/{id}',
'query': '/users',
'delete': '/users/{username}'
}
href = HrefField(read_only=True)
username = StringField(read_only=True)
email = StringField(read_only=True)
first_name = StringField(read_only=True)
last_name = StringField(read_only=True)
affiliation = StringField(read_only=True)
phone = StringField(read_only=True)
address = StringField(read_only=True)
state = StringField(read_only=True)
country = StringField(read_only=True)
zip_code = StringField(read_only=True)
city = StringField(read_only=True)
role = StringField(read_only=True)
def __eq__(self, other):
if type(other) is not type(self):
return False
return self is other or self.username == other.username
def __str__(self):
return f'<User: username={self.username}>'
@classmethod
def me(cls, api=None):
"""
Retrieves current user information.
:param api: Api instance.
:return: User object.
"""
api = api if api else cls._API
extra = {
'resource': cls.__name__,
'query': {}
}
logger.info('Fetching user information', extra=extra)
user_data = api.get(cls._URL['me']).json()
return User(api=api, **user_data)
@classmethod
def get(cls, user, api=None):
api = api if api else cls._API
user = Transform.to_user(user)
return super().get(id=user, api=api)
@classmethod
def query(cls, division, role=None, offset=None, limit=None, api=None):
"""Query division users
:param division: Division slug.
:param role: User role in division.
:param offset: Pagination offset.
:param limit: Pagination limit.
:param api: Api instance.
:return: Collection object.
"""
api = api or cls._API
params = {
'division': Transform.to_division(division),
}
if role:
params['role'] = role
return super()._query(
url=cls._URL['query'],
api=api,
offset=offset,
limit=limit,
**params
)
def disable(self, api=None):
"""
Disable user
:param api: Api instance.
:return:
"""
api = api or self._API
api.delete(
url=self._URL['delete'].format(username=self.username)
)
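# Illustrative usage (a sketch, not part of the library; assumes an already
# authenticated `api` client and made-up identifiers/roles):
#
#   me = User.me(api=api)                          # current account info
#   other = User.get('some-username', api=api)     # fetch a user by username
#   members = User.query(division='my-division', role='ADMIN', api=api)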
| sbg/sevenbridges-python | sevenbridges/models/user.py | Python | apache-2.0 | 2,815 | 0 |
import apsw
import datetime
from playhouse.apsw_ext import *
from playhouse.tests.base import ModelTestCase
db = APSWDatabase(':memory:')
class BaseModel(Model):
class Meta:
database = db
class User(BaseModel):
username = CharField()
class Message(BaseModel):
user = ForeignKeyField(User)
message = TextField()
pub_date = DateTimeField()
published = BooleanField()
class APSWTestCase(ModelTestCase):
requires = [Message, User]
def test_db_register_functions(self):
result = db.execute_sql('SELECT date_part(?, ?)', (
'day', '2015-01-02 03:04:05')).fetchone()[0]
self.assertEqual(result, 2)
result = db.execute_sql('SELECT date_trunc(?, ?)', (
'day', '2015-01-02 03:04:05')).fetchone()[0]
self.assertEqual(result, '2015-01-02')
def test_db_pragmas(self):
test_db = APSWDatabase(':memory:', pragmas=(
('cache_size', '1337'),
))
test_db.connect()
cs = test_db.execute_sql('PRAGMA cache_size;').fetchone()[0]
self.assertEqual(cs, 1337)
def test_select_insert(self):
users = ('u1', 'u2', 'u3')
for user in users:
User.create(username=user)
self.assertEqual([x.username for x in User.select()], ['u1', 'u2', 'u3'])
self.assertEqual([x.username for x in User.select().filter(username='x')], [])
self.assertEqual([x.username for x in User.select().filter(username__in=['u1', 'u3'])], ['u1', 'u3'])
dt = datetime.datetime(2012, 1, 1, 11, 11, 11)
Message.create(user=User.get(username='u1'), message='herps', pub_date=dt, published=True)
Message.create(user=User.get(username='u2'), message='derps', pub_date=dt, published=False)
m1 = Message.get(message='herps')
self.assertEqual(m1.user.username, 'u1')
self.assertEqual(m1.pub_date, dt)
self.assertEqual(m1.published, True)
m2 = Message.get(message='derps')
self.assertEqual(m2.user.username, 'u2')
self.assertEqual(m2.pub_date, dt)
self.assertEqual(m2.published, False)
def test_update_delete(self):
u1 = User.create(username='u1')
u2 = User.create(username='u2')
u1.username = 'u1-modified'
u1.save()
self.assertEqual(User.select().count(), 2)
self.assertEqual(User.get(username='u1-modified').id, u1.id)
u1.delete_instance()
self.assertEqual(User.select().count(), 1)
def test_transaction_handling(self):
dt = datetime.datetime(2012, 1, 1, 11, 11, 11)
def do_ctx_mgr_error():
with db.transaction():
User.create(username='u1')
raise ValueError
self.assertRaises(ValueError, do_ctx_mgr_error)
self.assertEqual(User.select().count(), 0)
def do_ctx_mgr_success():
with db.transaction():
u = User.create(username='test')
Message.create(message='testing', user=u, pub_date=dt, published=1)
do_ctx_mgr_success()
self.assertEqual(User.select().count(), 1)
self.assertEqual(Message.select().count(), 1)
@db.commit_on_success
def create_error():
u = User.create(username='test')
Message.create(message='testing', user=u, pub_date=dt, published=1)
raise ValueError
self.assertRaises(ValueError, create_error)
self.assertEqual(User.select().count(), 1)
@db.commit_on_success
def create_success():
u = User.create(username='test')
Message.create(message='testing', user=u, pub_date=dt, published=1)
create_success()
self.assertEqual(User.select().count(), 2)
self.assertEqual(Message.select().count(), 2)
def test_exists_regression(self):
User.create(username='u1')
self.assertTrue(User.select().where(User.username == 'u1').exists())
self.assertFalse(User.select().where(User.username == 'ux').exists())
| funkypawz/MakerRobot | peewee-master/playhouse/tests/test_apsw.py | Python | gpl-3.0 | 4,048 | 0.002223 |
from django.shortcuts import render_to_response, get_object_or_404
from django.http import JsonResponse, HttpResponseRedirect
from django.template import RequestContext
from django.core.urlresolvers import reverse
from django.views.decorators.csrf import csrf_exempt
from django.db.models import Avg
from django.db import IntegrityError
from .models import (Category,
TemplateFormat,
TemplateElement,
Document,
ExtractedElements)
from .forms import CreateCategoryForm, TemplateFormatForm, DocumentForm
from .serializers import get_templates_for_category
import json
def home(request):
"""
View for home page.
"""
return render_to_response("home.html", {},
context_instance=RequestContext(request))
def get_all_formats(request, category):
"""
View to get all template formats that exist for the particular category
"""
try:
category = Category.objects.get(slug=category)
except Category.DoesNotExist:
message = "Invalid category selected"
return JsonResponse({"error": "true", "message": message})
all_templates = get_templates_for_category(category)
return JsonResponse({"error": "false", "data": all_templates})
def create_category(request):
"""
View to create category
"""
if request.method == "GET":
form = CreateCategoryForm()
return render_to_response("create_category.html", {"form": form},
context_instance=RequestContext(request))
elif request.method == "POST":
form = CreateCategoryForm(data=request.POST)
if not form.is_valid():
return render_to_response("create_category.html",
{"form": form, "errors": form.errors},
context_instance=RequestContext(request))
try:
category = Category.objects.create(
category_name=form.cleaned_data['category_name'],
description=form.cleaned_data['description'])
except IntegrityError:
message = "Category with the same name already exist"
return render_to_response("create_category.html",
{"form": form, "errors": message},
context_instance=RequestContext(request))
redirect_url = reverse('create-template')
redirect_url += "?categ=%s" %(category.slug)
return HttpResponseRedirect(redirect_url)
def create_template_format(request):
"""
View to create new template format.
"""
if request.method == "GET":
form = TemplateFormatForm()
return render_to_response("create_format.html",
{"form": form},
context_instance=RequestContext(request))
elif request.method == "POST":
form = TemplateFormatForm(data=request.POST)
if not form.is_valid():
return render_to_response("create_format.html",
{"form": form, "errors": form.errors},
context_instance=RequestContext(request))
category = get_object_or_404(Category,
slug=form.cleaned_data['category'])
try:
template = TemplateFormat.objects.create(
category=category,
template_name=form.cleaned_data['template_name']
)
except IntegrityError:
message = "Template Name Already exist"
return render_to_response("create_format.html",
{"form": form, "errors": message},
context_instance=RequestContext(request))
redirect_url = reverse('upload_document')
redirect_url += "?categ=%s&format=%s" %(category.slug,
template.slug)
return HttpResponseRedirect(redirect_url)
def upload_document(request):
"""
View for handling document upload
"""
if request.method == "GET":
form = DocumentForm()
return render_to_response("upload_document.html",
{"form": form},
context_instance=RequestContext(request))
elif request.method == "POST":
form = DocumentForm(request.POST, request.FILES)
if not form.is_valid():
return render_to_response("upload_document.html",
{"form": form, "errors": form.errors},
context_instance=RequestContext(request))
template = get_object_or_404(TemplateFormat,
slug=form.cleaned_data['template'])
document = Document.objects.create(
template_format=template,
document_name=form.cleaned_data['document_name'],
document=request.FILES['document']
)
return HttpResponseRedirect(
reverse('particular_document',
kwargs={"unique_id": document.id}
))
@csrf_exempt
def particular_document(request, unique_id):
"""
    View to display a particular document and let the end user select
    elements from it on the frontend and save them.
"""
document = get_object_or_404(Document, id=unique_id)
all_elements = document.template_format.templateelement_set.all()
if request.method == "GET":
if document.extractedelements_set.all().count() > 0 :
return HttpResponseRedirect(reverse('preview_document',
kwargs={"unique_id":document.id}))
return render_to_response('document_selector.html',
{"document": document,
"elements": all_elements},
context_instance=RequestContext(request))
elif request.method == "POST":
data = json.loads(json.loads(request.POST['data']))
if document.image_resolution_x and document.image_resolution_y:
pass
else:
document.image_resolution_x = data["image_width"]
document.image_resolution_y = data["image_height"]
document.save()
template = document.template_format
document.extractedelements_set.all().delete()
for element_name in data["elements"]:
element = TemplateElement.objects.get_or_create(
template=template, element_name=element_name)[0]
extracted_element = ExtractedElements.objects.get_or_create(
document=document, element=element)[0]
extracted_element.x1_coordinate = data[element_name]["x"]
extracted_element.y1_coordinate = data[element_name]["y"]
extracted_element.block_width = data[element_name]["width"]
extracted_element.block_height = data[element_name]["height"]
extracted_element.save()
return JsonResponse({"error": "false",
"message": "Successfully saved elements"})
def all_documents(request):
"""
View to display all documents
"""
documents = Document.objects.all()
if request.method == "GET":
return render_to_response("all_documents.html",
{"documents": documents},
context_instance=RequestContext(request))
def document_preview(request, unique_id):
"""
    View to preview/update a document. Any document for which the elements
    have been created is eligible for preview/update.
"""
document = get_object_or_404(Document, id=unique_id)
elements = document.template_format.templateelement_set.all()
return render_to_response("document_elements.html",
{"document": document, "elements": elements},
context_instance=RequestContext(request))
def get_element_coordinates(request, unique_id, element):
"""
    Get approximate coordinates of a particular element for a given template
    format. The average of the element's values across all documents that
    contain it is used.
"""
try:
document = Document.objects.get(id=unique_id)
except Document.DoesNotExist:
return JsonResponse({
"error": "true",
"message": "Document Does not exist"
})
template = document.template_format
try:
element = TemplateElement.objects.get(template=template,
element_name__iexact=element)
except TemplateElement.DoesNotExist:
return JsonResponse({"error": "true",
"message": "Element Does not exist"})
avg_x = ExtractedElements.objects.filter(
element=element).aggregate(Avg('x1_coordinate'))
avg_y = ExtractedElements.objects.filter(
element=element).aggregate(Avg('y1_coordinate'))
avg_height = ExtractedElements.objects.filter(
element=element).aggregate(Avg('block_height'))
avg_width = ExtractedElements.objects.filter(
element=element).aggregate(Avg('block_width'))
return JsonResponse({"error": "false", "x": avg_x, "y": avg_y,
"height": avg_height, "width": avg_width})
| pulkitpahwa/smart-image-coordinates | smarter/base/views.py | Python | mit | 9,603 | 0.001041 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import httpretty
import json
import sure
from pyeqs import QuerySet, Filter
from pyeqs.dsl import Term, Sort, ScriptScore
from tests.helpers import homogeneous
@httpretty.activate
def test_create_queryset_with_host_string():
"""
Create a queryset with a host given as a string
"""
    # When I create a queryset
t = QuerySet("localhost", index="bar")
# And I have records
response = {
"took": 1,
"hits": {
"total": 1,
"max_score": 1,
"hits": [
{
"_index": "bar",
"_type": "baz",
"_id": "1",
"_score": 10,
"_source": {
"foo": "bar"
},
"sort": [
1395687078000
]
}
]
}
}
httpretty.register_uri(httpretty.GET, "http://localhost:9200/bar/_search",
body=json.dumps(response),
content_type="application/json")
# When I run a query
results = t[0:1]
# Then I see the response.
len(results).should.equal(1)
@httpretty.activate
def test_create_queryset_with_host_dict():
"""
Create a queryset with a host given as a dict
"""
    # When I create a queryset
connection_info = {"host": "localhost", "port": 8080}
t = QuerySet(connection_info, index="bar")
# And I have records
good_response = {
"took": 1,
"hits": {
"total": 1,
"max_score": 1,
"hits": [
{
"_index": "bar",
"_type": "baz",
"_id": "1",
"_score": 10,
"_source": {
"foo": "bar"
},
"sort": [
1395687078000
]
}
]
}
}
bad_response = {
"took": 1,
"hits": {
"total": 0,
"max_score": None,
"hits": []
}
}
httpretty.register_uri(httpretty.GET, "http://localhost:9200/bar/_search",
body=json.dumps(bad_response),
content_type="application/json")
httpretty.register_uri(httpretty.GET, "http://localhost:8080/bar/_search",
body=json.dumps(good_response),
content_type="application/json")
# When I run a query
results = t[0:1]
# Then I see the response.
len(results).should.equal(1)
results[0]["_source"]["foo"].should.equal("bar")
@httpretty.activate
def test_create_queryset_with_host_list():
"""
Create a queryset with a host given as a list
"""
    # When I create a queryset
connection_info = [{"host": "localhost", "port": 8080}]
t = QuerySet(connection_info, index="bar")
# And I have records
good_response = {
"took": 1,
"hits": {
"total": 1,
"max_score": 1,
"hits": [
{
"_index": "bar",
"_type": "baz",
"_id": "1",
"_score": 10,
"_source": {
"foo": "bar"
},
"sort": [
1395687078000
]
}
]
}
}
bad_response = {
"took": 1,
"hits": {
"total": 0,
"max_score": None,
"hits": []
}
}
httpretty.register_uri(httpretty.GET, "http://localhost:9200/bar/_search",
body=json.dumps(bad_response),
content_type="application/json")
httpretty.register_uri(httpretty.GET, "http://localhost:8080/bar/_search",
body=json.dumps(good_response),
content_type="application/json")
# When I run a query
results = t[0:1]
# Then I see the response.
len(results).should.equal(1)
results[0]["_source"]["foo"].should.equal("bar")
| Yipit/pyeqs | tests/unit/test_connection.py | Python | mit | 4,100 | 0.002439 |
from django.contrib.sitemaps import Sitemap
from geonode.maps.models import Layer, Map
class LayerSitemap(Sitemap):
changefreq = "never"
priority = 0.5
def items(self):
return Layer.objects.all()
def lastmod(self, obj):
return obj.date
class MapSitemap(Sitemap):
changefreq = "never"
priority = 0.5
def items(self):
return Map.objects.all()
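# Sketch of wiring these sitemaps into a URLconf (illustrative only, not part
# of this module):
#
#   from django.contrib.sitemaps import views as sitemap_views
#   sitemaps = {'layers': LayerSitemap, 'maps': MapSitemap}
#   url(r'^sitemap\.xml$', sitemap_views.sitemap, {'sitemaps': sitemaps})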
| cga-harvard/cga-worldmap | geonode/sitemap.py | Python | gpl-3.0 | 398 | 0.005025 |
# coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Verifies that we can correctly generate atomic2 input files."""
import os
from absl import app
from absl import flags
from absl import logging
from tensorflow.io import gfile
from smu.parser import smu_parser_lib
from smu.parser import smu_writer_lib
flags.DEFINE_string(
'input_glob', None, 'Glob of .dat files to read. '
'These files are expected to be in the SMU file format provided by Uni Basel.'
)
flags.DEFINE_string(
'atomic_input_dir', None,
'Directory containing .inp files named like x07_c2n2f3h3.253852.001.inp '
'These are the desired outputs')
flags.DEFINE_string('output_dir', None,
'If given, given to write files with mismatches')
flags.mark_flag_as_required('input_glob')
flags.mark_flag_as_required('atomic_input_dir')
FLAGS = flags.FLAGS
def main(argv):
if len(argv) > 1:
raise app.UsageError('Too many command-line arguments.')
atomic_writer = smu_writer_lib.AtomicInputWriter()
file_count = 0
conformer_count = 0
mismatches = 0
for filepath in gfile.glob(FLAGS.input_glob):
logging.info('Processing file %s', filepath)
file_count += 1
smu_parser = smu_parser_lib.SmuParser(filepath)
for conformer, _ in smu_parser.process_stage2():
conformer_count += 1
actual_contents = atomic_writer.process(conformer)
expected_fn = atomic_writer.get_filename_for_atomic_input(conformer)
with gfile.GFile(os.path.join(FLAGS.atomic_input_dir,
expected_fn)) as expected_f:
expected_contents = expected_f.readlines()
try:
smu_writer_lib.check_dat_formats_match(expected_contents,
actual_contents.splitlines())
except smu_writer_lib.DatFormatMismatchError as e:
mismatches += 1
print(e)
if FLAGS.output_dir:
with gfile.GFile(
os.path.join(
FLAGS.output_dir,
atomic_writer.get_filename_for_atomic_input(conformer)),
'w') as f:
f.write(actual_contents)
status_str = ('COMPLETE: Read %d files, %d conformers, %d mismatches\n' %
(file_count, conformer_count, mismatches))
logging.info(status_str)
print(status_str)
if __name__ == '__main__':
app.run(main)
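# Example invocation (a sketch; the paths are illustrative):
#   python smu_atomic_input_verifier.py \
#     --input_glob='stage2_files/*.dat' \
#     --atomic_input_dir=atomic_inputs \
#     --output_dir=/tmp/mismatched_inputs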
| google-research/google-research | smu/parser/smu_atomic_input_verifier.py | Python | apache-2.0 | 2,945 | 0.006452 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.utils.timezone
import jsonfield.fields
import model_utils.fields
class Migration(migrations.Migration):
dependencies = [
('travel_times', '0002_auto_20150717_1221'),
]
operations = [
migrations.CreateModel(
name='Report',
fields=[
('id', models.AutoField(primary_key=True, serialize=False, auto_created=True, verbose_name='ID')),
('created', model_utils.fields.AutoCreatedField(editable=False, default=django.utils.timezone.now, verbose_name='created')),
('modified', model_utils.fields.AutoLastModifiedField(editable=False, default=django.utils.timezone.now, verbose_name='modified')),
('postcode', models.CharField(max_length=14)),
('place_name', models.CharField(max_length=255, blank=True)),
('location_json', jsonfield.fields.JSONField()),
('top_categories', jsonfield.fields.JSONField()),
('top_companies', jsonfield.fields.JSONField()),
('latest_jobs', jsonfield.fields.JSONField()),
('travel_times_map', models.ForeignKey(to='travel_times.TravelTimesMap')),
],
options={
'abstract': False,
},
),
]
| lm-tools/situational | situational/apps/travel_report/migrations/0001_initial.py | Python | bsd-3-clause | 1,406 | 0.002845 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Elementwise operators"""
# pylint: disable=redefined-builtin
import tvm
from tvm import te
from . import tag
from . import cpp
@tvm.te.tag_scope(tag=tag.ELEMWISE)
def identity(x):
"""Take identity of input x.
Parameters
----------
x : tvm.te.Tensor
Input argument.
Returns
-------
y : tvm.te.Tensor
The result.
"""
# pylint: disable=unnecessary-lambda
return te.compute(x.shape, lambda *i: x(*i))
@tvm.te.tag_scope(tag=tag.ELEMWISE)
def negative(x):
"""Take negation of input x.
Parameters
----------
x : tvm.te.Tensor
Input argument.
Returns
-------
y : tvm.te.Tensor
The result.
"""
# pylint: disable=unnecessary-lambda
return te.compute(x.shape, lambda *i: -x(*i))
@tvm.te.tag_scope(tag=tag.ELEMWISE)
def exp(x):
"""Take exponential of input x.
Parameters
----------
x : tvm.te.Tensor
Input argument.
Returns
-------
y : tvm.te.Tensor
The result.
"""
return te.compute(x.shape, lambda *i: te.exp(x(*i)))
@tvm.te.tag_scope(tag=tag.ELEMWISE)
def erf(x):
"""Take gauss error function of input x.
Parameters
----------
x : tvm.te.Tensor
Input argument.
Returns
-------
y : tvm.te.Tensor
The result.
"""
return te.compute(x.shape, lambda *i: te.erf(x(*i)))
@tvm.te.tag_scope(tag=tag.ELEMWISE)
def tanh(x):
"""Take hyperbolic tanh of input x.
Parameters
----------
x : tvm.te.Tensor
Input argument.
Returns
-------
y : tvm.te.Tensor
The result.
"""
return te.compute(x.shape, lambda *i: te.tanh(x(*i)))
@tvm.te.tag_scope(tag=tag.ELEMWISE)
def tan(x):
"""Take tan of input x.
Parameters
----------
x : tvm.te.Tensor
Input argument.
Returns
-------
y : tvm.te.Tensor
The result.
"""
return te.compute(x.shape, lambda *i: te.tan(x(*i)))
@tvm.te.tag_scope(tag=tag.ELEMWISE)
def cos(x):
"""Take cos of input x.
Parameters
----------
x : tvm.te.Tensor
Input argument.
Returns
-------
y : tvm.te.Tensor
The result.
"""
return te.compute(x.shape, lambda *i: te.cos(x(*i)))
@tvm.te.tag_scope(tag=tag.ELEMWISE)
def cosh(x):
"""Take cosh of input x.
Parameters
----------
x : tvm.te.Tensor
Input argument.
Returns
-------
y : tvm.te.Tensor
The result.
"""
return te.compute(x.shape, lambda *i: te.cosh(x(*i)))
@tvm.te.tag_scope(tag=tag.ELEMWISE)
def sin(x):
"""Take sin of input x.
Parameters
----------
x : tvm.te.Tensor
Input argument.
Returns
-------
y : tvm.te.Tensor
The result.
"""
return te.compute(x.shape, lambda *i: te.sin(x(*i)))
@tvm.te.tag_scope(tag=tag.ELEMWISE)
def sinh(x):
"""Take sinh of input x.
Parameters
----------
x : tvm.te.Tensor
Input argument.
Returns
-------
y : tvm.te.Tensor
The result.
"""
return te.compute(x.shape, lambda *i: te.sinh(x(*i)))
@tvm.te.tag_scope(tag=tag.ELEMWISE)
def acos(x):
"""Take arc cos of input x.
Parameters
----------
x : tvm.te.Tensor
Input argument.
Returns
-------
y : tvm.te.Tensor
The result.
"""
return te.compute(x.shape, lambda *i: te.acos(x(*i)))
@tvm.te.tag_scope(tag=tag.ELEMWISE)
def acosh(x):
"""Take arc cosh of input x.
Parameters
----------
x : tvm.te.Tensor
Input argument.
Returns
-------
y : tvm.te.Tensor
The result.
"""
return te.compute(x.shape, lambda *i: te.acosh(x(*i)))
@tvm.te.tag_scope(tag=tag.ELEMWISE)
def asin(x):
"""Take arc sin of input x.
Parameters
----------
x : tvm.te.Tensor
Input argument.
Returns
-------
y : tvm.te.Tensor
The result.
"""
return te.compute(x.shape, lambda *i: te.asin(x(*i)))
@tvm.te.tag_scope(tag=tag.ELEMWISE)
def asinh(x):
"""Take arc sinh of input x.
Parameters
----------
x : tvm.te.Tensor
Input argument.
Returns
-------
y : tvm.te.Tensor
The result.
"""
return te.compute(x.shape, lambda *i: te.asinh(x(*i)))
@tvm.te.tag_scope(tag=tag.ELEMWISE)
def atan(x):
"""Take atan of input x.
Parameters
----------
x : tvm.te.Tensor
Input argument.
Returns
-------
y : tvm.te.Tensor
The result.
"""
return te.compute(x.shape, lambda *i: te.atan(x(*i)))
@tvm.te.tag_scope(tag=tag.ELEMWISE)
def atanh(x):
"""Take atanh of input x.
Parameters
----------
x : tvm.te.Tensor
Input argument.
Returns
-------
y : tvm.te.Tensor
The result.
"""
return te.compute(x.shape, lambda *i: te.atanh(x(*i)))
@tvm.te.tag_scope(tag=tag.ELEMWISE)
def floor(x):
"""Take floor of input x.
Parameters
----------
x : tvm.te.Tensor
Input argument.
Returns
-------
y : tvm.te.Tensor
The result.
"""
return te.compute(x.shape, lambda *i: te.floor(x(*i)))
@tvm.te.tag_scope(tag=tag.ELEMWISE)
def ceil(x):
"""Take ceil of input x.
Parameters
----------
x : tvm.te.Tensor
Input argument.
Returns
-------
y : tvm.te.Tensor
The result.
"""
return te.compute(x.shape, lambda *i: te.ceil(x(*i)))
def sign(x):
"""Returns -1, 0, 1 based on sign of x.
Parameters
----------
x : tvm.te.Tensor
Input argument.
Returns
-------
y : tvm.te.Tensor
The result.
"""
return cpp.sign(x)
@tvm.te.tag_scope(tag=tag.ELEMWISE)
def trunc(x):
"""Take truncated value of the input of x, element-wise.
Parameters
----------
x : tvm.te.Tensor
Input argument.
Returns
-------
y : tvm.te.Tensor
The result.
"""
return te.compute(x.shape, lambda *i: te.trunc(x(*i)))
@tvm.te.tag_scope(tag=tag.ELEMWISE)
def abs(x):
"""Take absolute value of the input of x, element-wise.
Parameters
----------
x : tvm.te.Tensor
Input argument.
Returns
-------
y : tvm.te.Tensor
The result.
"""
return te.compute(x.shape, lambda *i: te.abs(x(*i)))
@tvm.te.tag_scope(tag=tag.ELEMWISE)
def isnan(x):
"""Check if value of x is NaN, element-wise.
Parameters
----------
x : tvm.te.Tensor
Input argument.
Returns
-------
y : tvm.te.Tensor
The result.
"""
return te.compute(x.shape, lambda *i: te.isnan(x(*i)))
@tvm.te.tag_scope(tag=tag.ELEMWISE)
def isfinite(x):
"""Check if value of x is finite, element-wise.
Parameters
----------
x : tvm.te.Tensor
Input argument.
Returns
-------
y : tvm.te.Tensor
The result.
"""
return te.compute(x.shape, lambda *i: te.isfinite(x(*i)))
@tvm.te.tag_scope(tag=tag.ELEMWISE)
def isinf(x):
"""Check if value of x is infinite, element-wise.
Parameters
----------
x : tvm.te.Tensor
Input argument.
Returns
-------
y : tvm.te.Tensor
The result.
"""
return te.compute(x.shape, lambda *i: te.isinf(x(*i)))
@tvm.te.tag_scope(tag=tag.ELEMWISE)
def round(x):
"""Round elements of x to nearest integer.
Parameters
----------
x : tvm.te.Tensor
Input argument.
Returns
-------
y : tvm.te.Tensor
The result.
"""
return te.compute(x.shape, lambda *i: te.round(x(*i)))
@tvm.te.tag_scope(tag=tag.ELEMWISE)
def log(x):
"""Take logarithm of input x.
Parameters
----------
x : tvm.te.Tensor
Input argument.
Returns
-------
y : tvm.te.Tensor
The result.
"""
return te.compute(x.shape, lambda *i: te.log(x(*i)))
@tvm.te.tag_scope(tag=tag.ELEMWISE)
def log2(x):
"""Take logarithm to the base 2 of input x.
Parameters
----------
x : tvm.te.Tensor
Input argument.
Returns
-------
y : tvm.te.Tensor
The result.
"""
return te.compute(x.shape, lambda *i: te.log2(x(*i)))
@tvm.te.tag_scope(tag=tag.ELEMWISE)
def log10(x):
"""Take logarithm to the base 10 of input x.
Parameters
----------
x : tvm.te.Tensor
Input argument.
Returns
-------
y : tvm.te.Tensor
The result.
"""
return te.compute(x.shape, lambda *i: te.log10(x(*i)))
@tvm.te.tag_scope(tag=tag.ELEMWISE)
def sqrt(x):
"""Take square root of input x.
Parameters
----------
x : tvm.te.Tensor
Input argument.
Returns
-------
y : tvm.te.Tensor
The result.
"""
return te.compute(x.shape, lambda *i: te.sqrt(x(*i)))
@tvm.te.tag_scope(tag=tag.ELEMWISE)
def rsqrt(x):
"""Take inverse square root of input x.
Parameters
----------
x : tvm.te.Tensor
Input argument.
Returns
-------
y : tvm.te.Tensor
The result.
"""
return te.compute(x.shape, lambda *i: te.rsqrt(x(*i)))
@tvm.te.tag_scope(tag=tag.ELEMWISE)
def sigmoid(x):
"""Take sigmoid tanh of input x.
Parameters
----------
x : tvm.te.Tensor
Input argument.
Returns
-------
y : tvm.te.Tensor
The result.
"""
return te.compute(x.shape, lambda *i: te.sigmoid(x(*i)))
@tvm.te.tag_scope(tag=tag.ELEMWISE)
def left_shift(x, n):
"""Take n bits left shift of input x.
Parameters
----------
x : tvm.te.Tensor
Input argument.
n : int
Number of bits.
Returns
-------
y : tvm.te.Tensor
The result.
"""
return te.compute(x.shape, lambda *i: x(*i) << n)
@tvm.te.tag_scope(tag=tag.ELEMWISE)
def right_shift(x, n):
"""Take n bits right shift of input x.
Parameters
----------
x : tvm.te.Tensor
Input argument.
n : int
Number of bits.
Returns
-------
y : tvm.te.Tensor
The result.
"""
return te.compute(x.shape, lambda *i: x(*i) >> n)
@tvm.te.tag_scope(tag=tag.ELEMWISE)
def clip(x, a_min, a_max):
"""Clip (limit) the values in an array. Given an interval, values
outside the interval are clipped to the interval edges.
Parameters
----------
x : tvm.te.Tensor
Input argument.
a_min : int or float
Minimum value.
a_max : int or float
Maximum value.
Returns
-------
y : tvm.te.Tensor
The result.
"""
def _compute(*indices):
value = x(*indices)
const_min = tvm.tir.const(a_min, value.dtype)
const_max = tvm.tir.const(a_max, value.dtype)
return tvm.te.max(tvm.te.min(value, const_max), const_min)
return te.compute(x.shape, _compute)
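# Example (a sketch): clamp an int8 tensor to its representable range.
#   x = te.placeholder((1, 16), dtype="int8", name="x")
#   y = clip(x, -128, 127)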
@tvm.te.tag_scope(tag=tag.ELEMWISE)
def fixed_point_multiply(x, multiplier, shift):
"""Fixed point multiplication between data and a fixed point
constant expressed as multiplier * 2^(-shift), where multiplier
is a Q-number with 31 fractional bits
Parameters
----------
x : tvm.te.Tensor or Expr
Input argument.
multiplier : int
Multiplier of a fixed floating point number described as multiplier*2^(-shift).
shift : int
Shift of a fixed floating point number described as multiplier*2^(-shift).
Returns
-------
y : tvm.te.Tensor
The result.
"""
def _compute(*indices):
value = x(*indices)
return tvm.tir.q_multiply_shift(
value,
tvm.tir.const(multiplier, "int32"),
tvm.tir.const(31, "int32"),
tvm.tir.const(shift, "int32"),
)
return te.compute(x.shape, _compute)
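# Reading of the convention stated above (a sketch): a multiplier of 1 << 30
# (i.e. 0.5 as a Q31 number) together with shift = 1 would encode the fixed
# point constant 0.5 * 2**-1 = 0.25.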
def cast(x, dtype, span=None):
"""Cast input to specified data type.
Parameters
----------
x : tvm.te.Tensor or Expr
Input argument.
dtype : str
Data type.
span : Optional[Span]
The location of the cast in the source.
Returns
-------
y : tvm.te.Tensor
The result.
"""
if isinstance(x, te.tensor.Tensor):
return te.compute(x.shape, lambda *i: x(*i).astype(dtype), tag=tag.ELEMWISE)
# pylint: disable=import-outside-toplevel
from tvm.tir import _ffi_api
return _ffi_api._cast(dtype, x, span)
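# Example (a sketch): y = cast(x, "float16") converts a te.Tensor elementwise,
# while cast(tvm.tir.const(1, "int64"), "int32") casts a bare PrimExpr.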
def reinterpret(x, dtype):
"""Reinterpret input to specified data type.
Parameters
----------
x : tvm.te.Tensor
Input argument.
dtype : str
Data type.
Returns
-------
y : tvm.te.Tensor
The result.
"""
return cpp.reinterpret(x, dtype)
def fast_exp(x):
"""Take exponential of input x using fast_exp implementation
Parameters
----------
x : tvm.te.Tensor
Input argument.
Returns
-------
y : tvm.te.Tensor
The result.
"""
return cpp.fast_exp(x, x.dtype, tag.ELEMWISE)
def fast_tanh(x):
"""Take hyperbolic tangent of input x using fast_tanh implementation
Parameters
----------
x : tvm.te.Tensor
Input argument.
Returns
-------
y : tvm.te.Tensor
The result.
"""
return cpp.fast_tanh(x, x.dtype, tag.ELEMWISE)
def fast_erf(x):
"""Take gauss error function of input x using fast_erf implementation.
Parameters
----------
x : tvm.te.Tensor
Input argument.
Returns
-------
y : tvm.te.Tensor
The result.
"""
return cpp.fast_erf(x, x.dtype, tag.ELEMWISE)
def ceil_log2(x):
"""Compute integer ceil log2 with a special code path for vulkan
SPIR-V does not support log2 on fp64. Instead, we compute integer ceil_log2 via clz
intrinsic when the target is vulkan.
Parameters
----------
x : tvm.te.Tensor
Input argument.
Returns
-------
y : tvm.te.Tensor
The result.
"""
if not isinstance(x, tvm.tir.PrimExpr):
x = tvm.tir.const(x)
if "float" in x.dtype:
return tvm.tir.ceil(tvm.tir.log2(x))
if "vulkan" in tvm.target.Target.current().kind.name:
clz = tvm.tir.clz(x)
bits = int(x.dtype[-2:])
res = tvm.tir.if_then_else(x & (x - 1) == 0, bits - clz - 1, bits - clz)
if res.dtype != x.dtype:
return cast(res, x.dtype)
return res
return cast(tvm.tir.ceil(tvm.tir.log2(cast(x, "float64"))), x.dtype)
| dmlc/tvm | python/tvm/topi/math.py | Python | apache-2.0 | 15,214 | 0.000329 |
import docapi
import docbuilder
import testtools
g_trace = """\
lint-syntax.js:3:32 JSLintWarning: Unexpected trailing space.
lint-syntax.js:7:10 JSLintWarning: Redefinition of 'some_function' from line 2.
lint-syntax.js:8:16 JSLintWarning: Expected '{' and instead saw 'return'.
lint-syntax.js:9:2 JSLintWarning: Use spaces, not tabs.
lint-semantics.js:3:10 JSLintWarning: Unused 'find_key'.
lint-semantics.js:3:20 JSLintWarning: Unexpected space between '(' and 'obj'.
lint-semantics.js:3:29 JSLintWarning: Unexpected space between 'val' and ')'.
lint-semantics.js:5:5 JSLintWarning: Unexpected 'for'.
lint-semantics.js:5:8 JSLintWarning: Expected one space between 'for' and '('.
lint-semantics.js:5:10 JSLintWarning: Unexpected space between '(' and 'key'.
lint-semantics.js:5:21 JSLintWarning: Unexpected space between 'obj' and ')'.
lint-semantics.js:6:11 JSLintWarning: Expected one space between 'if' and '('.
lint-semantics.js:6:13 JSLintWarning: Unexpected space between '(' and 'obj'.
lint-semantics.js:7:26 JSLintWarning: Unexpected trailing space.
"""
def DefineDocument( doc ):
docbuilder.parse( __file__.replace( '.py', '.xml' ), doc )
doc.MakeReadOnly()
view = testtools.create_view_with_default_procctx( doc, None, docapi.IProcessingContext.PM_RUNTIME_STREAM )
testtools.should_trace( lambda: view.Update(), g_trace, False )
| opentext/storyteller | docplatform/distribution/py/pfdesigns/javascript/lint.py | Python | apache-2.0 | 1,390 | 0.010791 |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
# ------------------------------------------------------------------------------
# Name: files.py
# Package: utl
# Project: utl
#
# Created: 10.10.13 11:37
# Copyright 2013-2016 © Constantin Roganov
# License: The MIT License
# ------------------------------------------------------------------------------
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ------------------------------------------------------------------------------
"""File related utilities"""
from __future__ import absolute_import, unicode_literals, print_function
from builtins import *
import os
import fileinput
import functools
from .text import lines_parser, lines_stripped
__author__ = 'Constantin Roganov'
binary_file = functools.partial(open, mode='rb')
binary_file.__doc__ = 'Open binary file for reading'
writable_binary_file = functools.partial(open, mode='wb')
writable_binary_file.__doc__ = 'Open binary file for writing'
text_file = functools.partial(open, mode='r')
text_file.__doc__ = 'Open text file for reading'
writable_text_file = functools.partial(open, mode='w')
writable_text_file.__doc__ = 'Open text file for writing'
utf8_bom_text_file = functools.partial(open, mode='r', encoding='utf_8_sig')
utf8_bom_text_file.__doc__ = 'Open UTF8 text file with BOM for reading'
def file_lines_count(filename):
"""Count lines in a text file"""
# source:
# http://stackoverflow.com/questions/845058/how-to-get-line-count-cheaply-in-python
with open(filename) as fo:
lines = 0
buf_size = 1024 * 1024
read_f = fo.read # loop optimization
file_has_data = False
buf = read_f(buf_size)
if buf:
file_has_data = True
while buf:
lines += buf.count('\n')
buf = read_f(buf_size)
# nonempty file has 1 line at least
if file_has_data:
lines += 1
return lines
def _reverse_blocks_generator(fd, block_size=4096):
"""Return generator which reads file as series of blocks from the tail of file up to to head.
The data itself is in normal order, only the order of the blocks is reversed.
ie. "hello world" -> ["ld","wor", "lo ", "hel"]
Note that the file must be opened in binary mode.
"""
# source:
# http://cybervadim.blogspot.ru/2009/10/reverse-file-iterator-in-python.html
if 'b' not in fd.mode.lower():
raise TypeError('File must be opened in binary mode')
size = os.stat(fd.name).st_size
fullblocks, lastblock = divmod(size, block_size)
# The first(end of file) block will be short, since this leaves
# the rest aligned on a blocksize boundary. This may be more
# efficient than having the last (first in file) block be short
fd.seek(-lastblock, os.SEEK_END)
yield fd.read(lastblock)
for i in range(fullblocks - 1, -1, -1):
fd.seek(i * block_size)
yield fd.read(block_size)
def reverse_lines(fd, keepends=False, block_size=4096, encoding='ascii'):
"""Iterate through the lines of a file in reverse order.
If keepends is true, line endings are kept as part of the line.
Return generator.
"""
# source:
# http://cybervadim.blogspot.ru/2009/10/reverse-file-iterator-in-python.html
buf = ''
for block in _reverse_blocks_generator(fd, block_size):
buf = block.decode(encoding) + buf
lines = buf.splitlines(keepends)
# Return all lines except the first (since may be partial)
if lines:
lines.reverse()
buf = lines.pop() # Last line becomes end of new first line.
for line in lines:
yield line
yield buf # First line.
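# Example (a sketch; the file name and callee are illustrative):
#
#   with binary_file('app.log') as fd:
#       for line in reverse_lines(fd):
#           handle(line)  # lines arrive from last to first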
def filelist_processor(iterable, parse_line, progress_co=None):
"""Generator of parsed lines from each text file (path) in iterable.
iterable - sequence of file paths or None (there sys.argv[1:] will be used)
parse_line - callable for processing of single line
progress_co - coroutine with API like below:
progress_co = progress_generator()
progress_co.send((filename, lines_read, lines_total, lines_processed))
...
progress_co.send(lines_saved) # finalizing work
Generates output data in format produced by parse_line()
"""
files = None if iterable is None else lines_stripped(iterable)
inp = fileinput.input(files=files)
pth, name, lines_total = (None, ) * 3
for stats, data in lines_parser(lines_stripped(inp), parse_line):
if inp.isfirstline() or inp.filename() != pth:
pth = inp.filename()
name = os.path.basename(pth)
lines_total = file_lines_count(pth)
if progress_co:
progress_co.send((name, inp.filelineno(), lines_total, stats.processed))
yield data
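# Sketch of a progress coroutine compatible with the API described above
# (names and output format are illustrative; prime it with next() before the
# first send):
#
#   def progress_generator():
#       while True:
#           msg = (yield)
#           if isinstance(msg, tuple):
#               name, read, total, processed = msg
#               print('%s: %d/%d lines read, %d parsed' % (name, read, total, processed))
#           else:
#               print('%d lines saved' % msg)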
def offset_iter(fd):
r"""Generator of pairs (offset_from_beginning_of_file, string) for file object 'fd'.
"""
# source: http://bytes.com/topic/python/answers/855199-file-tell-loop
tell = fd.tell
readline = fd.readline
while True:
addr = tell()
line = readline()
if not line:
break
yield addr, line
| brake/python-utl | utl/files.py | Python | mit | 6,385 | 0.000783 |
#!/usr/bin/env python2.7
from bioblend.galaxy import GalaxyInstance
import requests
import datetime
import argparse
requests.packages.urllib3.disable_warnings()
def parse_args():
args = argparse.ArgumentParser(description="Rename history items using a tabular file." +"\n" +
"Example usage: python rename_hist_items.py -url misssissippi.snv.jussieu.fr \
-key $your_api_key -hid $your_history_id -table $your_tabular_file \n \
See test-data/sample_table.tab for an example file.")
args.add_argument("-url", "--galaxy_url", required=True, help="url of galaxy instance")
args.add_argument("-key", "--api_key", required=True, help="api key for galaxy instance" )
args.add_argument("-hid", "--history_id", required=True, help="History id of hitory containing files to be renamed")
args.add_argument("-table", "--rename_table", required=True, type=file,
help="tab-seperated file with first column current filename,\
and second column the desired name")
return args.parse_args()
def return_datetime(string_representation):
"""
    Convert a string timestamp into a datetime object; intended to help find
    the most recently modified history. Currently unused, may be used in the future.
"""
date, time = string_representation.split('T')
return datetime.datetime.strptime(date + ' ' + time, "%Y-%m-%d %H:%M:%S.%f")
def get_rename_list(rename_table):
return [(line.split('\t')[0],line.split('\t')[1].strip()) for line in rename_table]
def get_instance(url, api_key):
return GalaxyInstance(url, api_key)
def get_name_id_d(gi, hid):
return {dataset[u'name']:dataset[u'id'] for dataset in gi.histories.show_history(hid, contents=True)}
def update_names(gi, hid, rename_list, name_id_d ):
for old_name, new_name in rename_list:
dataset_id = name_id_d[old_name]
gi.histories.update_dataset(history_id=hid, dataset_id=dataset_id, name=new_name)
def main():
args = parse_args()
hid = args.history_id
rename_list = get_rename_list(args.rename_table)
gi = get_instance(args.galaxy_url, args.api_key)
name_id_d = get_name_id_d(gi, hid)
rval = update_names(gi, hid, rename_list, name_id_d)
if __name__ == "__main__":
main()
| ARTbio/tools-artbio | scripts/helper_scripts/rename_history_items/rename_hist_items.py | Python | mit | 2,334 | 0.008997 |
## @file
# Module to gather dependency information for ASKAP packages
#
# @copyright (c) 2006 CSIRO
# Australia Telescope National Facility (ATNF)
# Commonwealth Scientific and Industrial Research Organisation (CSIRO)
# PO Box 76, Epping NSW 1710, Australia
# atnf-enquiries@csiro.au
#
# This file is part of the ASKAP software distribution.
#
# The ASKAP software distribution is free software: you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the License
# or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
#
# @author Malte Marquarding <malte.marquarding@csiro.au>
#
import glob
import os
import socket
import askapdev.rbuild.utils as utils
from ..exceptions import BuildError
from ordereddict import OrderedDict
## An object to hold information about a dependency tree for a nominated package
# This package can be added manually, or if None specified, the information
# will be taken from a file 'dependencies.{default,hostname}' in the current
# directory.
#
# Example:
# @code
# f = file("dependencies.default")
# f.write("numpy=3rdParty/numpy/numpy-1.0.2;==1.0.2\n")
# f.close()
# from askapdev.rbuild.dependencies import Dependency
# dep = Dependency()
# dep.add_package()
# @endcode
#
class Dependency:
## Construct an empty dependency tree
# @param self the object reference
# @param silent minimal feedback
    # @param autobuild warn rather than fail on multiple version dependencies. XXX
def __init__(self, silent=True, autobuild=False):
## The ASKAP top-level directory
self.ASKAPROOT = os.environ.get("ASKAP_ROOT")
if self.ASKAPROOT is None:
msg = "ASKAP_ROOT environment variable is not defined"
raise BuildError(msg)
#
self.DEPFILE = "dependencies" # The basename of the dependency file
self.INSTALL_SUBDIR = "install"
self._deps = OrderedDict()
#
self._bindirs = []
self._incdirs = []
self._libdirs = []
self._rootdirs = []
#
self._cppflags = [] # XXX "defs" in package.info. LOFAR/log4cxx
#
self._env = []
self._jars = []
self._libs = []
self._packages = []
#
self._ldlibpath = ""
self._pypath = ""
#
self._autobuild = autobuild
        self._silent = silent # minimal output
self.selfupdate = False # should object request updates from svn
def q_print(self, msg):
if self._silent:
return
utils.q_print(msg)
## Get the path of the specified dependency package
# @param self the current object
# @param key the label of the package dependency
# @return the path (relative to ASKAP_ROOT) to the package
def get_dep_path(self, key):
return self._deps[key]["path"]
# Used by "in" test.
# object.__contains__(self, item)
#
# Called to implement membership test operators. Should return true if item
# is in self, false otherwise. For mapping objects, this should consider
# the keys of the mapping rather than the values or the key-item pairs.
#
# For objects that do not define __contains__(), the membership test first
# tries iteration via __iter__(), then the old sequence iteration protocol
# via __getitem__(), see this section in the language reference.
#
# http://docs.python.org/reference/datamodel.html
def __contains__(self, key):
return self._deps.has_key(key)
## Get the absolute path to the dependency packages installed location
# @param self the current object
# @param key the label of the package dependency
# @return the absolute path to the package installed location
def get_install_path(self, key):
rel_path = self._deps[key]["path"]
full_path = os.path.join(self.ASKAPROOT, rel_path, self.INSTALL_SUBDIR)
return os.path.abspath(full_path)
def get_path(self):
return os.path.pathsep.join(self._bindirs)
    ## Get the libraries retrieved in the dependency analysis
# @param self the object reference
# @return a list of library names
def get_libs(self, mapped=False):
if mapped:
return self._libs[:]
else:
return [ m[0] for m in self._libs ]
## Get the environment variables retrieved in the dependency analysis
# @param self the object reference
# @return a dictionary of ENVVAR => value pairs
def get_env(self):
return dict([i.split("=") for i in self._env])
    ## Get the java classpath for the dependencies
# @param self the object reference
# @return a classpath string of the form x/y/z.jar:a/b/c.jar
def get_classpath(self):
return os.path.pathsep.join(self._jars)
## Get the root directories of the tags retrieved in the dependency analysis
# @param self the object reference
# @return a list of directory names
def get_rootdirs(self, mapped=False): # XXX used in ant.py builder with mapped=true.
if mapped:
return [ (k, os.path.join( self.ASKAPROOT, v['path'])) \
for k,v in self._deps.iteritems()]
return self._rootdirs[:]
## Get the LIBRARY directories retrieved in the dependency analysis
# @param self the object reference
# @param mapped return directory tuples (rootdir, libdir)
# @return a list of library directories or tuples of rootdirs and libdirs
def get_librarydirs(self, mapped=False):
if mapped:
return self._libdirs[:]
else:
return [ m[0] for m in self._libdirs ]
## Get the LD_LIBRARY_PATH accumulated in the dependency analysis
# @param self the object reference
# @return a string representing the LD_LIBRARY_PATH
def get_ld_library_path(self):
return self._ldlibpath.strip(":")
## Get the INCLUDE directories retrieved in the dependency analysis
# @param self the object reference
# @return a list of header file directories
def get_includedirs(self):
return self._incdirs[:]
## Get the CPPFLAGS retrieved in the dependency analysis
# @param self the object reference
# @return a list preprocessor flags
def get_cppflags(self):
return self._cppflags[:]
def get_pythonpath(self):
return self._pypath.strip(":")
## Get a list of doxygen tag files in the dependencies. This is used for
# cross-referencing the documentation
# @todo Re-enable: This has been disabled until it is working for python
# @param self the object reference
# @return a list of TAGFILES entries
# XXX used only in scons_tools/askap_package.py
def get_tagfiles(self):
tagfiles = []
for pth in self._rootdirs:
tagname = utils.tag_name(pth)
tagpath = os.path.join(pth, tagname)
if os.path.exists(tagpath):
tagfiles.append('"%s=%s/html"' % (tagpath, pth) )
return tagfiles
def _get_dependencies(self, package):
codename = utils.get_platform()['codename']
hostname = socket.gethostname().split(".")[0]
for ext in ['default', codename, hostname]:
if ext:
depfile = '%s.%s' % (self.DEPFILE, ext)
if package:
depfile = os.path.join(self.ASKAPROOT, package, depfile)
if self.selfupdate:
# always update if it is the "root/target" package
basedir = os.path.split(depfile)[0] or "."
if not os.path.exists(basedir):
utils.update_tree(basedir)
self._get_depfile(depfile)
def _get_depfile(self, depfile, overwrite=False):
if not os.path.exists(depfile):
# assume no dependencies
return
dfh = file(depfile)
for line in dfh.readlines():
line = line.strip()
if line.startswith("#"): continue
kv = line.split("=", 1)
if len(kv) == 2:
key = kv[0].strip()
value = kv[1].strip()
# see if the file explicitly names any libs
lspl = value.split(";")
libs = None
if len(lspl) > 1:
libs = lspl[1].strip().split()
value = lspl[0]
self._add_dependency(key, value, libs, overwrite)
if not value.startswith("/"):
# recurse into ASKAP dependencies
# otherwise just move on as we specified system dependency
# which will not have a dependency file
self._packages.append(value)
self._get_dependencies(value)
dfh.close()
def _get_info(self, packagedir):
info = {
# A single directory path relative to the install directory.
'bindir': 'bin',
'distdir': 'dist',
'incdir': 'include',
'libdir': 'lib',
# Space separated lists. XXX Default should be '[]'?
'defs' : None,
'env': None,
'jars': None,
'libs': None,
# Define a single python module name and version.
# e.g. pymodule=numpy==1.2.0
'pymodule': None,
}
sslists = ['defs', 'env', 'jars', 'libs']
infofile = os.path.join(packagedir, 'package.info')
if os.path.exists(infofile):
f = file(infofile)
for line in f.readlines():
line = line.strip()
if line.startswith("#"): continue
kv = line.split("=", 1)
if len(kv) == 2:
key = kv[0].strip()
value = kv[1].strip()
if key in info.keys():
if key in sslists:
info[key] = value.split()
else:
info[key] = value
f.close()
return info
def _add_dependency(self, key, value, libs, overwrite=False):
if self._deps.has_key(key):
# deal with potential symbolic links for 'default' packages
paths = [self._deps[key]["path"], value]
outpaths = []
for pth in paths:
if not pth.startswith("/"):
pth = os.path.join(os.environ["ASKAP_ROOT"], pth)
pth = os.path.realpath(pth)
outpaths.append(pth)
if outpaths[0] == outpaths[1]:
if libs:
if self._deps[key]["libs"] is not None:
# prepend the libs
self._deps[key]["libs"] = libs + self._deps[key]["libs"]
else:
self._deps[key]["libs"] = libs
self._deps.toend(key)
else:
# another dependency, so move it to the end, so link
# order is correct
self._deps.toend(key)
return
else:
if overwrite:
self._deps[key]["path"] = value
self.q_print("info: Overwriting default package dependency '%s' with host specific package (from %s)" % (key, value) )
elif self._autobuild: # XXX maybe a mistake?
self.q_print("warn: Possible multiple version dependency \n\
%s != %s" % (self._deps[key]["path"], value))
else:
raise BuildError("Multiple version dependency \n\
%s != %s" % (self._deps[key]["path"], value))
else:
self.q_print("info: Adding package dependency '%s' (from %s)" %
(key, value))
# now update the dependency itself
# XXX only used in Tools/scons_tools/askap_package.py
if self.selfupdate:
utils.update_tree(value)
self._deps[key] = {"path": value, "libs": libs}
def _remove_duplicates(self, values):
# find unique elements
libs = [v[0] for v in values]
for k in set(libs):
# remove all but last duplicate entry
while libs.count(k) > 1:
idx = libs.index(k)
libs.pop(idx)
values.pop(idx)
## Add a ThirdPartyLibrary or ASKAP package to the environment
# This will add the package path in ASKAP_ROOT
# @param self the object reference
# @param pkgname The name of the package as in the repository, e.g.
# lapack. Default None means that this is defined in local
# dependencies.xyz
# @param tag The location of the package, e.g.
# 3rdParty/lapack-3.1.1/lapack-3.1.1
# @param libs The name of the libraries to link against,
# default None is the same as the pkgname
# @param libdir The location of the library dir relative to the package,
# default None which will use settings in the package.info file
# @param incdir The location of the include dir relative to the package,
# default None which will use settings in the package.info file
# @param pymodule the 'require' statement to specify this dependency
# statement, e.g. "askap.loghandlers==current"
def add_package(self, pkgname=None, tag=None,
libs=None, libdir=None, incdir=None, bindir=None,
pymodule=None):
self._deps = OrderedDict()
if pkgname:
if not tag:
                raise BuildError("No tag specified")
if self.selfupdate:
#if not os.path.exists(tag):
utils.update_tree(tag)
self._add_path(pkgname, self.ASKAPROOT, tag, libs, libdir,
incdir, bindir, pymodule)
self.q_print("info: Adding package '%s'" % pkgname)
if tag:
tag = os.path.join(self.ASKAPROOT, tag)
self._get_dependencies(tag)
parent = ''
for key, value in self._deps.iteritems():
self._add_path(key, self.ASKAPROOT, value["path"],
libs=value["libs"], parent=parent)
parent = value["path"]
# Add a ASKAP repository path to the environment
# This sets up LIBPATH and CPPPATH
def _add_path(self, pkgname, root, tag, parent='', libs=None,
libdir=None, incdir=None, bindir=None,
pymodule=None):
loc = None
if tag.startswith("/"): # external package
loc = tag
else: # ASKAP package or 3rdParty library
loc = os.path.join(root, tag)
rloc = os.path.relpath(loc, self.ASKAPROOT)
if not os.path.exists(loc):
raise BuildError("Dependency directory '%s' does not exist (requested by %s)." % (rloc,parent))
self._rootdirs += [loc]
info = self._get_info(loc) # get optional package info
        idir = os.path.join(loc, self.INSTALL_SUBDIR) # actual installation.
if not bindir: # add bin directory
bindir = info["bindir"]
if bindir: # None means disabled in info file
pth = os.path.join(idir, bindir)
if os.path.exists(pth):
self._bindirs += [pth]
if not incdir: # add include directory
incdir = info["incdir"]
if incdir: # None means disabled in info file
pth = os.path.join(idir, incdir)
if not os.path.exists(pth):
if not pymodule:
self.q_print("warn: incdir '%s' does not exist." % pth)
else:
self._incdirs += [pth]
if not libdir: # add library directory
libdir = info["libdir"]
if libdir: # None means disabled in info file
pth = os.path.join(idir, libdir)
if not os.path.exists(pth):
if not pymodule:
self.q_print("warn: libdir '%s' does not exist." % pth)
else:
self._ldlibpath += os.path.pathsep+pth
self._libdirs += [(pth, idir)]
libs = libs or info["libs"]
addlibs = True
if isinstance(libs, list) and len(libs) == 0:
addlibs = False
libs = libs or pkgname
if not isinstance(libs, list):
libs = [libs]
if addlibs: # only add lib if it's not a python module
nlibs = []
for lib in libs:
instdir = idir
if not glob.glob("{0}/lib{1}*".format(os.path.join(idir,
libdir),
lib)):
instdir = ""
nlibs.append((lib, instdir))
self._libs += nlibs
libs = self._libs[:] # copy
self._remove_duplicates(libs)
self._libs = libs
if info["defs"]: # add package defines
self._cppflags += info["defs"]
if info["env"]: # add environment variables
self._env += info["env"]
# check whether it is python, i.e. pymodule entry in package.info
if not pymodule:
pymodule = info["pymodule"]
if pymodule:
pth = os.path.join(idir, libdir, utils.get_site_dir())
if self._pypath.find(pth) < 1:
self._pypath = os.path.pathsep.join([pth, self._pypath])
if info["jars"]:
pth = os.path.join(idir, libdir)
if not os.path.exists(pth):
if not pymodule:
self.q_print("warn: libdir '%s' does not exist." % pth)
for jar in info["jars"]:
jar = os.path.join(pth, jar)
if jar not in self._jars:
self._jars.append(jar)
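# Illustrative usage sketch (assumption, not part of the original module): how a
# build script might drive an already constructed dependency object "dep" through
# the methods defined above. The package name and tag are copied from the
# add_package docstring; everything else here is made up.
def _example_usage(dep):
    dep.add_package("lapack", "3rdParty/lapack-3.1.1/lapack-3.1.1")
    return {
        "incdirs": dep.get_includedirs(),
        "cppflags": dep.get_cppflags(),
        "ld_library_path": dep.get_ld_library_path(),
        "pythonpath": dep.get_pythonpath(),
    }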
| ATNF/askapsdp | Tools/Dev/rbuild/askapdev/rbuild/dependencies/dependency.py | Python | gpl-2.0 | 18,614 | 0.005104 |
import re
from django.conf import settings
from django.contrib.gis.db.backends.base.operations import BaseSpatialOperations
from django.contrib.gis.db.backends.postgis.adapter import PostGISAdapter
from django.contrib.gis.db.backends.utils import SpatialOperator
from django.contrib.gis.geometry.backend import Geometry
from django.contrib.gis.measure import Distance
from django.core.exceptions import ImproperlyConfigured
from django.db.backends.postgresql_psycopg2.operations import DatabaseOperations
from django.db.utils import ProgrammingError
from django.utils.functional import cached_property
from .models import PostGISGeometryColumns, PostGISSpatialRefSys
class PostGISOperator(SpatialOperator):
def __init__(self, geography=False, **kwargs):
# Only a subset of the operators and functions are available
# for the geography type.
self.geography = geography
super(PostGISOperator, self).__init__(**kwargs)
def as_sql(self, connection, lookup, *args):
if lookup.lhs.output_field.geography and not self.geography:
raise ValueError('PostGIS geography does not support the "%s" '
'function/operator.' % (self.func or self.op,))
return super(PostGISOperator, self).as_sql(connection, lookup, *args)
class PostGISDistanceOperator(PostGISOperator):
sql_template = '%(func)s(%(lhs)s, %(rhs)s) %(op)s %%s'
def as_sql(self, connection, lookup, template_params, sql_params):
if not lookup.lhs.output_field.geography and lookup.lhs.output_field.geodetic(connection):
sql_template = self.sql_template
if len(lookup.rhs) == 3 and lookup.rhs[-1] == 'spheroid':
template_params.update({'op': self.op, 'func': 'ST_Distance_Spheroid'})
sql_template = '%(func)s(%(lhs)s, %(rhs)s, %%s) %(op)s %%s'
else:
template_params.update({'op': self.op, 'func': 'ST_Distance_Sphere'})
return sql_template % template_params, sql_params
return super(PostGISDistanceOperator, self).as_sql(connection, lookup, template_params, sql_params)
class PostGISOperations(BaseSpatialOperations, DatabaseOperations):
name = 'postgis'
postgis = True
geography = True
geom_func_prefix = 'ST_'
version_regex = re.compile(r'^(?P<major>\d)\.(?P<minor1>\d)\.(?P<minor2>\d+)')
Adapter = PostGISAdapter
Adaptor = Adapter # Backwards-compatibility alias.
gis_operators = {
'bbcontains': PostGISOperator(op='~'),
'bboverlaps': PostGISOperator(op='&&', geography=True),
'contained': PostGISOperator(op='@'),
'contains': PostGISOperator(func='ST_Contains'),
'overlaps_left': PostGISOperator(op='&<'),
'overlaps_right': PostGISOperator(op='&>'),
'overlaps_below': PostGISOperator(op='&<|'),
'overlaps_above': PostGISOperator(op='|&>'),
'left': PostGISOperator(op='<<'),
'right': PostGISOperator(op='>>'),
'strictly_below': PostGISOperator(op='<<|'),
        'strictly_above': PostGISOperator(op='|>>'),
'same_as': PostGISOperator(op='~='),
'exact': PostGISOperator(op='~='), # alias of same_as
'contains_properly': PostGISOperator(func='ST_ContainsProperly'),
'coveredby': PostGISOperator(func='ST_CoveredBy', geography=True),
'covers': PostGISOperator(func='ST_Covers', geography=True),
        'crosses': PostGISOperator(func='ST_Crosses'),
'disjoint': PostGISOperator(func='ST_Disjoint'),
'equals': PostGISOperator(func='ST_Equals'),
'intersects': PostGISOperator(func='ST_Intersects', geography=True),
'overlaps': PostGISOperator(func='ST_Overlaps'),
'relate': PostGISOperator(func='ST_Relate'),
'touches': PostGISOperator(func='ST_Touches'),
'within': PostGISOperator(func='ST_Within'),
'dwithin': PostGISOperator(func='ST_DWithin', geography=True),
'distance_gt': PostGISDistanceOperator(func='ST_Distance', op='>', geography=True),
'distance_gte': PostGISDistanceOperator(func='ST_Distance', op='>=', geography=True),
'distance_lt': PostGISDistanceOperator(func='ST_Distance', op='<', geography=True),
'distance_lte': PostGISDistanceOperator(func='ST_Distance', op='<=', geography=True),
}
def __init__(self, connection):
super(PostGISOperations, self).__init__(connection)
prefix = self.geom_func_prefix
self.area = prefix + 'Area'
self.bounding_circle = prefix + 'MinimumBoundingCircle'
self.centroid = prefix + 'Centroid'
self.collect = prefix + 'Collect'
self.difference = prefix + 'Difference'
self.distance = prefix + 'Distance'
self.distance_sphere = prefix + 'distance_sphere'
self.distance_spheroid = prefix + 'distance_spheroid'
self.envelope = prefix + 'Envelope'
self.extent = prefix + 'Extent'
self.force_rhr = prefix + 'ForceRHR'
self.geohash = prefix + 'GeoHash'
self.geojson = prefix + 'AsGeoJson'
self.gml = prefix + 'AsGML'
self.intersection = prefix + 'Intersection'
self.kml = prefix + 'AsKML'
self.length = prefix + 'Length'
self.length_spheroid = prefix + 'length_spheroid'
self.makeline = prefix + 'MakeLine'
self.mem_size = prefix + 'mem_size'
self.num_geom = prefix + 'NumGeometries'
self.num_points = prefix + 'npoints'
self.perimeter = prefix + 'Perimeter'
self.point_on_surface = prefix + 'PointOnSurface'
self.polygonize = prefix + 'Polygonize'
self.reverse = prefix + 'Reverse'
self.scale = prefix + 'Scale'
self.snap_to_grid = prefix + 'SnapToGrid'
self.svg = prefix + 'AsSVG'
self.sym_difference = prefix + 'SymDifference'
self.transform = prefix + 'Transform'
self.translate = prefix + 'Translate'
self.union = prefix + 'Union'
self.unionagg = prefix + 'Union'
# Following "attributes" are properties due to the spatial_version check and
# to delay database access
@property
def extent3d(self):
if self.spatial_version >= (2, 0, 0):
return self.geom_func_prefix + '3DExtent'
else:
return self.geom_func_prefix + 'Extent3D'
@property
def length3d(self):
if self.spatial_version >= (2, 0, 0):
return self.geom_func_prefix + '3DLength'
else:
return self.geom_func_prefix + 'Length3D'
@property
def perimeter3d(self):
if self.spatial_version >= (2, 0, 0):
return self.geom_func_prefix + '3DPerimeter'
else:
return self.geom_func_prefix + 'Perimeter3D'
@property
def geometry(self):
# Native geometry type support added in PostGIS 2.0.
return self.spatial_version >= (2, 0, 0)
@cached_property
def spatial_version(self):
"""Determine the version of the PostGIS library."""
# Trying to get the PostGIS version because the function
# signatures will depend on the version used. The cost
# here is a database query to determine the version, which
# can be mitigated by setting `POSTGIS_VERSION` with a 3-tuple
# comprising user-supplied values for the major, minor, and
# subminor revision of PostGIS.
if hasattr(settings, 'POSTGIS_VERSION'):
version = settings.POSTGIS_VERSION
else:
try:
vtup = self.postgis_version_tuple()
except ProgrammingError:
raise ImproperlyConfigured(
'Cannot determine PostGIS version for database "%s". '
'GeoDjango requires at least PostGIS version 1.5. '
'Was the database created from a spatial database '
'template?' % self.connection.settings_dict['NAME']
)
version = vtup[1:]
return version
def convert_extent(self, box, srid):
"""
Returns a 4-tuple extent for the `Extent` aggregate by converting
the bounding box text returned by PostGIS (`box` argument), for
example: "BOX(-90.0 30.0, -85.0 40.0)".
"""
if box is None:
return None
ll, ur = box[4:-1].split(',')
xmin, ymin = map(float, ll.split())
xmax, ymax = map(float, ur.split())
return (xmin, ymin, xmax, ymax)
def convert_extent3d(self, box3d, srid):
"""
Returns a 6-tuple extent for the `Extent3D` aggregate by converting
the 3d bounding-box text returned by PostGIS (`box3d` argument), for
example: "BOX3D(-90.0 30.0 1, -85.0 40.0 2)".
"""
if box3d is None:
return None
ll, ur = box3d[6:-1].split(',')
xmin, ymin, zmin = map(float, ll.split())
xmax, ymax, zmax = map(float, ur.split())
return (xmin, ymin, zmin, xmax, ymax, zmax)
def convert_geom(self, hex, geo_field):
"""
        Converts the geometry returned from PostGIS aggregates.
"""
if hex:
return Geometry(hex, srid=geo_field.srid)
else:
return None
def geo_db_type(self, f):
"""
Return the database field type for the given geometry field.
Typically this is `None` because geometry columns are added via
the `AddGeometryColumn` stored procedure, unless the field
has been specified to be of geography type instead.
"""
if f.geography:
if f.srid != 4326:
raise NotImplementedError('PostGIS only supports geography columns with an SRID of 4326.')
return 'geography(%s,%d)' % (f.geom_type, f.srid)
elif self.geometry:
# Postgis 2.0 supports type-based geometries.
# TODO: Support 'M' extension.
if f.dim == 3:
geom_type = f.geom_type + 'Z'
else:
geom_type = f.geom_type
return 'geometry(%s,%d)' % (geom_type, f.srid)
else:
return None
def get_distance(self, f, dist_val, lookup_type):
"""
Retrieve the distance parameters for the given geometry field,
distance lookup value, and the distance lookup type.
This is the most complex implementation of the spatial backends due to
what is supported on geodetic geometry columns vs. what's available on
projected geometry columns. In addition, it has to take into account
the geography column type newly introduced in PostGIS 1.5.
"""
# Getting the distance parameter and any options.
if len(dist_val) == 1:
value, option = dist_val[0], None
else:
value, option = dist_val
# Shorthand boolean flags.
geodetic = f.geodetic(self.connection)
geography = f.geography
if isinstance(value, Distance):
if geography:
dist_param = value.m
elif geodetic:
if lookup_type == 'dwithin':
raise ValueError('Only numeric values of degree units are '
'allowed on geographic DWithin queries.')
dist_param = value.m
else:
dist_param = getattr(value, Distance.unit_attname(f.units_name(self.connection)))
else:
# Assuming the distance is in the units of the field.
dist_param = value
if (not geography and geodetic and lookup_type != 'dwithin'
and option == 'spheroid'):
# using distance_spheroid requires the spheroid of the field as
# a parameter.
return [f._spheroid, dist_param]
else:
return [dist_param]
def get_geom_placeholder(self, f, value, compiler):
"""
Provides a proper substitution value for Geometries that are not in the
SRID of the field. Specifically, this routine will substitute in the
ST_Transform() function call.
"""
if value is None or value.srid == f.srid:
placeholder = '%s'
else:
# Adding Transform() to the SQL placeholder.
placeholder = '%s(%%s, %s)' % (self.transform, f.srid)
if hasattr(value, 'as_sql'):
# If this is an F expression, then we don't really want
# a placeholder and instead substitute in the column
# of the expression.
sql, _ = compiler.compile(value)
placeholder = placeholder % sql
return placeholder
def _get_postgis_func(self, func):
"""
Helper routine for calling PostGIS functions and returning their result.
"""
# Close out the connection. See #9437.
with self.connection.temporary_connection() as cursor:
cursor.execute('SELECT %s()' % func)
return cursor.fetchone()[0]
def postgis_geos_version(self):
"Returns the version of the GEOS library used with PostGIS."
return self._get_postgis_func('postgis_geos_version')
def postgis_lib_version(self):
"Returns the version number of the PostGIS library used with PostgreSQL."
return self._get_postgis_func('postgis_lib_version')
def postgis_proj_version(self):
"Returns the version of the PROJ.4 library used with PostGIS."
return self._get_postgis_func('postgis_proj_version')
def postgis_version(self):
"Returns PostGIS version number and compile-time options."
return self._get_postgis_func('postgis_version')
def postgis_full_version(self):
"Returns PostGIS version number and compile-time options."
return self._get_postgis_func('postgis_full_version')
def postgis_version_tuple(self):
"""
Returns the PostGIS version as a tuple (version string, major,
minor, subminor).
"""
# Getting the PostGIS version
version = self.postgis_lib_version()
m = self.version_regex.match(version)
if m:
major = int(m.group('major'))
minor1 = int(m.group('minor1'))
minor2 = int(m.group('minor2'))
else:
raise Exception('Could not parse PostGIS version string: %s' % version)
return (version, major, minor1, minor2)
def proj_version_tuple(self):
"""
Return the version of PROJ.4 used by PostGIS as a tuple of the
major, minor, and subminor release numbers.
"""
proj_regex = re.compile(r'(\d+)\.(\d+)\.(\d+)')
proj_ver_str = self.postgis_proj_version()
m = proj_regex.search(proj_ver_str)
if m:
return tuple(map(int, [m.group(1), m.group(2), m.group(3)]))
else:
raise Exception('Could not determine PROJ.4 version from PostGIS.')
def spatial_aggregate_name(self, agg_name):
if agg_name == 'Extent3D':
return self.extent3d
else:
return self.geom_func_prefix + agg_name
# Routines for getting the OGC-compliant models.
def geometry_columns(self):
return PostGISGeometryColumns
def spatial_ref_sys(self):
return PostGISSpatialRefSys
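# Illustrative sketch (not part of Django): how the version_regex defined on
# PostGISOperations turns a PostGIS version string such as "2.1.8" into the
# (version, major, minor1, minor2) tuple built by postgis_version_tuple().
# The sample version string is made up.
def _example_parse_postgis_version(version='2.1.8'):
    m = PostGISOperations.version_regex.match(version)
    if not m:
        raise Exception('Could not parse PostGIS version string: %s' % version)
    return (version, int(m.group('major')), int(m.group('minor1')),
            int(m.group('minor2')))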
| runekaagaard/django-contrib-locking | django/contrib/gis/db/backends/postgis/operations.py | Python | bsd-3-clause | 15,371 | 0.001106 |
# Copyright 2011 OpenStack Foundation
# Copyright 2011, Piston Cloud Computing, Inc.
# Copyright 2011 Nebula, Inc.
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from keystoneclient import exceptions
class ServiceCatalog(object):
"""Helper methods for dealing with a Keystone Service Catalog."""
@classmethod
def factory(cls, resource_dict, token=None, region_name=None):
"""Create ServiceCatalog object given a auth token."""
if ServiceCatalogV3.is_valid(resource_dict):
return ServiceCatalogV3(token, resource_dict, region_name)
elif ServiceCatalogV2.is_valid(resource_dict):
return ServiceCatalogV2(resource_dict, region_name)
else:
raise NotImplementedError('Unrecognized auth response')
def get_token(self):
"""Fetch token details from service catalog.
Returns a dictionary containing the following::
- `id`: Token's ID
- `expires`: Token's expiration
- `user_id`: Authenticated user's ID
- `tenant_id`: Authorized project's ID
- `domain_id`: Authorized domain's ID
"""
raise NotImplementedError()
def get_endpoints(self, service_type=None, endpoint_type=None):
"""Fetch and filter endpoints for the specified service(s).
Returns endpoints for the specified service (or all) and
that contain the specified type (or all).
"""
raise NotImplementedError()
def get_urls(self, attr=None, filter_value=None,
service_type='identity', endpoint_type='publicURL'):
"""Fetch endpoint urls from the service catalog.
Fetch the endpoints from the service catalog for a particular
endpoint attribute. If no attribute is given, return the first
endpoint of the specified type.
:param string attr: Endpoint attribute name.
:param string filter_value: Endpoint attribute value.
:param string service_type: Service type of the endpoint.
:param string endpoint_type: Type of endpoint.
Possible values: public or publicURL,
internal or internalURL,
admin or adminURL
:param string region_name: Region of the endpoint.
:returns: tuple of urls or None (if no match found)
"""
raise NotImplementedError()
def url_for(self, attr=None, filter_value=None,
service_type='identity', endpoint_type='publicURL'):
"""Fetch an endpoint from the service catalog.
Fetch the specified endpoint from the service catalog for
a particular endpoint attribute. If no attribute is given, return
the first endpoint of the specified type.
Valid endpoint types: `public` or `publicURL`,
`internal` or `internalURL`,
                              `admin` or `adminURL`
"""
raise NotImplementedError()
def get_data(self):
"""Get the raw catalog structure.
        Get the version-dependent catalog structure as it is presented within
the resource.
:returns: dict containing raw catalog data or None
"""
raise NotImplementedError()
class ServiceCatalogV2(ServiceCatalog):
"""An object for encapsulating the service catalog using raw v2 auth token
from Keystone.
"""
def __init__(self, resource_dict, region_name=None):
self.catalog = resource_dict
self.region_name = region_name
@classmethod
def is_valid(cls, resource_dict):
# This class is also used for reading token info of an unscoped token.
# Unscoped token does not have 'serviceCatalog' in V2, checking this
# will not work. Use 'token' attribute instead.
return 'token' in resource_dict
def get_data(self):
return self.catalog.get('serviceCatalog')
def get_token(self):
token = {'id': self.catalog['token']['id'],
'expires': self.catalog['token']['expires']}
try:
token['user_id'] = self.catalog['user']['id']
token['tenant_id'] = self.catalog['token']['tenant']['id']
except Exception:
            # just leave the tenant and user out if they don't exist
pass
return token
def get_endpoints(self, service_type=None, endpoint_type=None):
if endpoint_type and 'URL' not in endpoint_type:
endpoint_type = endpoint_type + 'URL'
sc = {}
for service in (self.get_data() or []):
if service_type and service_type != service['type']:
continue
sc[service['type']] = []
for endpoint in service['endpoints']:
if endpoint_type and endpoint_type not in endpoint:
continue
sc[service['type']].append(endpoint)
return sc
def get_urls(self, attr=None, filter_value=None,
service_type='identity', endpoint_type='publicURL'):
sc_endpoints = self.get_endpoints(service_type=service_type,
endpoint_type=endpoint_type)
endpoints = sc_endpoints.get(service_type)
if not endpoints:
return
if endpoint_type and 'URL' not in endpoint_type:
endpoint_type = endpoint_type + 'URL'
return tuple(endpoint[endpoint_type]
for endpoint in endpoints
if (endpoint_type in endpoint
and (not self.region_name
or endpoint.get('region') == self.region_name)
and (not filter_value
or endpoint.get(attr) == filter_value)))
def url_for(self, attr=None, filter_value=None,
service_type='identity', endpoint_type='publicURL'):
catalog = self.get_data()
if not catalog:
raise exceptions.EmptyCatalog('The service catalog is empty.')
if 'URL' not in endpoint_type:
endpoint_type = endpoint_type + 'URL'
for service in catalog:
if service['type'] != service_type:
continue
endpoints = service['endpoints']
for endpoint in endpoints:
if (self.region_name and
endpoint.get('region') != self.region_name):
continue
if not filter_value or endpoint.get(attr) == filter_value:
return endpoint[endpoint_type]
raise exceptions.EndpointNotFound('%s endpoint for %s not found.' %
(endpoint_type, service_type))
class ServiceCatalogV3(ServiceCatalog):
"""An object for encapsulating the service catalog using raw v3 auth token
from Keystone.
"""
def __init__(self, token, resource_dict, region_name=None):
self._auth_token = token
self.catalog = resource_dict
self.region_name = region_name
@classmethod
def is_valid(cls, resource_dict):
# This class is also used for reading token info of an unscoped token.
# Unscoped token does not have 'catalog', checking this
# will not work. Use 'methods' attribute instead.
return 'methods' in resource_dict
def get_data(self):
return self.catalog.get('catalog')
def get_token(self):
token = {'id': self._auth_token,
'expires': self.catalog['expires_at']}
try:
token['user_id'] = self.catalog['user']['id']
domain = self.catalog.get('domain')
if domain:
token['domain_id'] = domain['id']
project = self.catalog.get('project')
if project:
token['tenant_id'] = project['id']
except Exception:
            # just leave the domain, project and user out if they don't exist
pass
return token
def get_endpoints(self, service_type=None, endpoint_type=None):
if endpoint_type:
endpoint_type = endpoint_type.rstrip('URL')
sc = {}
for service in (self.get_data() or []):
if service_type and service_type != service['type']:
continue
sc[service['type']] = []
for endpoint in service['endpoints']:
if endpoint_type and endpoint_type != endpoint['interface']:
continue
sc[service['type']].append(endpoint)
return sc
def get_urls(self, attr=None, filter_value=None,
service_type='identity', endpoint_type='public'):
if endpoint_type:
endpoint_type = endpoint_type.rstrip('URL')
sc_endpoints = self.get_endpoints(service_type=service_type,
endpoint_type=endpoint_type)
endpoints = sc_endpoints.get(service_type)
if not endpoints:
return None
urls = list()
for endpoint in endpoints:
if (endpoint['interface'] == endpoint_type
and (not self.region_name
or endpoint.get('region') == self.region_name)
and (not filter_value
or endpoint.get(attr) == filter_value)):
urls.append(endpoint['url'])
return tuple(urls)
def url_for(self, attr=None, filter_value=None,
service_type='identity', endpoint_type='public'):
catalog = self.get_data()
if not catalog:
raise exceptions.EmptyCatalog('The service catalog is empty.')
if endpoint_type:
endpoint_type = endpoint_type.rstrip('URL')
for service in catalog:
if service['type'] != service_type:
continue
endpoints = service['endpoints']
for endpoint in endpoints:
if endpoint.get('interface') != endpoint_type:
continue
if (self.region_name and
endpoint.get('region') != self.region_name):
continue
if not filter_value or endpoint.get(attr) == filter_value:
return endpoint['url']
raise exceptions.EndpointNotFound('%s endpoint for %s not found.' %
(endpoint_type, service_type))
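# Illustrative sketch (not part of python-keystoneclient): a minimal v2 access-info
# dict shaped the way ServiceCatalogV2 expects it, and how factory()/url_for() pick
# an endpoint out of it. The token id, expiry time and URL are made up.
def _example_v2_url_for():
    resource_dict = {
        'token': {'id': 'abc123', 'expires': '2015-01-01T00:00:00Z'},
        'serviceCatalog': [{
            'type': 'identity',
            'endpoints': [{'region': 'RegionOne',
                           'publicURL': 'http://keystone.example.com:5000/v2.0'}],
        }],
    }
    catalog = ServiceCatalog.factory(resource_dict)
    return catalog.url_for(service_type='identity', endpoint_type='publicURL')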
| raildo/python-keystoneclient | python-keystoneclient-0.4.1.7.gdca1d42/keystoneclient/service_catalog.py | Python | apache-2.0 | 11,013 | 0 |
from django.test import override_settings
from incuna_test_utils.testcases.api_request import (
BaseAPIExampleTestCase, BaseAPIRequestTestCase,
)
from tests.factories import UserFactory
class APIRequestTestCase(BaseAPIRequestTestCase):
user_factory = UserFactory
def test_create_request_format(self):
request = self.create_request()
assert request.META['format'] == 'json'
def test_create_request_auth(self):
request = self.create_request()
assert request.user.is_authenticated
def test_create_request_no_auth(self):
request = self.create_request(auth=False)
assert not request.user.is_authenticated
class APIExampleTestCase(BaseAPIExampleTestCase):
@override_settings(ALLOWED_HOSTS=['localhost'])
def test_create_request(self):
request = self.create_request(auth=False)
assert request.get_host() == self.SERVER_NAME
| incuna/incuna-test-utils | tests/testcases/test_api_request.py | Python | bsd-2-clause | 918 | 0 |
from django.shortcuts import render_to_response
from django.template import RequestContext
from apps.members.models import Member
def show_all_current_members(request):
members = Member.objects.filter(is_renegade=False).order_by('function', 'started_nsi_date')
return render_to_response(
'show_all_current_members.html',
{'members': members},
context_instance=RequestContext(request)
)
def show_member(request, slug):
member = Member.objects.get(slug=slug)
participation_list = member.participation_set.all()
members = Member.objects.all()
return render_to_response(
'show_member.html',
{'member': member, 'participation_list': participation_list, 'members': members},
context_instance=RequestContext(request)
)
def show_all_former_members(request):
members = Member.objects.filter(is_renegade=True)
return render_to_response(
'show_all_former_members.html',
{'members': members},
context_instance=RequestContext(request)
)
| nsi-iff/nsi_site | apps/members/views.py | Python | mit | 1,044 | 0.003831 |
#!/usr/bin/env python
### <command interpreter="python">send_to_cgd.py
### $pipeline_output $endpoint $cgd_url $output $runid $barcodeid $qcversion
### </command>
### Galaxy wrapper for cgd_client.jar.
### CGD_CLIENT is hard coded, but this is not expected to move.
import argparse
import subprocess
from subprocess import Popen, STDOUT, PIPE
import os
import sys
import shutil
def renameOutput(runid, barcodeid, endpoint):
"""
CGD needs the filename to be restructured.
"""
if endpoint == "uploadqcsheet":
newfile = "/tmp/" + '_'.join([runid, barcodeid, "R1"]) + ".html"
elif endpoint == "uploadqcsheetrtwo":
newfile = "/tmp/" + '_'.join([runid, barcodeid, "R2"]) + ".html"
else:
print("Not sending FastQC.")
return None
return newfile
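# Illustrative sketch (not part of the wrapper): for runid "run42", barcode "BC01"
# and the R1 FastQC endpoint, renameOutput() above returns "/tmp/run42_BC01_R1.html";
# endpoints other than the two FastQC ones return None. The identifiers are made up.
def _example_rename():
    return renameOutput("run42", "BC01", "uploadqcsheet")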
def splitUrl(url, n):
return url.split('/')[-n:]
def main():
# CGD_CLIENT="/opt/installed/cgd_client-1.0.7.jar"
CGD_CLIENT="/home/exacloud/clinical/installedTest/cgd_client-1.0.7.jar"
parser = argparse.ArgumentParser(description='')
parser.add_argument("--pipeline_out", help='')
parser.add_argument("--cgd_url", help='')
parser.add_argument(dest='stdout_log', help='')
parser.add_argument(dest='endpoint', help='')
parser.add_argument("--runid", help='')
parser.add_argument("--barcodeid", help='')
parser.add_argument("--qcversion", help='')
args = parser.parse_args()
if args.endpoint != "none":
newfile = renameOutput(args.runid, args.barcodeid, args.endpoint)
else:
id_list = splitUrl(args.cgd_url, 3)
newfile = renameOutput(id_list[1], id_list[2], id_list[0])
if args.endpoint == "uploadqcsheet" or args.endpoint == "uploadqcsheetrtwo":
print("Copying to " + newfile)
shutil.copyfile(args.pipeline_out, newfile)
cmd = ["java", "-jar", CGD_CLIENT, "-f", newfile, "-n", args.endpoint]
else:
if args.pipeline_out:
cmd = ["java", "-jar", CGD_CLIENT, "-f", args.pipeline_out, "-n", args.endpoint]
else:
cmd = ["java", "-jar", CGD_CLIENT, "-n", args.endpoint]
if args.cgd_url:
# cmd.append("-u")
# cmd.append(args.cgd_url)
cmd = ["java", "-jar", CGD_CLIENT, "-f", newfile, "-u", args.cgd_url]
if args.runid:
cmd.append("-r")
cmd.append(args.runid)
if args.barcodeid:
cmd.append("-b")
cmd.append(args.barcodeid)
if args.qcversion:
cmd.append("-v")
cmd.append(args.qcversion)
cmd.append("-d")
print("We are running this command:")
print(' '.join(cmd))
proc = subprocess.call(cmd)
outfile = open(args.stdout_log, 'w')
outfile.write("The process has run.")
outfile.close()
## Clean up temp file.
    if newfile is not None and os.path.exists(newfile):
os.remove(newfile)
if __name__ == "__main__":
main()
| jhl667/galaxy_tools | tools/jhl_tools/send_to_cgd.py | Python | apache-2.0 | 2,897 | 0.007249 |
"""
Module to parse a Lettvin Results File C array of vertex points (and distances) into an OBJ file format of vertex points.
arg1: the path to the results file. (File must exist)
arg2: the file extension of the output OBJ filename. (e.g. ".obj") Default is ".obj" (File must not exist)
Vertex points are written in the OBJ file format, as they are found in the result file.
WARNING:
 - The faces of the OBJ file are not known and are therefore written to the OBJ file as a single triangle, from vertices 1,2,3.
"""
import sys, os
# Check input file exists:
path = sys.argv[1]
assert os.path.exists( path ) , "Input file does not exist, aborting.\nFilename: "+str(path)
file_extension = sys.argv[2] if len(sys.argv) > 2 and sys.argv[2] != "" else ".obj"
assert not os.path.exists( path+file_extension ), "Output file already exists, aborting.\nFilename: "+str(path+file_extension)
# extract the c array of vertex points from result file:
l = []
with open(path, 'rb') as f:
c = 0
for line in f:
if line.strip().startswith("{"):
l.append( line.strip() )
c+=1
# convert string of c array to python list
positions = eval("["+("".join(l)).replace("{","[").replace("}","]")+"]")
# keep only the x, y, z coordinates of each point (drop the trailing distance value)
pos = [x[0:3] for x in positions]
# prepare the Obj file format header and content:
w = []
w.append("""# Blender v2.69 (sub 0) OBJ File: '"""+path+"""'
# www.blender.org
mtllib positions_diffuse.mtl
o """+path)
for x in pos:
w.append("v "+str(x[0])+" "+str(x[1])+" "+str(x[2]))
# include an arbitrary face to hide file format parse errors later..
w.append("""usemtl None
s off
f 1 2 3""")
# write out the obj file:
f = open(str(path)+str(file_extension),'w')
for s in w:
f.write( str(s) +"\n")
| LightStage-Aber/LightStage-Repo | exp/Lettvin_Repulsion/helpers/parse_diffuse_results_to_obj_file.py | Python | apache-2.0 | 1,750 | 0.017714 |
#!/usr/bin/env python
# Copyright 2016 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Refer to the README and COPYING files for full details of the license
#
import functools
import re
import itertools
import shutil
import logging
import os
from lago import log_utils
from lago.utils import (
run_command,
LockFile,
)
from . import utils
LOGGER = logging.getLogger(__name__)
LogTask = functools.partial(log_utils.LogTask, logger=LOGGER)
log_task = functools.partial(log_utils.log_task, logger=LOGGER)
class RepositoryError(Exception):
pass
class RepositoryMergeError(RepositoryError):
pass
def merge(output_dir, sources, repoman_config=None):
"""
Run repoman on ``sources``, creating a new RPM repository in
``output_dir``
Args:
output_dir(str): Path to create new repository
sources(list of str): repoman sources
repoman_config(str): repoman configuration file, if not passed it will
use default repoman configurations, equivalent to:
| [main]
| on_empty_source=warn
| [store.RPMStore]
| on_wrong_distro=copy_to_all
| with_srcrpms=false
| with_sources=false
Raises:
:exc:`RepositoryMergeError`: If repoman command failed.
:exc:`IOError`: If ``repoman_config`` is passed but does not exists.
Returns:
None
"""
cmd = []
cmd_suffix = [
'--option=store.RPMStore.rpm_dir=', output_dir, 'add'
] + sources
if repoman_config is None:
repoman_params = [
'--option=main.on_empty_source=warn',
'--option=store.RPMStore.on_wrong_distro=copy_to_all',
'--option=store.RPMStore.with_srcrpms=false',
'--option=store.RPMStore.with_sources=false',
]
cmd = ['repoman'] + repoman_params + cmd_suffix
else:
if os.path.isfile(repoman_config):
cmd = ['repoman', '--config={0}'.format(repoman_config)
] + cmd_suffix
else:
raise IOError(
('error running repoman, {0} not '
'found').format(repoman_config)
)
with LogTask('Running repoman'):
res = run_command(cmd)
if res.code:
raise RepositoryMergeError(
(
'Failed merging repoman sources: {0} into directory: {1}, '
'check lago.log for repoman output '
).format(sources, output_dir)
)
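# Illustrative sketch (not part of lago): writing out a repoman configuration file
# that mirrors the defaults documented in merge() above and passing it explicitly
# via the repoman_config argument. The config path is made up.
def _example_merge_with_config(output_dir, sources):
    config_text = (
        '[main]\n'
        'on_empty_source=warn\n'
        '[store.RPMStore]\n'
        'on_wrong_distro=copy_to_all\n'
        'with_srcrpms=false\n'
        'with_sources=false\n'
    )
    config_path = '/tmp/repoman-defaults.conf'
    with open(config_path, 'w') as f:
        f.write(config_text)
    merge(output_dir, sources, repoman_config=config_path)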
def with_repo_server(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
with utils.repo_server_context(args[0]):
return func(*args, **kwargs)
return wrapper
def _fix_reposync_issues(reposync_out, repo_path):
"""
Fix for the issue described at::
https://bugzilla.redhat.com//show_bug.cgi?id=1399235
https://bugzilla.redhat.com//show_bug.cgi?id=1332441
"""
if len(repo_path) == 0 or len(reposync_out) == 0:
LOGGER.warning(
(
'unable to run _fix_reposync_issues, no reposync output '
'or empty repo path.'
)
)
return
rpm_regex = r'[a-z]{1}[a-zA-Z0-9._\\-]+'
wrong_version = re.compile(
r'(?P<package_name>' + rpm_regex + r'): \[Errno 256\]'
)
wrong_release = re.compile(r'(?P<package_name>' + rpm_regex + r') FAILED')
packages = set(
itertools.chain(
wrong_version.findall(reposync_out),
wrong_release.findall(reposync_out)
)
)
count = 0
LOGGER.debug(
'detected package errors in reposync output in repo_path:%s: %s',
repo_path, ','.join(packages)
)
for dirpath, _, filenames in os.walk(repo_path):
rpms = (
file for file in filenames
if file.endswith('.rpm') and dirpath.startswith(repo_path)
)
for rpm in rpms:
if any(map(rpm.startswith, packages)):
bad_package = os.path.join(dirpath, rpm)
LOGGER.info('removing conflicting RPM: %s', bad_package)
os.unlink(bad_package)
count = count + 1
if count > 0:
LOGGER.debug(
(
'removed %s conflicting packages, see '
'https://bugzilla.redhat.com//show_bug.cgi?id=1399235 '
'for more details.'
), count
)
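# Illustrative sketch (not part of lago): how the two regular expressions used by
# _fix_reposync_issues() pull package names out of reposync error output. The two
# sample error lines are made up but follow the patterns matched above.
def _example_parse_reposync_errors():
    out = ('foo-libs-1.2-3.el7.x86_64: [Errno 256] No more mirrors to try.\n'
           'bar-utils-0.9-1.el7.noarch FAILED')
    rpm_regex = r'[a-z]{1}[a-zA-Z0-9._\\-]+'
    wrong_version = re.compile(r'(?P<package_name>' + rpm_regex + r'): \[Errno 256\]')
    wrong_release = re.compile(r'(?P<package_name>' + rpm_regex + r') FAILED')
    return set(wrong_version.findall(out) + wrong_release.findall(out))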
def sync_rpm_repository(repo_path, yum_config, repos):
lock_path = os.path.join(repo_path, 'repolock')
if not os.path.exists(repo_path):
os.makedirs(repo_path)
reposync_base_cmd = [
'reposync', '--config=%s' % yum_config,
'--download_path=%s' % repo_path, '--newest-only', '--delete',
'--cachedir=%s/cache' % repo_path
]
with LogTask('Running reposync'):
for repo in repos:
with LockFile(lock_path, timeout=180):
reposync_cmd = reposync_base_cmd + ['--repoid=%s' % repo]
ret, out, _ = run_command(reposync_cmd)
if not ret:
LOGGER.debug('reposync on repo %s: success.' % repo)
continue
LOGGER.info('repo: %s: failed, re-running.', repo)
_fix_reposync_issues(
reposync_out=out, repo_path=os.path.join(repo_path, repo)
)
ret, _, _ = run_command(reposync_cmd)
if not ret:
continue
LOGGER.info(
'repo: %s: failed. clearing cache and re-running.', repo
)
shutil.rmtree('%s/cache' % repo_path)
ret, out, err = run_command(reposync_cmd)
if ret:
LOGGER.error(
'reposync command failed for repoid: %s', repo
)
LOGGER.error(
'reposync stdout for repoid: %s: \n%s', repo, out
)
LOGGER.error(
'reposync stderr for repoid: %s: \n%s', repo, err
)
raise RuntimeError(
(
'Failed to run reposync 3 times '
'for repoid: %s, aborting.'
) % repo
)
| leongold/lago | ovirtlago/reposetup.py | Python | gpl-2.0 | 7,082 | 0 |
#
# Copyright (C) 2021 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
import unittest
from unittest.mock import patch
from pyanaconda.modules.boss.installation import SetContextsTask
class SetContextsTaskTest(unittest.TestCase):
@patch("pyanaconda.modules.boss.installation.execWithRedirect")
def test_run(self, exec_mock):
"""Test SetContextsTask success."""
task = SetContextsTask("/somewhere")
with self.assertLogs() as cm:
task.run()
exec_mock.assert_called_once_with(
"restorecon",
[
"-ir",
"/boot",
"/dev",
"/etc",
"/lib64",
"/root",
"/usr/lib64",
"/var/cache/yum",
"/var/home",
"/var/lib",
"/var/lock",
"/var/log",
"/var/media",
"/var/mnt",
"/var/opt",
"/var/roothome",
"/var/run",
"/var/spool",
"/var/srv"
],
root="/somewhere"
)
logs = "\n".join(cm.output)
assert "not available" not in logs
@patch("pyanaconda.modules.boss.installation.execWithRedirect")
def test_restorecon_missing(self, exec_mock):
"""Test SetContextsTask with missing restorecon."""
exec_mock.side_effect = FileNotFoundError("testing")
task = SetContextsTask("/somewhere")
with self.assertLogs() as cm:
task.run() # asserts also that exception is not raised
logs = "\n".join(cm.output)
assert "not available" in logs
| M4rtinK/anaconda | tests/unit_tests/pyanaconda_tests/modules/boss/test_set_file_contexts_task.py | Python | gpl-2.0 | 2,587 | 0 |
import time
import pytest
@pytest.mark.moto
@pytest.mark.asyncio
async def test_list_queues(sqs_client, sqs_queue_url):
response = await sqs_client.list_queues()
pytest.aio.assert_status_code(response, 200)
assert sqs_queue_url in response['QueueUrls']
@pytest.mark.moto
@pytest.mark.asyncio
async def test_get_queue_name(sqs_client, sqs_queue_url):
queue_name = sqs_queue_url.rsplit('/', 1)[-1]
response = await sqs_client.get_queue_url(QueueName=queue_name)
pytest.aio.assert_status_code(response, 200)
assert sqs_queue_url == response['QueueUrl']
@pytest.mark.moto
@pytest.mark.asyncio
async def test_put_pull_delete_test(sqs_client, sqs_queue_url):
response = await sqs_client.send_message(
QueueUrl=sqs_queue_url,
MessageBody='test_message_1',
MessageAttributes={
'attr1': {'DataType': 'String', 'StringValue': 'value1'}
}
)
pytest.aio.assert_status_code(response, 200)
response = await sqs_client.receive_message(
QueueUrl=sqs_queue_url,
MessageAttributeNames=['attr1']
)
pytest.aio.assert_status_code(response, 200)
    # Messages won't be a key if it's empty
assert len(response.get('Messages', [])) == 1
msg = response['Messages'][0]
assert msg['Body'] == 'test_message_1'
assert msg['MessageAttributes']['attr1']['StringValue'] == 'value1'
receipt_handle = response['Messages'][0]['ReceiptHandle']
response = await sqs_client.delete_message(
QueueUrl=sqs_queue_url,
ReceiptHandle=receipt_handle
)
pytest.aio.assert_status_code(response, 200)
response = await sqs_client.receive_message(
QueueUrl=sqs_queue_url,
)
pytest.aio.assert_status_code(response, 200)
assert len(response.get('Messages', [])) == 0
@pytest.mark.moto
@pytest.mark.asyncio
async def test_put_pull_wait(sqs_client, sqs_queue_url):
start = time.perf_counter()
response = await sqs_client.receive_message(
QueueUrl=sqs_queue_url,
WaitTimeSeconds=2
)
end = time.perf_counter()
pytest.aio.assert_status_code(response, 200)
assert 'Messages' not in response
assert end - start > 1.5
| aio-libs/aiobotocore | tests/test_sqs.py | Python | apache-2.0 | 2,198 | 0 |
#!/usr/bin/env python
import sys, logging, getpass, subprocess, os, json
# List of Heroku App ids to update
_heroku_app_ids = None
_HEROKU_APP_IDS_ENV_KEY = "HEROKU_APP_IDS"
def get_heroku_app_ids():
global _heroku_app_ids
# Lazy load
if _heroku_app_ids is None:
env = os.environ.get(_HEROKU_APP_IDS_ENV_KEY)
# Check if environment var is set
if env is None:
err_txt = "{} not set".format(_HEROKU_APP_IDS_ENV_KEY)
logging.exception(err_txt)
raise ValueError(err_txt)
# Parse env into list
try:
_heroku_app_ids = json.loads(env)
except json.JSONDecodeError as e:
err_txt = "Error parsing {} environment variable to json".format(_HEROKU_APP_IDS_ENV_KEY)
logging.exception(err_txt)
raise SyntaxError(err_txt)
return _heroku_app_ids
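# Illustrative sketch (not part of the hook): HEROKU_APP_IDS is expected to hold a
# JSON list of Heroku app names, e.g. HEROKU_APP_IDS='["my-api-prod", "my-api-stage"]'
# (app names made up). The helper below shows the round trip through
# get_heroku_app_ids(); it is for demonstration only and mutates os.environ.
def _example_app_ids():
    os.environ[_HEROKU_APP_IDS_ENV_KEY] = '["my-api-prod", "my-api-stage"]'
    return get_heroku_app_ids()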
# str to log with identifying info
_identifying_info = None
"""Get identifying information of computer user in loggable str
This includes:
- Computer user name
- Git user name
- Git user email
Note: This doesn't actually get identifiable information in a computer forensics way.
More of a "Who at the company did what last" way (At best: Who broke what).
Returns:
- str: Identifying information for computer user in format: user.username="{}", git.user.name="{}", git.user.email="{}"
"""
def get_identifying_info():
global _identifying_info
# Lazy load
if _identifying_info is None:
# Get user's name
username = None
try:
username = getpass.getuser()
except:
logging.exception("Error while trying to get user's username")
# Get Git information
git_user = None
git_email = None
if which("git") is not None:
try:
git_user = cmd_output(["git", "config", "user.name"])
except Exception as e:
logging.exception("Error while trying to find user's git.user.name")
try:
git_email = cmd_output(["git", "config", "user.email"])
except Exception as e:
logging.exception("Error while trying to find user's git.user.email")
_identifying_info = "user.username=\"{}\", git.user.name=\"{}\", git.user.email=\"{}\"".format(username, git_user, git_email)
return _identifying_info
# Determines if file path is exe, credit to author of which function (See docs)
def is_exe(fpath):
return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
"""Command which emulates `which` UNIX command
Credit to users Jay(https://stackoverflow.com/users/20840/jay) and harmv(https://stackoverflow.com/users/328384/harmv)
on SO for the code: http://stackoverflow.com/a/377028/1478191
Returns the full path to a program accessible from the PATH.
Args:
- program (str): Name of program
Returns:
    - str: Full path to executable file
- None: If executable is not found anywhere
"""
def which(program):
fpath, fname = os.path.split(program)
if fpath:
if is_exe(program):
return program
else:
for path in os.environ["PATH"].split(os.pathsep):
path = path.strip('"')
exe_file = os.path.join(path, program)
if is_exe(exe_file):
return exe_file
return None
def clean_cmd_output (output):
if output is not None:
output = output.decode("utf-8").rstrip()
return output
"""Returns the output of the given command
Args:
- cmds (str[]): Array of commands parts
Returns:
- str: Command output
- None: If no command output was received
"""
def cmd_output(cmds):
proc = subprocess.Popen(cmds, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
output, err = proc.communicate()
    # Trim output if it exists
output = clean_cmd_output(output)
err = clean_cmd_output(err)
# Raises Exception if stderr output exists
if (err is not None) and (len(err) > 0):
raise Exception("Error while running command: \"{}\"".format(err))
return output
###################
# HOOKS #
###################
"""Dehydrated 'deploy_cert' hook handler
Purpose is to deploy successfully retrieved SSL certs to Heroku applications
Args:
- args (object[]): Command line arguments without filename or hook name. Expected format:
[domain, key_file, cert_file, full_chain_file, chain_file, timestamp]
- domain (str): Root domain name on SSL cert: certificate common name (CN).
- key_file (str): Path to SSL cert private key file, second argument in heroku certs:update.
- cert_file (str): Path to SSL cert signed certificate file, first argument in heroku certs:update.
- full_chain_file (str): Path to SSL cert full certificate chain file.
- chain_file (str): Path to SSL intermediate certificates file.
- timestamp (str): Timestamp when the SSL cert was created
"""
def deploy_cert(args):
# Extract args
domain, key_file, cert_file, full_chain_file, chain_file, timestamp = args
# Get Heroku app Id for domain
heroku_app_ids = None
try:
heroku_app_ids = get_heroku_app_ids()
logging.debug("Got Heroku Ids=\"{}\"".format(heroku_app_ids))
    except ValueError as e: # If ENV['HEROKU_APP_IDS'] isn't set
        logging.exception("Failed to deploy certificate for domain=\"{}\", HEROKU_APP_IDS environment variable not set".format(domain))
        return
    except SyntaxError as e:
        logging.exception("Failed to deploy certificate for domain=\"{}\", HEROKU_APP_IDS syntax invalid".format(domain))
        return
# Deploy certs
logging.info("Deploying certificates to Heroku apps: {}".format(heroku_app_ids))
command_parts = ["heroku", "certs:update", cert_file, key_file, "--app", "APP ID", "--confirm", "APP ID"]
for id in heroku_app_ids:
# Set Heroku app id in command
command_parts[len(command_parts) - 1] = id
command_parts[len(command_parts) - 3] = id
# Run
proc = subprocess.Popen(command_parts)
logging.debug("Ran: $ {}".format(" ".join(command_parts)))
logging.info("Deployed certificate for Heroku app: {}".format(id))
# END HOOKS
"""Main function called below
Called if __name__ is '__main__'
Args:
- argv (object[]): Command line arguments (with first filename arg from Python removed)
"""
def main(argv):
# Register hooks that we handle
operations = {
'deploy_cert': deploy_cert
}
"""Call Hook Handler
Fields:
hook_name (str): Name of hook, picked from argv[0], one of:
- 'deploy_challenge'
- 'clean_challenge'
- 'deploy_cert'
- 'unchanged_cert'
- invalid_challenge'
- 'request_failure'
- 'exit_hook'
(From: https://github.com/lukas2511/dehydrated/blob/master/docs/examples/hook.sh)
hook_handler_args (str[]): Hook arguments, set by argv[1:]
"""
hook_name = argv[0]
hook_handler_args = argv[1:]
# Log hook called
logging.debug("Hook called. hook.name='{}', hook.args={}".format(hook_name, hook_handler_args))
# Log more specific info depending on hook_name
if hook_name not in operations: # We don't handle this hook
logging.debug("heroku-auto-ssl/hook.py doesn't currently handle: hook.name=\"{}\"".format(hook_name))
elif hook_name in ['deploy_cert']: # This hook could be considered a "security event"
logging.info("heroku-auto-ssl/hook.py handled: hook.name=\"{}\", by: {}".format(hook_name, get_identifying_info()))
else: # Regular hook
logging.debug("heroku-auto-ssl/hook.py handled: hook.name=\"{}\"".format(hook_name))
# Call hook if we handle it
if hook_name in operations:
operations[hook_name](hook_handler_args)
# Call main
if __name__ == '__main__':
# Setup logging
logging.basicConfig(filename="heroku-auto-ssl.log",
level=logging.DEBUG,
format='%(asctime)s %(module)s %(name)s.%(funcName)s +%(lineno)s: %(levelname)-8s [%(process)d] %(message)s')
logging.getLogger().addHandler(logging.StreamHandler()) # Make log to file and console
# argv[1:] - Args after file name
main(sys.argv[1:])
| AdmitHub/heroku-auto-ssl | hooks/heroku-auto-ssl/hook.py | Python | mit | 8,401 | 0.00488 |
# This file is part of MSMTools.
#
# Copyright (c) 2015, 2014 Computational Molecular Biology Group, Freie Universitaet Berlin (GER)
#
# MSMTools is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
r"""This module provides matrix-decomposition based functions for the
analysis of stochastic matrices
Below are the dense implementations for functions specified in api
Dense matrices are represented by numpy.ndarrays throughout this module.
.. moduleauthor:: B.Trendelkamp-Schroer <benjamin DOT trendelkamp-schroer AT fu-berlin DOT de>
"""
import numpy as np
import numbers
import warnings
from scipy.linalg import eig, eigh, eigvals, eigvalsh, solve
from ...util.exceptions import SpectralWarning, ImaginaryEigenValueWarning
from .stationary_vector import stationary_distribution
from .assessment import is_reversible
def eigenvalues(T, k=None, reversible=False, mu=None):
r"""Compute eigenvalues of given transition matrix.
Parameters
----------
T : (d, d) ndarray
Transition matrix (stochastic matrix)
k : int or tuple of ints, optional
Compute the first k eigenvalues of T
reversible : bool, optional
Indicate that transition matrix is reversible
mu : (d,) ndarray, optional
Stationary distribution of T
Returns
-------
eig : (n,) ndarray,
The eigenvalues of T ordered with decreasing absolute value.
If k is None then n=d, if k is int then n=k otherwise
n is the length of the given tuple of eigenvalue indices.
Notes
-----
Eigenvalues are computed using the numpy.linalg interface
for the corresponding LAPACK routines.
    If reversible=True the eigenvalues of the similar symmetric
matrix `\sqrt(\mu_i / \mu_j) p_{ij}` will be computed.
The precomputed stationary distribution will only be used if
reversible=True.
"""
if reversible:
try:
evals = eigenvalues_rev(T, k=k, mu=mu)
except ValueError:
evals = eigvals(T).real # use fallback code but cast to real
else:
evals = eigvals(T) # nonreversible
"""Sort by decreasing absolute value"""
ind = np.argsort(np.abs(evals))[::-1]
evals = evals[ind]
if isinstance(k, (list, set, tuple)):
try:
return [evals[n] for n in k]
except IndexError:
raise ValueError("given indices do not exist: ", k)
elif k is not None:
return evals[: k]
else:
return evals
def eigenvalues_rev(T, k=None, mu=None):
r"""Compute eigenvalues of reversible transition matrix.
Parameters
----------
T : (d, d) ndarray
Transition matrix (stochastic matrix)
k : int or tuple of ints, optional
Compute the first k eigenvalues of T
mu : (d,) ndarray, optional
Stationary distribution of T
Returns
-------
eig : (n,) ndarray,
The eigenvalues of T ordered with decreasing absolute value.
If k is None then n=d, if k is int then n=k otherwise
n is the length of the given tuple of eigenvalue indices.
Raises
------
ValueError
If stationary distribution is nonpositive.
"""
"""compute stationary distribution if not given"""
if mu is None:
mu = stationary_distribution(T)
if np.any(mu <= 0):
raise ValueError('Cannot symmetrize transition matrix')
""" symmetrize T """
smu = np.sqrt(mu)
S = smu[:,None] * T / smu
""" symmetric eigenvalue problem """
evals = eigvalsh(S)
return evals
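# Illustrative sketch (not part of msmtools): for the two-state chain
# T = [[1-a, a], [b, 1-b]] the spectrum is {1, 1-a-b}, so eigenvalues() above can be
# checked against the closed form. The rates a and b below are made up.
def _example_two_state_spectrum(a=0.1, b=0.3):
    T = np.array([[1.0 - a, a], [b, 1.0 - b]])
    evals = eigenvalues(T, reversible=True)
    assert np.allclose(np.sort(evals), np.sort([1.0 - a - b, 1.0]))
    return evals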
def eigenvectors(T, k=None, right=True, reversible=False, mu=None):
r"""Compute eigenvectors of transition matrix.
Parameters
----------
T : (d, d) ndarray
Transition matrix (stochastic matrix)
k : int or tuple of ints, optional
Compute the first k eigenvalues of T
right : bool, optional
If right=True compute right eigenvectors, left eigenvectors
otherwise
reversible : bool, optional
Indicate that transition matrix is reversible
mu : (d,) ndarray, optional
Stationary distribution of T
Returns
-------
eigvec : (d, n) ndarray
The eigenvectors of T ordered with decreasing absolute value
of the corresponding eigenvalue. If k is None then n=d, if k
is int then n=k otherwise n is the length of the given tuple
of eigenvector indices
Notes
-----
Eigenvectors are computed using the numpy.linalg interface for the
corresponding LAPACK routines.
    If reversible=True the eigenvectors of the similar symmetric
matrix `\sqrt(\mu_i / \mu_j) p_{ij}` will be used to compute the
eigenvectors of T.
The precomputed stationary distribution will only be used if
reversible=True.
"""
if reversible:
eigvec = eigenvectors_rev(T, right=right, mu=mu)
else:
eigvec = eigenvectors_nrev(T, right=right)
""" Return eigenvectors """
if k is None:
return eigvec
elif isinstance(k, numbers.Integral):
return eigvec[:, 0:k]
else:
ind = np.asarray(k)
return eigvec[:, ind]
def eigenvectors_nrev(T, right=True):
r"""Compute eigenvectors of transition matrix.
Parameters
----------
T : (d, d) ndarray
Transition matrix (stochastic matrix)
k : int or tuple of ints, optional
Compute the first k eigenvalues of T
right : bool, optional
If right=True compute right eigenvectors, left eigenvectors
otherwise
Returns
-------
eigvec : (d, d) ndarray
The eigenvectors of T ordered with decreasing absolute value
of the corresponding eigenvalue
"""
if right:
val, R = eig(T, left=False, right=True)
""" Sorted eigenvalues and left and right eigenvectors. """
perm = np.argsort(np.abs(val))[::-1]
# eigval=val[perm]
eigvec = R[:, perm]
else:
val, L = eig(T, left=True, right=False)
""" Sorted eigenvalues and left and right eigenvectors. """
perm = np.argsort(np.abs(val))[::-1]
# eigval=val[perm]
eigvec = L[:, perm]
return eigvec
def eigenvectors_rev(T, right=True, mu=None):
r"""Compute eigenvectors of reversible transition matrix.
Parameters
----------
T : (d, d) ndarray
Transition matrix (stochastic matrix)
right : bool, optional
If right=True compute right eigenvectors, left eigenvectors
otherwise
mu : (d,) ndarray, optional
Stationary distribution of T
Returns
-------
eigvec : (d, d) ndarray
The eigenvectors of T ordered with decreasing absolute value
of the corresponding eigenvalue
"""
if mu is None:
mu = stationary_distribution(T)
""" symmetrize T """
smu = np.sqrt(mu)
S = smu[:,None] * T / smu
val, eigvec = eigh(S)
"""Sort eigenvectors"""
perm = np.argsort(np.abs(val))[::-1]
eigvec = eigvec[:, perm]
if right:
return eigvec / smu[:, np.newaxis]
else:
return eigvec * smu[:, np.newaxis]
def rdl_decomposition(T, k=None, reversible=False, norm='standard', mu=None):
r"""Compute the decomposition into left and right eigenvectors.
Parameters
----------
T : (M, M) ndarray
Transition matrix
k : int (optional)
Number of eigenvector/eigenvalue pairs
norm: {'standard', 'reversible', 'auto'}
standard: (L'R) = Id, L[:,0] is a probability distribution,
the stationary distribution mu of T. Right eigenvectors
R have a 2-norm of 1.
reversible: R and L are related via L=L[:,0]*R.
auto: will be reversible if T is reversible, otherwise standard
reversible : bool, optional
Indicate that transition matrix is reversible
mu : (d,) ndarray, optional
Stationary distribution of T
Returns
-------
R : (M, M) ndarray
The normalized (with respect to L) right eigenvectors, such that the
column R[:,i] is the right eigenvector corresponding to the eigenvalue
w[i], dot(T,R[:,i])=w[i]*R[:,i]
D : (M, M) ndarray
A diagonal matrix containing the eigenvalues, each repeated
according to its multiplicity
L : (M, M) ndarray
The normalized (with respect to `R`) left eigenvectors, such that the
row ``L[i, :]`` is the left eigenvector corresponding to the eigenvalue
``w[i]``, ``dot(L[i, :], T)``=``w[i]*L[i, :]``
Notes
-----
    If reversible=True the eigenvalues and eigenvectors of the
similar symmetric matrix `\sqrt(\mu_i / \mu_j) p_{ij}` will be
used to compute the eigenvalues and eigenvectors of T.
The precomputed stationary distribution will only be used if
reversible=True.
"""
# auto-set norm
if norm == 'auto':
if is_reversible(T):
norm = 'reversible'
else:
norm = 'standard'
if reversible:
R, D, L = rdl_decomposition_rev(T, norm=norm, mu=mu)
else:
R, D, L = rdl_decomposition_nrev(T, norm=norm)
if reversible or norm == 'reversible':
D = D.real
if k is None:
return R, D, L
else:
return R[:, 0:k], D[0:k, 0:k], L[0:k, :]
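# Minimal usage sketch for rdl_decomposition (illustrative, not part of the
# original module). For a diagonalizable transition matrix T the returned
# factors reconstruct T and satisfy the normalization described above:
#
#   R, D, L = rdl_decomposition(T)
#   np.allclose(T, np.dot(R, np.dot(D, L)))          # spectral reconstruction
#   np.allclose(np.dot(L, R), np.eye(T.shape[0]))    # L R = Id
#   L[0, :]                                          # stationary distribution of T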
def rdl_decomposition_nrev(T, norm='standard'):
r"""Decomposition into left and right eigenvectors.
Parameters
----------
T : (M, M) ndarray
Transition matrix
norm: {'standard', 'reversible'}
standard: (L'R) = Id, L[:,0] is a probability distribution,
the stationary distribution mu of T. Right eigenvectors
R have a 2-norm of 1
reversible: R and L are related via L=L[:,0]*R
Returns
-------
R : (M, M) ndarray
The normalized (with respect to L) right eigenvectors, such that the
column R[:,i] is the right eigenvector corresponding to the eigenvalue
w[i], dot(T,R[:,i])=w[i]*R[:,i]
D : (M, M) ndarray
A diagonal matrix containing the eigenvalues, each repeated
according to its multiplicity
L : (M, M) ndarray
The normalized (with respect to `R`) left eigenvectors, such that the
row ``L[i, :]`` is the left eigenvector corresponding to the eigenvalue
``w[i]``, ``dot(L[i, :], T)``=``w[i]*L[i, :]``
"""
d = T.shape[0]
w, R = eig(T)
"""Sort by decreasing magnitude of eigenvalue"""
ind = np.argsort(np.abs(w))[::-1]
w = w[ind]
R = R[:, ind]
"""Diagonal matrix containing eigenvalues"""
D = np.diag(w)
# Standard norm: Euclidean norm is 1 for r and LR = I.
if norm == 'standard':
L = solve(np.transpose(R), np.eye(d))
"""l1- normalization of L[:, 0]"""
R[:, 0] = R[:, 0] * np.sum(L[:, 0])
L[:, 0] = L[:, 0] / np.sum(L[:, 0])
return R, D, np.transpose(L)
# Reversible norm:
elif norm == 'reversible':
b = np.zeros(d)
b[0] = 1.0
A = np.transpose(R)
nu = solve(A, b)
mu = nu / np.sum(nu)
"""Ensure that R[:,0] is positive"""
R[:, 0] = R[:, 0] / np.sign(R[0, 0])
"""Use mu to connect L and R"""
L = mu[:, np.newaxis] * R
"""Compute overlap"""
s = np.diag(np.dot(np.transpose(L), R))
"""Renormalize left-and right eigenvectors to ensure L'R=Id"""
R = R / np.sqrt(s[np.newaxis, :])
L = L / np.sqrt(s[np.newaxis, :])
return R, D, np.transpose(L)
else:
raise ValueError("Keyword 'norm' has to be either 'standard' or 'reversible'")
def rdl_decomposition_rev(T, norm='reversible', mu=None):
r"""Decomposition into left and right eigenvectors for reversible
transition matrices.
Parameters
----------
T : (M, M) ndarray
Transition matrix
norm: {'standard', 'reversible'}
standard: (L'R) = Id, L[:,0] is a probability distribution,
the stationary distribution mu of T. Right eigenvectors
R have a 2-norm of 1.
reversible: R and L are related via L=L[:,0]*R.
mu : (M,) ndarray, optional
Stationary distribution of T
Returns
-------
R : (M, M) ndarray
The normalized (with respect to L) right eigenvectors, such that the
column R[:,i] is the right eigenvector corresponding to the eigenvalue
w[i], dot(T,R[:,i])=w[i]*R[:,i]
D : (M, M) ndarray
A diagonal matrix containing the eigenvalues, each repeated
according to its multiplicity
L : (M, M) ndarray
The normalized (with respect to `R`) left eigenvectors, such that the
row ``L[i, :]`` is the left eigenvector corresponding to the eigenvalue
``w[i]``, ``dot(L[i, :], T)``=``w[i]*L[i, :]``
Notes
-----
The eigenvalues and eigenvectors of the similar symmetric matrix
`\sqrt(\mu_i / \mu_j) p_{ij}` will be used to compute the
eigenvalues and eigenvectors of T.
    The stationary distribution will be computed if no precomputed stationary
distribution is given.
"""
if mu is None:
mu = stationary_distribution(T)
""" symmetrize T """
smu = np.sqrt(mu)
S = smu[:,None] * T / smu
val, eigvec = eigh(S)
"""Sort eigenvalues and eigenvectors"""
perm = np.argsort(np.abs(val))[::-1]
val = val[perm]
eigvec = eigvec[:, perm]
"""Diagonal matrix of eigenvalues"""
D = np.diag(val)
"""Right and left eigenvectors"""
R = eigvec / smu[:, np.newaxis]
L = eigvec * smu[:, np.newaxis]
"""Ensure that R[:,0] is positive and unity"""
tmp = R[0, 0]
R[:, 0] = R[:, 0] / tmp
"""Ensure that L[:, 0] is probability vector"""
L[:, 0] = L[:, 0] * tmp
if norm == 'reversible':
return R, D, L.T
elif norm == 'standard':
"""Standard l2-norm of right eigenvectors"""
w = np.diag(np.dot(R.T, R))
sw = np.sqrt(w)
"""Don't change normalization of eigenvectors for dominant eigenvalue"""
sw[0] = 1.0
R = R / sw[np.newaxis, :]
L = L * sw[np.newaxis, :]
return R, D, L.T
else:
raise ValueError("Keyword 'norm' has to be either 'standard' or 'reversible'")
def timescales(T, tau=1, k=None, reversible=False, mu=None):
r"""Compute implied time scales of given transition matrix
Parameters
----------
T : (M, M) ndarray
Transition matrix
tau : int, optional
lag time
k : int, optional
Number of time scales
reversible : bool, optional
        Indicate that transition matrix is reversible
mu : (M,) ndarray, optional
Stationary distribution of T
Returns
-------
ts : (N,) ndarray
Implied time scales of the transition matrix.
If k=None then N=M else N=k
Notes
-----
    If reversible=True the eigenvalues of the similar symmetric
matrix `\sqrt(\mu_i / \mu_j) p_{ij}` will be computed.
The precomputed stationary distribution will only be used if
reversible=True.
"""
values = eigenvalues(T, reversible=reversible, mu=mu)
"""Sort by absolute value"""
ind = np.argsort(np.abs(values))[::-1]
values = values[ind]
    if k is not None:
        values = values[0:k]
"""Compute implied time scales"""
return timescales_from_eigenvalues(values, tau)
def timescales_from_eigenvalues(evals, tau=1):
r"""Compute implied time scales from given eigenvalues
Parameters
----------
evals : eigenvalues
tau : lag time
Returns
-------
ts : ndarray
The implied time scales to the given eigenvalues, in the same order.
"""
"""Check for dominant eigenvalues with large imaginary part"""
if not np.allclose(evals.imag, 0.0):
warnings.warn('Using eigenvalues with non-zero imaginary part', ImaginaryEigenValueWarning)
"""Check for multiple eigenvalues of magnitude one"""
ind_abs_one = np.isclose(np.abs(evals), 1.0, rtol=0.0, atol=1e-14)
if sum(ind_abs_one) > 1:
warnings.warn('Multiple eigenvalues with magnitude one.', SpectralWarning)
"""Compute implied time scales"""
ts = np.zeros(len(evals))
"""Eigenvalues of magnitude one imply infinite timescale"""
ts[ind_abs_one] = np.inf
"""All other eigenvalues give rise to finite timescales"""
ts[np.logical_not(ind_abs_one)] = \
-1.0 * tau / np.log(np.abs(evals[np.logical_not(ind_abs_one)]))
return ts
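# Worked example (illustrative values): for tau=10 and eigenvalues (1.0, 0.5)
# the Perron eigenvalue maps to an infinite timescale and the second one to
# -tau / ln|lambda| = -10 / ln(0.5) ~= 14.43:
#
#   timescales_from_eigenvalues(np.array([1.0, 0.5]), tau=10)
#   # -> array([ inf, 14.42695041])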
| markovmodel/msmtools | msmtools/analysis/dense/decomposition.py | Python | lgpl-3.0 | 17,097 | 0.000643 |
'''
Created on Feb 3, 2012
@author: marat
'''
import string
from myvector import Vector
class XYZ_file:
'''
    Reader/writer for molecular geometries stored in XYZ format.
'''
def __init__(self,name=None):
if name==None:
self.FileName = " "
self.NumAtom = 0
self.AtomName = []
self.AtomPos = []
self.AtomVel = []
self.AtomId = []
print "am here"
else:
print "loading file", name
self.LoadFile(name)
def LoadFile(self,FileName):
fp = open(str(FileName),'r')
self.FileName = FileName
lines = fp.readlines()
NumAtom = string.atoi(lines[0])
self.NumAtom = NumAtom
self.AtomName = [None]*NumAtom
self.AtomId = [None]*NumAtom
self.AtomPos = [None]*NumAtom
self.AtomVel = [None]*NumAtom
x = 0.0
y = 0.0
z = 0.0
for i in range(NumAtom):
self.AtomId[i] = i+1
AtomStr = string.split(lines[i+2])[0:4]
self.AtomName[i] = AtomStr[0]
x = string.atof(AtomStr[1])
y = string.atof(AtomStr[2])
z = string.atof(AtomStr[3])
self.AtomPos[i] = Vector(x,y,z)
self.AtomVel[i] = Vector()
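    # LoadFile expects the conventional XYZ layout (hypothetical example):
    #   3                        <- line 0: number of atoms
    #   water                    <- line 1: title/comment line (skipped, hence lines[i+2])
    #   O   0.000  0.000  0.000
    #   H   0.757  0.586  0.000
    #   H  -0.757  0.586  0.000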
def AddAtom(self,Name,x,y,z):
self.AtomId.append(self.NumAtom+1)
self.AtomName.append(Name)
self.AtomPos.append(Vector(x,y,z))
self.NumAtom = len(self.AtomId)
def MoveAtom(self,i,dr):
self.AtomPos[i-1] = self.AtomPos[i-1] + dr
def SetAtomVel(self,i,v):
self.AtomVel[i-1] = v
def BondVector(self,i1,i2):
dr = self.AtomPos[i2-1] - self.AtomPos[i1-1]
return dr
def BondLength(self,i1,i2):
dr = self.AtomPos[i2-1] - self.AtomPos[i1-1]
return dr.length()
def WriteFile(self,FileName):
fp = open(str(FileName),'w')
fp.write(str(self.NumAtom))
fp.write("\n")
fp.write("molecule")
fp.write("\n")
for i in range(self.NumAtom):
fp.write(self.AtomName[i])
fp.write(" ")
fp.write(str(self.AtomPos[i]))
# fp.write(" ")
# fp.write(str(self.AtomVel[i]))
fp.write("\n")
def AppendFile(self,FileName):
fp = open(str(FileName),'a')
fp.write(str(self.NumAtom))
fp.write("\n")
fp.write("molecule")
fp.write("\n")
for i in range(self.NumAtom):
fp.write(self.AtomName[i])
fp.write(" ")
fp.write(str(self.AtomPos[i]))
# fp.write(" ")
# fp.write(str(self.AtomVel[i]))
fp.write("\n")
if __name__ == '__main__':
a = XYZ_file("test.xyz")
print a.AtomPos[0]
print a.BondLength(1, 2)
a.WriteFile("test1.xyz")
| rangsimanketkaew/NWChem | contrib/marat/python/XYZ_file.py | Python | mit | 2,936 | 0.012602 |
#!/usr/bin/python
# -*- encoding: utf-8; py-indent-offset: 4 -*-
# +------------------------------------------------------------------+
# | ____ _ _ __ __ _ __ |
# | / ___| |__ ___ ___| | __ | \/ | |/ / |
# | | | | '_ \ / _ \/ __| |/ / | |\/| | ' / |
# | | |___| | | | __/ (__| < | | | | . \ |
# | \____|_| |_|\___|\___|_|\_\___|_| |_|_|\_\ |
# | |
# | Copyright Mathias Kettner 2014 mk@mathias-kettner.de |
# +------------------------------------------------------------------+
#
# This file is part of Check_MK.
# The official homepage is at http://mathias-kettner.de/check_mk.
#
# check_mk is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation in version 2. check_mk is distributed
# in the hope that it will be useful, but WITHOUT ANY WARRANTY; with-
# out even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE. See the GNU General Public License for more de-
# tails. You should have received a copy of the GNU General Public
# License along with GNU Make; see the file COPYING. If not, write
# to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
# Boston, MA 02110-1301 USA.
import userdb
multisite_cronjobs.append(userdb.execute_userdb_job)
| ypid-bot/check_mk | web/plugins/cron/user_sync.py | Python | gpl-2.0 | 1,544 | 0 |
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
from distutils import util
import os
import re
from typing import Dict, Optional, Sequence, Tuple, Type, Union
from google.api_core import client_options as client_options_lib # type: ignore
from google.api_core import exceptions as core_exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport import mtls # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
from google.auth.exceptions import MutualTLSChannelError # type: ignore
from google.oauth2 import service_account # type: ignore
from google.ads.googleads.v8.services.types import conversion_upload_service
from google.rpc import status_pb2 # type: ignore
from .transports.base import (
ConversionUploadServiceTransport,
DEFAULT_CLIENT_INFO,
)
from .transports.grpc import ConversionUploadServiceGrpcTransport
class ConversionUploadServiceClientMeta(type):
"""Metaclass for the ConversionUploadService client.
This provides class-level methods for building and retrieving
support objects (e.g. transport) without polluting the client instance
objects.
"""
_transport_registry = (
OrderedDict()
) # type: Dict[str, Type[ConversionUploadServiceTransport]]
_transport_registry["grpc"] = ConversionUploadServiceGrpcTransport
def get_transport_class(
cls, label: str = None,
) -> Type[ConversionUploadServiceTransport]:
"""Return an appropriate transport class.
Args:
label: The name of the desired transport. If none is
provided, then the first transport in the registry is used.
Returns:
The transport class to use.
"""
# If a specific transport is requested, return that one.
if label:
return cls._transport_registry[label]
# No transport is requested; return the default (that is, the first one
# in the dictionary).
return next(iter(cls._transport_registry.values()))
class ConversionUploadServiceClient(
metaclass=ConversionUploadServiceClientMeta
):
"""Service to upload conversions."""
@staticmethod
def _get_default_mtls_endpoint(api_endpoint):
"""Convert api endpoint to mTLS endpoint.
Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
"*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
Args:
api_endpoint (Optional[str]): the api endpoint to convert.
Returns:
str: converted mTLS api endpoint.
"""
if not api_endpoint:
return api_endpoint
mtls_endpoint_re = re.compile(
r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
)
m = mtls_endpoint_re.match(api_endpoint)
name, mtls, sandbox, googledomain = m.groups()
if mtls or not googledomain:
return api_endpoint
if sandbox:
return api_endpoint.replace(
"sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
)
return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
DEFAULT_ENDPOINT = "googleads.googleapis.com"
DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
DEFAULT_ENDPOINT
)
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
"""Creates an instance of this client using the provided credentials info.
Args:
info (dict): The service account private key info.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
ConversionUploadServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_info(
info
)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
ConversionUploadServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_file(
filename
)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
from_service_account_json = from_service_account_file
@property
def transport(self) -> ConversionUploadServiceTransport:
"""Return the transport used by the client instance.
Returns:
ConversionUploadServiceTransport: The transport used by the client instance.
"""
return self._transport
@staticmethod
def conversion_custom_variable_path(
customer_id: str, conversion_custom_variable_id: str,
) -> str:
"""Return a fully-qualified conversion_custom_variable string."""
return "customers/{customer_id}/conversionCustomVariables/{conversion_custom_variable_id}".format(
customer_id=customer_id,
conversion_custom_variable_id=conversion_custom_variable_id,
)
@staticmethod
def parse_conversion_custom_variable_path(path: str) -> Dict[str, str]:
"""Parse a conversion_custom_variable path into its component segments."""
m = re.match(
r"^customers/(?P<customer_id>.+?)/conversionCustomVariables/(?P<conversion_custom_variable_id>.+?)$",
path,
)
return m.groupdict() if m else {}
@staticmethod
def common_billing_account_path(billing_account: str,) -> str:
"""Return a fully-qualified billing_account string."""
return "billingAccounts/{billing_account}".format(
billing_account=billing_account,
)
@staticmethod
def parse_common_billing_account_path(path: str) -> Dict[str, str]:
"""Parse a billing_account path into its component segments."""
m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_folder_path(folder: str,) -> str:
"""Return a fully-qualified folder string."""
return "folders/{folder}".format(folder=folder,)
@staticmethod
def parse_common_folder_path(path: str) -> Dict[str, str]:
"""Parse a folder path into its component segments."""
m = re.match(r"^folders/(?P<folder>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_organization_path(organization: str,) -> str:
"""Return a fully-qualified organization string."""
return "organizations/{organization}".format(organization=organization,)
@staticmethod
def parse_common_organization_path(path: str) -> Dict[str, str]:
"""Parse a organization path into its component segments."""
m = re.match(r"^organizations/(?P<organization>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_project_path(project: str,) -> str:
"""Return a fully-qualified project string."""
return "projects/{project}".format(project=project,)
@staticmethod
def parse_common_project_path(path: str) -> Dict[str, str]:
"""Parse a project path into its component segments."""
m = re.match(r"^projects/(?P<project>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_location_path(project: str, location: str,) -> str:
"""Return a fully-qualified location string."""
return "projects/{project}/locations/{location}".format(
project=project, location=location,
)
@staticmethod
def parse_common_location_path(path: str) -> Dict[str, str]:
"""Parse a location path into its component segments."""
m = re.match(
r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path
)
return m.groupdict() if m else {}
def __init__(
self,
*,
credentials: Optional[ga_credentials.Credentials] = None,
transport: Union[str, ConversionUploadServiceTransport, None] = None,
client_options: Optional[client_options_lib.ClientOptions] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the conversion upload service client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
transport (Union[str, ~.ConversionUploadServiceTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
client_options (google.api_core.client_options.ClientOptions): Custom options for the
client. It won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
use the default regular endpoint) and "auto" (auto switch to the
default mTLS endpoint if client certificate is present, this is
the default value). However, the ``api_endpoint`` property takes
precedence if provided.
(2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
is "true", then the ``client_cert_source`` property can be used
to provide client certificate for mutual TLS transport. If
not provided, the default SSL client certificate will be used if
present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
set, no client certificate will be used.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
"""
if isinstance(client_options, dict):
client_options = client_options_lib.from_dict(client_options)
if client_options is None:
client_options = client_options_lib.ClientOptions()
# Create SSL credentials for mutual TLS if needed.
use_client_cert = bool(
util.strtobool(
os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
)
)
ssl_credentials = None
is_mtls = False
if use_client_cert:
if client_options.client_cert_source:
import grpc # type: ignore
cert, key = client_options.client_cert_source()
ssl_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
is_mtls = True
else:
creds = SslCredentials()
is_mtls = creds.is_mtls
ssl_credentials = creds.ssl_credentials if is_mtls else None
# Figure out which api endpoint to use.
if client_options.api_endpoint is not None:
api_endpoint = client_options.api_endpoint
else:
use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
if use_mtls_env == "never":
api_endpoint = self.DEFAULT_ENDPOINT
elif use_mtls_env == "always":
api_endpoint = self.DEFAULT_MTLS_ENDPOINT
elif use_mtls_env == "auto":
api_endpoint = (
self.DEFAULT_MTLS_ENDPOINT
if is_mtls
else self.DEFAULT_ENDPOINT
)
else:
raise MutualTLSChannelError(
"Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always"
)
# Save or instantiate the transport.
# Ordinarily, we provide the transport, but allowing a custom transport
# instance provides an extensibility point for unusual situations.
if isinstance(transport, ConversionUploadServiceTransport):
# transport is a ConversionUploadServiceTransport instance.
if credentials:
raise ValueError(
"When providing a transport instance, "
"provide its credentials directly."
)
self._transport = transport
elif isinstance(transport, str):
Transport = type(self).get_transport_class(transport)
self._transport = Transport(
credentials=credentials, host=self.DEFAULT_ENDPOINT
)
else:
self._transport = ConversionUploadServiceGrpcTransport(
credentials=credentials,
host=api_endpoint,
ssl_channel_credentials=ssl_credentials,
client_info=client_info,
)
def upload_click_conversions(
self,
request: conversion_upload_service.UploadClickConversionsRequest = None,
*,
customer_id: str = None,
conversions: Sequence[conversion_upload_service.ClickConversion] = None,
partial_failure: bool = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> conversion_upload_service.UploadClickConversionsResponse:
r"""Processes the given click conversions.
List of thrown errors: `AuthenticationError <>`__
`AuthorizationError <>`__ `ConversionUploadError <>`__
`HeaderError <>`__ `InternalError <>`__
`PartialFailureError <>`__ `QuotaError <>`__ `RequestError <>`__
Args:
request (:class:`google.ads.googleads.v8.services.types.UploadClickConversionsRequest`):
The request object. Request message for
[ConversionUploadService.UploadClickConversions][google.ads.googleads.v8.services.ConversionUploadService.UploadClickConversions].
customer_id (:class:`str`):
Required. The ID of the customer
performing the upload.
This corresponds to the ``customer_id`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
conversions (:class:`Sequence[google.ads.googleads.v8.services.types.ClickConversion]`):
Required. The conversions that are
being uploaded.
This corresponds to the ``conversions`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
partial_failure (:class:`bool`):
Required. If true, successful
operations will be carried out and
invalid operations will return errors.
If false, all operations will be carried
out in one transaction if and only if
they are all valid. This should always
be set to true.
See
https://developers.google.com/google-
ads/api/docs/best-practices/partial-
failures for more information about
partial failure.
This corresponds to the ``partial_failure`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.ads.googleads.v8.services.types.UploadClickConversionsResponse:
Response message for
[ConversionUploadService.UploadClickConversions][google.ads.googleads.v8.services.ConversionUploadService.UploadClickConversions].
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
if request is not None and any(
[customer_id, conversions, partial_failure]
):
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a conversion_upload_service.UploadClickConversionsRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(
request, conversion_upload_service.UploadClickConversionsRequest
):
request = conversion_upload_service.UploadClickConversionsRequest(
request
)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if customer_id is not None:
request.customer_id = customer_id
if conversions is not None:
request.conversions = conversions
if partial_failure is not None:
request.partial_failure = partial_failure
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[
self._transport.upload_click_conversions
]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata(
(("customer_id", request.customer_id),)
),
)
# Send the request.
response = rpc(
request, retry=retry, timeout=timeout, metadata=metadata,
)
# Done; return the response.
return response
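    # Minimal usage sketch (illustrative only; the ClickConversion field names
    # below are assumptions and should be checked against the installed
    # google-ads library version):
    #
    #   client = ConversionUploadServiceClient(credentials=credentials)
    #   conversion = conversion_upload_service.ClickConversion(
    #       gclid="EXAMPLE_GCLID",
    #       conversion_action="customers/1234567890/conversionActions/987654",
    #       conversion_date_time="2021-01-01 12:00:00+00:00",
    #       conversion_value=1.0,
    #   )
    #   response = client.upload_click_conversions(
    #       customer_id="1234567890",
    #       conversions=[conversion],
    #       partial_failure=True,
    #   )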
def upload_call_conversions(
self,
request: conversion_upload_service.UploadCallConversionsRequest = None,
*,
customer_id: str = None,
conversions: Sequence[conversion_upload_service.CallConversion] = None,
partial_failure: bool = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> conversion_upload_service.UploadCallConversionsResponse:
r"""Processes the given call conversions.
List of thrown errors: `AuthenticationError <>`__
`AuthorizationError <>`__ `HeaderError <>`__
`InternalError <>`__ `PartialFailureError <>`__
`QuotaError <>`__ `RequestError <>`__
Args:
request (:class:`google.ads.googleads.v8.services.types.UploadCallConversionsRequest`):
The request object. Request message for
[ConversionUploadService.UploadCallConversions][google.ads.googleads.v8.services.ConversionUploadService.UploadCallConversions].
customer_id (:class:`str`):
Required. The ID of the customer
performing the upload.
This corresponds to the ``customer_id`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
conversions (:class:`Sequence[google.ads.googleads.v8.services.types.CallConversion]`):
Required. The conversions that are
being uploaded.
This corresponds to the ``conversions`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
partial_failure (:class:`bool`):
Required. If true, successful
operations will be carried out and
invalid operations will return errors.
If false, all operations will be carried
out in one transaction if and only if
they are all valid. This should always
be set to true.
See
https://developers.google.com/google-
ads/api/docs/best-practices/partial-
failures for more information about
partial failure.
This corresponds to the ``partial_failure`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.ads.googleads.v8.services.types.UploadCallConversionsResponse:
Response message for
[ConversionUploadService.UploadCallConversions][google.ads.googleads.v8.services.ConversionUploadService.UploadCallConversions].
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
if request is not None and any(
[customer_id, conversions, partial_failure]
):
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a conversion_upload_service.UploadCallConversionsRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(
request, conversion_upload_service.UploadCallConversionsRequest
):
request = conversion_upload_service.UploadCallConversionsRequest(
request
)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if customer_id is not None:
request.customer_id = customer_id
if conversions is not None:
request.conversions = conversions
if partial_failure is not None:
request.partial_failure = partial_failure
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[
self._transport.upload_call_conversions
]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata(
(("customer_id", request.customer_id),)
),
)
# Send the request.
response = rpc(
request, retry=retry, timeout=timeout, metadata=metadata,
)
# Done; return the response.
return response
__all__ = ("ConversionUploadServiceClient",)
| googleads/google-ads-python | google/ads/googleads/v8/services/services/conversion_upload_service/client.py | Python | apache-2.0 | 25,206 | 0.001071 |
from __future__ import absolute_import
from django.contrib.auth.models import UserManager
from django.utils.timezone import now as timezone_now
from zerver.models import UserProfile, Recipient, Subscription, Realm, Stream
import base64
import ujson
import os
import string
from six.moves import range
from typing import Optional, Text
def random_api_key():
# type: () -> Text
choices = string.ascii_letters + string.digits
altchars = ''.join([choices[ord(os.urandom(1)) % 62] for _ in range(2)]).encode("utf-8")
return base64.b64encode(os.urandom(24), altchars=altchars).decode("utf-8")
# create_user_profile is based on Django's User.objects.create_user,
# except that we don't save to the database so it can used in
# bulk_creates
#
# Only use this for bulk_create -- for normal usage one should use
# create_user (below) which will also make the Subscription and
# Recipient objects
def create_user_profile(realm, email, password, active, bot_type, full_name,
short_name, bot_owner, is_mirror_dummy, tos_version,
timezone, tutorial_status=UserProfile.TUTORIAL_WAITING,
enter_sends=False):
# type: (Realm, Text, Optional[Text], bool, Optional[int], Text, Text, Optional[UserProfile], bool, Text, Optional[Text], Optional[Text], bool) -> UserProfile
now = timezone_now()
email = UserManager.normalize_email(email)
user_profile = UserProfile(email=email, is_staff=False, is_active=active,
full_name=full_name, short_name=short_name,
last_login=now, date_joined=now, realm=realm,
pointer=-1, is_bot=bool(bot_type), bot_type=bot_type,
bot_owner=bot_owner, is_mirror_dummy=is_mirror_dummy,
tos_version=tos_version, timezone=timezone,
tutorial_status=tutorial_status,
enter_sends=enter_sends,
onboarding_steps=ujson.dumps([]),
default_language=realm.default_language)
if bot_type or not active:
password = None
user_profile.set_password(password)
user_profile.api_key = random_api_key()
return user_profile
def create_user(email, password, realm, full_name, short_name,
active=True, bot_type=None, bot_owner=None, tos_version=None,
timezone=u"", avatar_source=UserProfile.AVATAR_FROM_GRAVATAR,
is_mirror_dummy=False, default_sending_stream=None,
default_events_register_stream=None,
default_all_public_streams=None, user_profile_id=None):
# type: (Text, Optional[Text], Realm, Text, Text, bool, Optional[int], Optional[UserProfile], Optional[Text], Text, Text, bool, Optional[Stream], Optional[Stream], Optional[bool], Optional[int]) -> UserProfile
user_profile = create_user_profile(realm, email, password, active, bot_type,
full_name, short_name, bot_owner,
is_mirror_dummy, tos_version, timezone)
user_profile.avatar_source = avatar_source
user_profile.timezone = timezone
user_profile.default_sending_stream = default_sending_stream
user_profile.default_events_register_stream = default_events_register_stream
# Allow the ORM default to be used if not provided
if default_all_public_streams is not None:
user_profile.default_all_public_streams = default_all_public_streams
if user_profile_id is not None:
user_profile.id = user_profile_id
user_profile.save()
recipient = Recipient.objects.create(type_id=user_profile.id,
type=Recipient.PERSONAL)
Subscription.objects.create(user_profile=user_profile, recipient=recipient)
return user_profile
| vabs22/zulip | zerver/lib/create_user.py | Python | apache-2.0 | 3,927 | 0.002546 |
import fnmatch
import glob
import os
import re
import tempfile
from gppylib import gplog
from gppylib.commands.base import WorkerPool, Command, REMOTE
from gppylib.commands.unix import Scp
from gppylib.db import dbconn
from gppylib.db.dbconn import execSQL
from gppylib.gparray import GpArray
from pygresql import pg
from gppylib.operations.utils import DEFAULT_NUM_WORKERS
import gzip
logger = gplog.get_default_logger()
def expand_partitions_and_populate_filter_file(dbname, partition_list, file_prefix):
expanded_partitions = expand_partition_tables(dbname, partition_list)
dump_partition_list = list(set(expanded_partitions + partition_list))
return create_temp_file_from_list(dump_partition_list, file_prefix)
def populate_filter_tables(table, rows, non_partition_tables, partition_leaves):
if not rows:
non_partition_tables.append(table)
else:
for (schema_name, partition_leaf_name) in rows:
partition_leaf = schema_name.strip() + '.' + partition_leaf_name.strip()
partition_leaves.append(partition_leaf)
return (non_partition_tables, partition_leaves)
def get_all_parent_tables(dbname):
SQL = "SELECT DISTINCT (schemaname || '.' || tablename) FROM pg_partitions"
data = []
with dbconn.connect(dbconn.DbURL(dbname=dbname)) as conn:
curs = dbconn.execSQL(conn, SQL)
data = curs.fetchall()
return set([d[0] for d in data])
def list_to_quoted_string(filter_tables):
filter_string = "'" + "', '".join([pg.escape_string(t) for t in filter_tables]) + "'"
return filter_string
def convert_parents_to_leafs(dbname, parents):
partition_leaves_sql = """
SELECT x.partitionschemaname || '.' || x.partitiontablename
FROM (
SELECT distinct schemaname, tablename, partitionschemaname, partitiontablename, partitionlevel
FROM pg_partitions
WHERE schemaname || '.' || tablename in (%s)
) as X,
(SELECT schemaname, tablename maxtable, max(partitionlevel) maxlevel
FROM pg_partitions
group by (tablename, schemaname)
) as Y
WHERE x.schemaname = y.schemaname and x.tablename = Y.maxtable and x.partitionlevel = Y.maxlevel;
"""
if not parents:
return []
conn = dbconn.connect(dbconn.DbURL(dbname=dbname))
partition_sql = partition_leaves_sql % list_to_quoted_string(parents)
curs = dbconn.execSQL(conn, partition_sql)
rows = curs.fetchall()
return [r[0] for r in rows]
#input: list of tables to be filtered
#output: same list but parent tables converted to leaves
def expand_partition_tables(dbname, filter_tables):
if not filter_tables or len(filter_tables) == 0:
return filter_tables
parent_tables = list()
non_parent_tables = list()
expanded_list = list()
all_parent_tables = get_all_parent_tables(dbname)
for table in filter_tables:
if table in all_parent_tables:
parent_tables.append(table)
else:
non_parent_tables.append(table)
expanded_list += non_parent_tables
local_batch_size = 1000
for (s, e) in get_batch_from_list(len(parent_tables), local_batch_size):
tmp = convert_parents_to_leafs(dbname, parent_tables[s:e])
expanded_list += tmp
return expanded_list
def get_batch_from_list(length, batch_size):
indices = []
for i in range(0, length, batch_size):
indices.append((i, i+batch_size))
return indices
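# Illustrative behaviour (hypothetical values): batching 2500 items with
# batch_size=1000 yields half-open index ranges; slicing with them is safe
# even though the last range overshoots the list length:
#
#   get_batch_from_list(2500, 1000)
#   # -> [(0, 1000), (1000, 2000), (2000, 3000)]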
def create_temp_file_from_list(entries, prefix):
"""
When writing the entries into temp file, don't do any strip as there might be
white space in schema name and table name.
"""
if len(entries) == 0:
return None
fd = tempfile.NamedTemporaryFile(mode='w', prefix=prefix, delete=False)
for entry in entries:
fd.write(entry + '\n')
tmp_file_name = fd.name
fd.close()
verify_lines_in_file(tmp_file_name, entries)
return tmp_file_name
def create_temp_file_with_tables(table_list):
return create_temp_file_from_list(table_list, 'table_list_')
def create_temp_file_with_schemas(schema_list):
return create_temp_file_from_list(schema_list, 'schema_file_')
def validate_timestamp(timestamp):
if not timestamp:
return False
if len(timestamp) != 14:
return False
if timestamp.isdigit():
return True
else:
return False
def check_successful_dump(report_file_contents):
for line in report_file_contents:
if line.strip() == 'gp_dump utility finished successfully.':
return True
return False
def get_ddboost_backup_directory():
"""
    The gpddboost --show-config command gives us all the ddboost
    configuration details. The line of its output that starts with
    "Default Backup Directory:" gives us the backup directory
    configured with ddboost.
"""
cmd_str = 'gpddboost --show-config'
cmd = Command('Get the ddboost backup directory', cmd_str)
cmd.run(validateAfter=True)
config = cmd.get_results().stdout.splitlines()
for line in config:
if line.strip().startswith("Default Backup Directory:"):
ddboost_dir = line.split(':')[-1].strip()
if ddboost_dir is None or ddboost_dir == "":
logger.error("Expecting format: Default Backup Directory:<dir>")
raise Exception("DDBOOST default backup directory is not configured. Or the format of the line has changed")
return ddboost_dir
logger.error("Could not find Default Backup Directory:<dir> in stdout")
raise Exception("Output: %s from command %s not in expected format." % (config, cmd_str))
# raise exception for bad data
def convert_reportfilename_to_cdatabasefilename(report_file, dump_prefix, ddboost=False):
(dirname, fname) = os.path.split(report_file)
timestamp = fname[-18:-4]
if ddboost:
dirname = get_ddboost_backup_directory()
dirname = "%s/%s" % (dirname, timestamp[0:8])
return "%s/%sgp_cdatabase_1_1_%s" % (dirname, dump_prefix, timestamp)
def get_lines_from_dd_file(filename):
cmd = Command('DDBoost copy of master dump file',
'gpddboost --readFile --from-file=%s'
% (filename))
cmd.run(validateAfter=True)
contents = cmd.get_results().stdout.splitlines()
return contents
def check_cdatabase_exists(dbname, report_file, dump_prefix, ddboost=False, netbackup_service_host=None, netbackup_block_size=None):
try:
filename = convert_reportfilename_to_cdatabasefilename(report_file, dump_prefix, ddboost)
except Exception:
return False
if ddboost:
cdatabase_contents = get_lines_from_dd_file(filename)
elif netbackup_service_host:
restore_file_with_nbu(netbackup_service_host, netbackup_block_size, filename)
cdatabase_contents = get_lines_from_file(filename)
else:
cdatabase_contents = get_lines_from_file(filename)
dbname = escapeDoubleQuoteInSQLString(dbname, forceDoubleQuote=False)
for line in cdatabase_contents:
if 'CREATE DATABASE' in line:
dump_dbname = get_dbname_from_cdatabaseline(line)
if dump_dbname is None:
continue
else:
if dbname == checkAndRemoveEnclosingDoubleQuote(dump_dbname):
return True
return False
def get_dbname_from_cdatabaseline(line):
"""
Line format: CREATE DATABASE "DBNAME" WITH TEMPLATE = template0 ENCODING = 'UTF8' OWNER = gpadmin;
    To get the dbname: take the substring between the end of "CREATE DATABASE "
    and the start of the " WITH TEMPLATE = " occurrence that is not inside any
    double quotes. A double quote inside a name is escaped by doubling it, so
    exactly one " WITH TEMPLATE = " lies outside all quotes: the one whose
    preceding and following substrings each contain an even number of double
    quotes.
    Note: the OWNER name can also contain special characters with double quotes.
"""
cdatabase = "CREATE DATABASE "
try:
start = line.index(cdatabase)
except Exception as e:
logger.error('Failed to find substring %s in line %s, error: %s' % (cdatabase, line, str(e)))
return None
keyword = " WITH TEMPLATE = "
pos = get_nonquoted_keyword_index(line, keyword, '"', len(keyword))
if pos != -1:
dbname = line[start+len(cdatabase) : pos]
return dbname
return None
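# Illustrative parse (hypothetical line): the returned name keeps the enclosing
# double quotes and any doubled inner quotes; check_cdatabase_exists strips the
# enclosing quotes afterwards:
#
#   get_dbname_from_cdatabaseline(
#       'CREATE DATABASE "my""db" WITH TEMPLATE = template0 ENCODING = \'UTF8\' OWNER = gpadmin;')
#   # -> '"my""db"'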
def get_nonquoted_keyword_index(line, keyword, quote, keyword_len):
# quote can be single quote or double quote
all_positions = get_all_occurrences(keyword, line)
if all_positions != None and len(all_positions) > 0:
for pos in all_positions:
pre_string = line[:pos]
post_string = line[pos + keyword_len:]
quotes_before = get_all_occurrences('%s' % quote, pre_string)
quotes_after = get_all_occurrences('%s' % quote, post_string)
num_quotes_before = 0 if (quotes_before is None or len(quotes_before) == 0) else len(quotes_before)
num_quotes_after = 0 if (quotes_after is None or len(quotes_after) == 0) else len(quotes_after)
if num_quotes_before % 2 == 0 and num_quotes_after % 2 == 0:
return pos
return -1
def get_all_occurrences(substr, line):
# substr is used for generating the pattern, escape those special chars in regexp
if substr is None or line is None or len(substr) > len(line):
return None
return [m.start() for m in re.finditer('(?=%s)' % substr, line)]
def get_type_ts_from_report_file(dbname, report_file, backup_type, dump_prefix, ddboost=False, netbackup_service_host=None, netbackup_block_size=None):
report_file_contents = get_lines_from_file(report_file)
if not check_successful_dump(report_file_contents):
return None
if not check_cdatabase_exists(dbname, report_file, dump_prefix, ddboost, netbackup_service_host, netbackup_block_size):
return None
if check_backup_type(report_file_contents, backup_type):
return get_timestamp_val(report_file_contents)
return None
def get_full_ts_from_report_file(dbname, report_file, dump_prefix, ddboost=False, netbackup_service_host=None, netbackup_block_size=None):
return get_type_ts_from_report_file(dbname, report_file, 'Full', dump_prefix, ddboost, netbackup_service_host, netbackup_block_size)
def get_incremental_ts_from_report_file(dbname, report_file, dump_prefix, ddboost=False, netbackup_service_host=None, netbackup_block_size=None):
return get_type_ts_from_report_file(dbname, report_file, 'Incremental', dump_prefix, ddboost, netbackup_service_host, netbackup_block_size)
def get_timestamp_val(report_file_contents):
for line in report_file_contents:
if line.startswith('Timestamp Key'):
timestamp = line.split(':')[-1].strip()
if not validate_timestamp(timestamp):
raise Exception('Invalid timestamp value found in report_file')
return timestamp
return None
def check_backup_type(report_file_contents, backup_type):
for line in report_file_contents:
if line.startswith('Backup Type'):
if line.split(':')[-1].strip() == backup_type:
return True
return False
def get_lines_from_zipped_file(fname):
"""
Don't strip white space here as it may be part of schema name and table name
"""
content = []
fd = gzip.open(fname, 'r')
try:
for line in fd:
content.append(line.strip('\n'))
    except Exception as err:
raise Exception("Error reading from file %s: %s" % (fname, err))
finally:
fd.close()
return content
def get_lines_from_file(fname, ddboost=None):
"""
Don't strip white space here as it may be part of schema name and table name
"""
content = []
if ddboost:
contents = get_lines_from_dd_file(fname)
return contents
else:
with open(fname) as fd:
for line in fd:
content.append(line.strip('\n'))
return content
def write_lines_to_file(filename, lines):
"""
Don't do strip in line for white space in case it is part of schema name or table name
"""
with open(filename, 'w') as fp:
for line in lines:
fp.write("%s\n" % line.strip('\n'))
def verify_lines_in_file(fname, expected):
lines = get_lines_from_file(fname)
if lines != expected:
raise Exception("After writing file '%s' contents not as expected.\n"
"Lines read from file %s\n"
"Lines expected from file %s\n"
"Suspected IO error" % (fname, lines, expected))
def check_dir_writable(directory):
fp = None
try:
tmp_file = os.path.join(directory, 'tmp_file')
fp = open(tmp_file, 'w')
except IOError as e:
raise Exception('No write access permission on %s' % directory)
except Exception as e:
raise Exception(str(e))
finally:
if fp is not None:
fp.close()
if os.path.isfile(tmp_file):
os.remove(tmp_file)
def execute_sql(query, master_port, dbname):
dburl = dbconn.DbURL(port=master_port, dbname=dbname)
conn = dbconn.connect(dburl)
cursor = execSQL(conn, query)
return cursor.fetchall()
def get_backup_directory(master_data_dir, backup_dir, dump_dir, timestamp):
if backup_dir:
use_dir = backup_dir
elif master_data_dir:
use_dir = master_data_dir
else:
raise Exception("Can not locate backup directory with existing parameters")
if not timestamp:
raise Exception("Can not locate backup directory without timestamp")
if not validate_timestamp(timestamp):
raise Exception('Invalid timestamp: "%s"' % timestamp)
return "%s/%s/%s" % (use_dir, dump_dir, timestamp[0:8])
def generate_schema_filename(master_data_dir, backup_dir, dump_dir, dump_prefix, timestamp, ddboost=False):
if ddboost:
return "%s/%s/%s/%sgp_dump_%s_schema" % (master_data_dir, dump_dir, timestamp[0:8], dump_prefix, timestamp)
use_dir = get_backup_directory(master_data_dir, backup_dir, dump_dir, timestamp)
return "%s/%sgp_dump_%s_schema" % (use_dir, dump_prefix, timestamp)
def generate_report_filename(master_data_dir, backup_dir, dump_dir, dump_prefix, timestamp, ddboost=False):
if ddboost:
return "%s/%s/%s/%sgp_dump_%s.rpt" % (master_data_dir, dump_dir, timestamp[0:8], dump_prefix, timestamp)
use_dir = get_backup_directory(master_data_dir, backup_dir, dump_dir, timestamp)
return "%s/%sgp_dump_%s.rpt" % (use_dir, dump_prefix, timestamp)
def generate_increments_filename(master_data_dir, backup_dir, dump_dir, dump_prefix, timestamp, ddboost=False):
if ddboost:
return "%s/%s/%s/%sgp_dump_%s_increments" % (master_data_dir, dump_dir, timestamp[0:8], dump_prefix, timestamp)
use_dir = get_backup_directory(master_data_dir, backup_dir, dump_dir, timestamp)
return "%s/%sgp_dump_%s_increments" % (use_dir, dump_prefix, timestamp)
def generate_pgstatlastoperation_filename(master_data_dir, backup_dir, dump_dir, dump_prefix, timestamp, ddboost=False):
if ddboost:
return "%s/%s/%s/%sgp_dump_%s_last_operation" % (master_data_dir, dump_dir, timestamp[0:8], dump_prefix, timestamp)
use_dir = get_backup_directory(master_data_dir, backup_dir, dump_dir, timestamp)
return "%s/%sgp_dump_%s_last_operation" % (use_dir, dump_prefix, timestamp)
def generate_dirtytable_filename(master_data_dir, backup_dir, dump_dir, dump_prefix, timestamp, ddboost=False):
if ddboost:
return "%s/%s/%s/%sgp_dump_%s_dirty_list" % (master_data_dir, dump_dir, timestamp[0:8], dump_prefix, timestamp)
use_dir = get_backup_directory(master_data_dir, backup_dir, dump_dir, timestamp)
return "%s/%sgp_dump_%s_dirty_list" % (use_dir, dump_prefix, timestamp)
def generate_plan_filename(master_data_dir, backup_dir, dump_dir, dump_prefix, timestamp):
use_dir = get_backup_directory(master_data_dir, backup_dir, dump_dir, timestamp)
return "%s/%sgp_restore_%s_plan" % (use_dir, dump_prefix, timestamp)
def generate_metadata_filename(master_data_dir, backup_dir, dump_dir, dump_prefix, timestamp):
use_dir = get_backup_directory(master_data_dir, backup_dir, dump_dir, timestamp)
return "%s/%sgp_dump_1_1_%s.gz" % (use_dir, dump_prefix, timestamp)
def generate_partition_list_filename(master_data_dir, backup_dir, dump_dir, dump_prefix, timestamp):
use_dir = get_backup_directory(master_data_dir, backup_dir, dump_dir, timestamp)
return "%s/%sgp_dump_%s_table_list" % (use_dir, dump_prefix, timestamp)
def generate_ao_state_filename(master_data_dir, backup_dir, dump_dir, dump_prefix, timestamp, ddboost=False):
if ddboost:
return "%s/%s/%s/%sgp_dump_%s_ao_state_file" % (master_data_dir, dump_dir, timestamp[0:8], dump_prefix, timestamp)
use_dir = get_backup_directory(master_data_dir, backup_dir, dump_dir, timestamp)
return "%s/%sgp_dump_%s_ao_state_file" % (use_dir, dump_prefix, timestamp)
def generate_co_state_filename(master_data_dir, backup_dir, dump_dir, dump_prefix, timestamp, ddboost=False):
if ddboost:
return "%s/%s/%s/%sgp_dump_%s_co_state_file" % (master_data_dir, dump_dir, timestamp[0:8], dump_prefix, timestamp)
use_dir = get_backup_directory(master_data_dir, backup_dir, dump_dir, timestamp)
return "%s/%sgp_dump_%s_co_state_file" % (use_dir, dump_prefix, timestamp)
def generate_files_filename(master_data_dir, backup_dir, dump_dir, dump_prefix, timestamp):
use_dir = get_backup_directory(master_data_dir, backup_dir, dump_dir, timestamp)
return '%s/%sgp_dump_%s_regular_files' % (use_dir, dump_prefix, timestamp)
def generate_pipes_filename(master_data_dir, backup_dir, dump_dir, dump_prefix, timestamp):
use_dir = get_backup_directory(master_data_dir, backup_dir, dump_dir, timestamp)
return '%s/%sgp_dump_%s_pipes' % (use_dir, dump_prefix, timestamp)
def generate_master_config_filename(dump_prefix, timestamp):
return '%sgp_master_config_files_%s.tar' % (dump_prefix, timestamp)
def generate_segment_config_filename(dump_prefix, segid, timestamp):
return '%sgp_segment_config_files_0_%d_%s.tar' % (dump_prefix, segid, timestamp)
def generate_filter_filename(master_data_dir, backup_dir, dump_dir, dump_prefix, timestamp):
use_dir = get_backup_directory(master_data_dir, backup_dir, dump_dir, timestamp)
return '%s/%s%s_filter' % (use_dir, generate_dbdump_prefix(dump_prefix), timestamp)
def generate_global_prefix(dump_prefix):
return '%sgp_global_1_1_' % (dump_prefix)
def generate_master_dbdump_prefix(dump_prefix):
return '%sgp_dump_1_1_' % (dump_prefix)
def generate_master_status_prefix(dump_prefix):
return '%sgp_dump_status_1_1_' % (dump_prefix)
def generate_seg_dbdump_prefix(dump_prefix):
return '%sgp_dump_0_' % (dump_prefix)
def generate_seg_status_prefix(dump_prefix):
return '%sgp_dump_status_0_' % (dump_prefix)
def generate_dbdump_prefix(dump_prefix):
return '%sgp_dump_' % (dump_prefix)
def generate_createdb_prefix(dump_prefix):
return '%sgp_cdatabase_1_1_' % (dump_prefix)
def generate_stats_prefix(dump_prefix):
return '%sgp_statistics_1_1_' % (dump_prefix)
def generate_createdb_filename(master_data_dir, backup_dir, dump_dir, dump_prefix, timestamp, ddboost=False):
if ddboost:
return '%s/%s/%s/%s%s' % (master_data_dir, dump_dir, timestamp[0:8], generate_createdb_prefix(dump_prefix), timestamp)
use_dir = get_backup_directory(master_data_dir, backup_dir, dump_dir, timestamp)
return '%s/%s%s' % (use_dir, generate_createdb_prefix(dump_prefix), timestamp)
def get_dump_dirs(dump_dir_base, dump_dir):
dump_path = os.path.join(dump_dir_base, dump_dir)
if not os.path.isdir(dump_path):
return []
initial_list = os.listdir(dump_path)
initial_list = fnmatch.filter(initial_list, '[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]')
dirnames = []
for d in initial_list:
pth = os.path.join(dump_path, d)
if os.path.isdir(pth):
dirnames.append(pth)
if len(dirnames) == 0:
return []
dirnames = sorted(dirnames, key=lambda x: int(os.path.basename(x)), reverse=True)
return dirnames
def get_latest_report_timestamp(backup_dir, dump_dir, dump_prefix):
dump_dirs = get_dump_dirs(backup_dir, dump_dir)
for d in dump_dirs:
latest = get_latest_report_in_dir(d, dump_prefix)
if latest:
return latest
return None
def get_latest_report_in_dir(report_dir, dump_prefix):
files = os.listdir(report_dir)
if len(files) == 0:
return None
dump_report_files = fnmatch.filter(files, '%sgp_dump_[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9].rpt' % dump_prefix)
if len(dump_report_files) == 0:
return None
dump_report_files = sorted(dump_report_files, key=lambda x: int(x.split('_')[-1].split('.')[0]), reverse=True)
return dump_report_files[0][-18:-4]
def get_timestamp_from_increments_filename(filename, dump_prefix):
fname = os.path.basename(filename)
parts = fname.split('_')
    # Expect exactly 4 underscore-separated parts when there is no prefix, or more than 4 when there is a prefix
if not ((not dump_prefix and len(parts) == 4) or (dump_prefix and len(parts) > 4)):
raise Exception("Invalid increments file '%s' passed to get_timestamp_from_increments_filename" % filename)
return parts[-2].strip()
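# Illustrative call (hypothetical path, empty dump_prefix): the basename splits
# into 4 underscore-separated parts and the timestamp is the second-to-last one:
#
#   get_timestamp_from_increments_filename(
#       '/backups/db_dumps/20170101/gp_dump_20170101093000_increments', '')
#   # -> '20170101093000'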
def get_full_timestamp_for_incremental(master_datadir, dump_dir, dump_prefix, incremental_timestamp, backup_dir=None, ddboost=False, netbackup_service_host=None, netbackup_block_size=None):
full_timestamp = None
if netbackup_service_host:
full_timestamp = get_full_timestamp_for_incremental_with_nbu(dump_prefix, incremental_timestamp, netbackup_service_host, netbackup_block_size)
else:
if ddboost:
backup_dir = master_datadir
else:
backup_dir = get_restore_dir(master_datadir, backup_dir)
pattern = '%s/%s/[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]/%sgp_dump_[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]_increments' % (backup_dir, dump_dir, dump_prefix)
increments_files = glob.glob(pattern)
for increments_file in increments_files:
increment_ts = get_lines_from_file(increments_file)
if incremental_timestamp in increment_ts:
full_timestamp = get_timestamp_from_increments_filename(increments_file, dump_prefix)
break
if not full_timestamp:
raise Exception("Could not locate fullbackup associated with timestamp '%s'. Either increments file or full backup is missing." % incremental_timestamp)
return full_timestamp
# backup_dir will be either MDD or some other directory depending on call
def get_latest_full_dump_timestamp(dbname, backup_dir, dump_dir, dump_prefix, ddboost=False):
if not backup_dir:
raise Exception('Invalid None param to get_latest_full_dump_timestamp')
dump_dirs = get_dump_dirs(backup_dir, dump_dir)
for dump_dir in dump_dirs:
files = sorted(os.listdir(dump_dir))
if len(files) == 0:
logger.warn('Dump directory %s is empty' % dump_dir)
continue
dump_report_files = fnmatch.filter(files, '%sgp_dump_[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9].rpt' % dump_prefix)
if len(dump_report_files) == 0:
logger.warn('No dump report files found in dump directory %s' % dump_dir)
continue
dump_report_files = sorted(dump_report_files, key=lambda x: int(x.split('_')[-1].split('.')[0]), reverse=True)
for dump_report_file in dump_report_files:
logger.debug('Checking for latest timestamp in report file %s' % os.path.join(dump_dir, dump_report_file))
timestamp = get_full_ts_from_report_file(dbname, os.path.join(dump_dir, dump_report_file), dump_prefix, ddboost)
logger.debug('Timestamp = %s' % timestamp)
if timestamp is not None:
return timestamp
raise Exception('No full backup found for incremental')
def get_all_segment_addresses(master_port):
gparray = GpArray.initFromCatalog(dbconn.DbURL(port=master_port), utility=True)
addresses = [seg.getSegmentAddress() for seg in gparray.getDbList() if seg.isSegmentPrimary(current_role=True)]
return list(set(addresses))
def scp_file_to_hosts(host_list, filename, batch_default):
pool = WorkerPool(numWorkers=min(len(host_list), batch_default))
for hname in host_list:
pool.addCommand(Scp('Copying table_filter_file to %s' % hname,
srcFile=filename,
dstFile=filename,
dstHost=hname))
pool.join()
pool.haltWork()
pool.check_results()
def run_pool_command(host_list, cmd_str, batch_default, check_results=True):
pool = WorkerPool(numWorkers=min(len(host_list), batch_default))
for host in host_list:
cmd = Command(host, cmd_str, ctxt=REMOTE, remoteHost=host)
pool.addCommand(cmd)
pool.join()
pool.haltWork()
if check_results:
pool.check_results()
def check_funny_chars_in_names(names, is_full_qualified_name=True):
"""
    '\n' inside a table name makes it hard to specify the object name on the shell command line;
    this may be worked around by using a table file, but currently we read input line by line.
    '!' inside a table name will mess up the shell history expansion.
    ',' is used for separating tables in the plan file during incremental restore.
    '.' (dot) is currently being used for fully qualified table names in the format: schema.table
"""
if names and len(names) > 0:
for name in names:
if ('\t' in name or '\n' in name or '!' in name or ',' in name or
(is_full_qualified_name and name.count('.') > 1) or (not is_full_qualified_name and name.count('.') > 0)):
raise Exception('Name has an invalid character "\\t" "\\n" "!" "," ".": "%s"' % name)
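# Hedged usage sketch (added for illustration; the names below are made up):
#
#   check_funny_chars_in_names(['public.sales'])                          # passes: one dot in a qualified name
#   check_funny_chars_in_names(['sales'], is_full_qualified_name=False)   # passes: no dot expected, none present
#   check_funny_chars_in_names(['bad!name'])                              # raises Exception: '!' is rejected
#   check_funny_chars_in_names(['a.b.c'])                                 # raises Exception: more than one dot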
# Form and run the command line to back up an individual file with NBU
def backup_file_with_nbu(netbackup_service_host, netbackup_policy, netbackup_schedule, netbackup_block_size, netbackup_keyword, netbackup_filepath, hostname=None):
command_string = "cat %s | gp_bsa_dump_agent --netbackup-service-host %s --netbackup-policy %s --netbackup-schedule %s --netbackup-filename %s" % (netbackup_filepath, netbackup_service_host, netbackup_policy, netbackup_schedule, netbackup_filepath)
if netbackup_block_size is not None:
command_string += " --netbackup-block-size %s" % netbackup_block_size
if netbackup_keyword is not None:
command_string += " --netbackup-keyword %s" % netbackup_keyword
logger.debug("Command string inside 'backup_file_with_nbu': %s\n", command_string)
if hostname is None:
Command("dumping metadata files from master", command_string).run(validateAfter=True)
else:
Command("dumping metadata files from segment", command_string, ctxt=REMOTE, remoteHost=hostname).run(validateAfter=True)
logger.debug("Command ran successfully\n")
# Form and run the command line to restore an individual file with NBU
def restore_file_with_nbu(netbackup_service_host, netbackup_block_size, netbackup_filepath, hostname=None):
command_string = "gp_bsa_restore_agent --netbackup-service-host %s --netbackup-filename %s > %s" % (netbackup_service_host, netbackup_filepath, netbackup_filepath)
if netbackup_block_size is not None:
command_string += " --netbackup-block-size %s" % netbackup_block_size
logger.debug("Command string inside 'restore_file_with_nbu': %s\n", command_string)
if hostname is None:
Command("restoring metadata files to master", command_string).run(validateAfter=True)
else:
Command("restoring metadata files to segment", command_string, ctxt=REMOTE, remoteHost=hostname).run(validateAfter=True)
def check_file_dumped_with_nbu(netbackup_service_host, netbackup_filepath, hostname=None):
command_string = "gp_bsa_query_agent --netbackup-service-host %s --netbackup-filename %s" % (netbackup_service_host, netbackup_filepath)
logger.debug("Command string inside 'check_file_dumped_with_nbu': %s\n", command_string)
if hostname is None:
cmd = Command("Querying NetBackup server to check for dumped file", command_string)
else:
cmd = Command("Querying NetBackup server to check for dumped file", command_string, ctxt=REMOTE, remoteHost=hostname)
cmd.run(validateAfter=True)
if cmd.get_results().stdout.strip() == netbackup_filepath:
return True
else:
return False
def generate_global_filename(master_data_dir, backup_dir, dump_dir, dump_prefix, dump_date, timestamp):
if backup_dir is not None:
dir_path = backup_dir
else:
dir_path = master_data_dir
return os.path.join(dir_path, dump_dir, dump_date, "%s%s" % (generate_global_prefix(dump_prefix), timestamp))
def generate_cdatabase_filename(master_data_dir, backup_dir, dump_dir, dump_prefix, timestamp):
use_dir = get_backup_directory(master_data_dir, backup_dir, dump_dir, timestamp)
return "%s/%sgp_cdatabase_1_1_%s" % (use_dir, dump_prefix, timestamp)
def generate_stats_filename(master_data_dir, backup_dir, dump_dir, dump_prefix, dump_date, timestamp):
if backup_dir is not None:
dir_path = backup_dir
else:
dir_path = master_data_dir
return os.path.join(dir_path, dump_dir, dump_date, "%s%s" % (generate_stats_prefix(dump_prefix), timestamp))
def get_full_timestamp_for_incremental_with_nbu(dump_prefix, incremental_timestamp, netbackup_service_host, netbackup_block_size):
if dump_prefix:
get_inc_files_cmd = "gp_bsa_query_agent --netbackup-service-host=%s --netbackup-list-dumped-objects=%sgp_dump_*_increments" % (netbackup_service_host, dump_prefix)
else:
get_inc_files_cmd = "gp_bsa_query_agent --netbackup-service-host=%s --netbackup-list-dumped-objects=gp_dump_*_increments" % netbackup_service_host
cmd = Command("Query NetBackup server to get the list of increments files backed up", get_inc_files_cmd)
cmd.run(validateAfter=True)
files_list = cmd.get_results().stdout.split('\n')
for line in files_list:
fname = line.strip()
restore_file_with_nbu(netbackup_service_host, netbackup_block_size, fname)
contents = get_lines_from_file(fname)
if incremental_timestamp in contents:
full_timestamp = get_timestamp_from_increments_filename(fname, dump_prefix)
return full_timestamp
return None
def get_latest_full_ts_with_nbu(dbname, backup_dir, dump_prefix, netbackup_service_host, netbackup_block_size):
if dump_prefix:
get_rpt_files_cmd = "gp_bsa_query_agent --netbackup-service-host=%s --netbackup-list-dumped-objects=%sgp_dump_*.rpt" % (netbackup_service_host, dump_prefix)
else:
get_rpt_files_cmd = "gp_bsa_query_agent --netbackup-service-host=%s --netbackup-list-dumped-objects=gp_dump_*.rpt" % netbackup_service_host
cmd = Command("Query NetBackup server to get the list of report files backed up", get_rpt_files_cmd)
cmd.run(validateAfter=True)
files_list = cmd.get_results().stdout.split('\n')
for line in files_list:
fname = line.strip()
if fname == '':
continue
if backup_dir not in fname:
continue
if ("No object matched the specified predicate" in fname) or ("No objects of the format" in fname):
return None
restore_file_with_nbu(netbackup_service_host, netbackup_block_size, fname)
timestamp = get_full_ts_from_report_file(dbname, fname, dump_prefix, netbackup_service_host=netbackup_service_host, netbackup_block_size=netbackup_block_size)
logger.debug('Timestamp = %s' % timestamp)
if timestamp is not None:
return timestamp
raise Exception('No full backup found for given incremental on the specified NetBackup server')
def getRows(dbname, exec_sql):
with dbconn.connect(dbconn.DbURL(dbname=dbname)) as conn:
curs = dbconn.execSQL(conn, exec_sql)
results = curs.fetchall()
return results
def check_schema_exists(schema_name, dbname):
schemaname = pg.escape_string(schema_name)
schema_check_sql = "select * from pg_catalog.pg_namespace where nspname='%s';" % schemaname
if len(getRows(dbname, schema_check_sql)) < 1:
return False
return True
def unescape_string(string):
if string:
string = string.replace('\\\\', '\\').replace("''", "'")
return string
def isDoubleQuoted(string):
if len(string) > 2 and string[0] == '"' and string[-1] == '"':
return True
return False
def checkAndRemoveEnclosingDoubleQuote(string):
if isDoubleQuoted(string):
string = string[1 : len(string) - 1]
return string
def checkAndAddEnclosingDoubleQuote(string):
if not isDoubleQuoted(string):
string = '"' + string + '"'
return string
def escapeDoubleQuoteInSQLString(string, forceDoubleQuote=True):
"""
    Accept a true database name, schema name, or table name; escape any double quote
    inside the name and add enclosing double quotes by default.
"""
string = string.replace('"', '""')
if forceDoubleQuote:
string = '"' + string + '"'
return string
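# Illustration (added comment); the expected results follow directly from the code above:
#
#   escapeDoubleQuoteInSQLString('my"table')                          -> '"my""table"'
#   escapeDoubleQuoteInSQLString('my"table', forceDoubleQuote=False)  -> 'my""table'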
def removeEscapingDoubleQuoteInSQLString(string, forceDoubleQuote=True):
"""
Remove the escaping double quote in database/schema/table name.
"""
if string is None:
return string
string = string.replace('""', '"')
if forceDoubleQuote:
string = '"' + string + '"'
return string
def formatSQLString(rel_file, isTableName=False):
"""
    Read the fully qualified schema or table names; if each item is a
    table name, split it into schema and table, and escape any double
    quote inside the name properly.
"""
relnames = []
if rel_file and os.path.exists(rel_file):
with open(rel_file, 'r') as fr:
lines = fr.read().strip('\n').split('\n')
for line in lines:
if isTableName:
schema, table = split_fqn(line)
schema = escapeDoubleQuoteInSQLString(schema)
table = escapeDoubleQuoteInSQLString(table)
relnames.append(schema + '.' + table)
else:
schema = escapeDoubleQuoteInSQLString(line)
relnames.append(schema)
if len(relnames) > 0:
tmp_file = create_temp_file_from_list(relnames, os.path.basename(rel_file))
return tmp_file
def split_fqn(fqn_name):
"""
    Split a fully qualified table name into schema and table by the separator '.'.
"""
try:
schema, table = fqn_name.split('.')
except Exception as e:
logger.error("Failed to split name %s into schema and table, please check the format is schema.table" % fqn_name)
raise Exception('%s' % str(e))
return schema, table
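# Illustration (added comment, hypothetical names):
#
#   split_fqn('public.sales')  -> ('public', 'sales')
#   split_fqn('sales')         -> raises Exception (no '.' to split on)
#   split_fqn('a.b.c')         -> raises Exception (too many values to unpack)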
def remove_file_on_segments(master_port, filename, batch_default=DEFAULT_NUM_WORKERS):
addresses = get_all_segment_addresses(master_port)
try:
cmd = 'rm -f %s' % filename
run_pool_command(addresses, cmd, batch_default, check_results=False)
except Exception as e:
logger.error("cleaning up file failed: %s" % e.__str__())
def get_restore_dir(data_dir, backup_dir):
if backup_dir is not None:
return backup_dir
else:
return data_dir
def get_table_info(line):
"""
    Splitting is ambiguous when the table name/schema name/user name/tablespace name
    contains the full marker text of one of the others, which is very unlikely, but in
    case it happens, return None.
    Since we only care about the table name, type, and schema name, stripping the input
    is safe here.
    line: contains the true (un-escaped) schema name, table name, and user name.
"""
COMMENT_EXPR = '-- Name: '
TYPE_EXPR = '; Type: '
SCHEMA_EXPR = '; Schema: '
OWNER_EXPR = '; Owner: '
TABLESPACE_EXPR = '; Tablespace: '
temp = line.strip('\n')
type_start = get_all_occurrences(TYPE_EXPR, temp)
schema_start = get_all_occurrences(SCHEMA_EXPR, temp)
owner_start = get_all_occurrences(OWNER_EXPR, temp)
tblspace_start = get_all_occurrences(TABLESPACE_EXPR, temp)
if len(type_start) != 1 or len(schema_start) != 1 or len(owner_start) != 1:
return (None, None, None, None)
name = temp[len(COMMENT_EXPR) : type_start[0]]
type = temp[type_start[0] + len(TYPE_EXPR) : schema_start[0]]
schema = temp[schema_start[0] + len(SCHEMA_EXPR) : owner_start[0]]
if not tblspace_start:
tblspace_start.append(None)
owner = temp[owner_start[0] + len(OWNER_EXPR) : tblspace_start[0]]
return (name, type, schema, owner)
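# Illustration (added comment): a typical pg_dump-style comment line and the tuple this
# function would return for it, assuming get_all_occurrences() yields the offsets of each
# marker string. The object names are sample values only.
#
#   get_table_info('-- Name: sales; Type: TABLE; Schema: public; Owner: gpadmin\n')
#       -> ('sales', 'TABLE', 'public', 'gpadmin')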
def get_master_dump_file(master_datadir, backup_dir, dump_dir, timestamp, dump_prefix, ddboost):
"""
    Generate the path to the master dump file for ddboost, local cluster and netbackup dumps;
    this function does not generate paths to other remote dump locations.
    Currently the netbackup and local dumps both use the same backup directory.
    DDBoost is different from netbackup & local dump.
"""
dump_file_name = "%s%s" % (generate_master_dbdump_prefix(dump_prefix), timestamp)
if ddboost:
dump_file = os.path.join(dump_dir, timestamp[0:8], dump_file_name)
else:
dump_file = os.path.join(get_restore_dir(master_datadir, backup_dir), dump_dir, timestamp[0:8], dump_file_name)
return dump_file
| foyzur/gpdb | gpMgmt/bin/gppylib/operations/backup_utils.py | Python | apache-2.0 | 38,070 | 0.00633 |
# Copyright (C) 2014-2017 Red Hat Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
from __future__ import absolute_import
import logging
from collections import deque
from uuid import uuid4
import functools
from vdsm import concurrent
from vdsm import constants
from vdsm import utils
from vdsm.config import config
from vdsm.common import api
from vdsm.common.compat import json
from vdsm.sslutils import SSLSocket
from . import JsonRpcClient, JsonRpcServer
from . import stomp
from .betterAsyncore import Dispatcher, Reactor
_STATE_LEN = "Waiting for message length"
_STATE_MSG = "Waiting for message"
def parseHeartBeatHeader(v):
try:
x, y = v.split(",", 1)
except ValueError:
x, y = (0, 0)
try:
x = int(x)
except ValueError:
x = 0
try:
y = int(y)
except ValueError:
y = 0
return (x, y)
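# Illustrative behaviour (added comment; the header values are examples, not from a real frame):
#
#   parseHeartBeatHeader("0,0")        -> (0, 0)
#   parseHeartBeatHeader("5000,8000")  -> (5000, 8000)
#   parseHeartBeatHeader("garbage")    -> (0, 0)   # malformed values fall back to 0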
class StompAdapterImpl(object):
log = logging.getLogger("Broker.StompAdapter")
"""
    This class is responsible for stomp message processing
    on the server side. It uses two dictionaries to track
    request/response destinations.
    sub_map - maps a destination id to a _Subscription object
    representing a stomp subscription.
req_dest - maps a request id to a destination.
"""
def __init__(self, reactor, sub_map, req_dest):
self._reactor = reactor
self._outbox = deque()
self._sub_dests = sub_map
self._req_dest = req_dest
self._sub_ids = {}
request_queues = config.get('addresses', 'request_queues')
self.request_queues = request_queues.split(",")
self._commands = {
stomp.Command.CONNECT: self._cmd_connect,
stomp.Command.SEND: self._cmd_send,
stomp.Command.SUBSCRIBE: self._cmd_subscribe,
stomp.Command.UNSUBSCRIBE: self._cmd_unsubscribe,
stomp.Command.DISCONNECT: self._cmd_disconnect}
@property
def has_outgoing_messages(self):
return (len(self._outbox) > 0)
def peek_message(self):
return self._outbox[0]
def pop_message(self):
return self._outbox.popleft()
def queue_frame(self, frame):
self._outbox.append(frame)
def remove_subscriptions(self):
for sub in self._sub_ids.values():
self._remove_subscription(sub)
self._sub_ids.clear()
def _cmd_connect(self, dispatcher, frame):
self.log.info("Processing CONNECT request")
version = frame.headers.get(stomp.Headers.ACCEPT_VERSION, None)
if version != "1.2":
resp = stomp.Frame(
stomp.Command.ERROR,
None,
"Version unsupported"
)
else:
resp = stomp.Frame(stomp.Command.CONNECTED, {"version": "1.2"})
cx, cy = parseHeartBeatHeader(
frame.headers.get(stomp.Headers.HEARTEBEAT, "0,0")
)
# Make sure the heart-beat interval is sane
if cy != 0:
cy = max(cy, 1000)
# The server can send a heart-beat every cy ms and doesn't want
# to receive any heart-beat from the client.
resp.headers[stomp.Headers.HEARTEBEAT] = "%d,0" % (cy,)
dispatcher.setHeartBeat(cy)
self.queue_frame(resp)
self._reactor.wakeup()
def _cmd_subscribe(self, dispatcher, frame):
self.log.info("Subscribe command received")
destination = frame.headers.get("destination", None)
sub_id = frame.headers.get("id", None)
if not destination or not sub_id:
self._send_error("Missing destination or subscription id header",
dispatcher.connection)
return
if sub_id in self._sub_ids:
self._send_error("Subscription id already exists",
dispatcher.connection)
return
ack = frame.headers.get("ack", stomp.AckMode.AUTO)
subscription = stomp._Subscription(dispatcher.connection, destination,
sub_id, ack, None)
self._sub_dests[destination].append(subscription)
self._sub_ids[sub_id] = subscription
def _send_error(self, msg, connection):
res = stomp.Frame(
stomp.Command.ERROR,
None,
msg
)
connection.send_raw(res)
def _cmd_unsubscribe(self, dispatcher, frame):
self.log.info("Unsubscribe command received")
sub_id = frame.headers.get("id", None)
if not sub_id:
self._send_error("Missing id header",
dispatcher.connection)
return
try:
subscription = self._sub_ids.pop(sub_id)
except KeyError:
self.log.debug("No subscription for %s id",
sub_id)
return
else:
self._remove_subscription(subscription)
def _cmd_disconnect(self, dispatcher, frame):
self.log.info("Disconnect command received")
r_id = frame.headers[stomp.Headers.RECEIPT]
if not r_id:
self.log.debug("No receipt id for disconnect frame")
# it is not mandatory to send receipt frame
return
headers = {stomp.Headers.RECEIPT_ID: r_id}
dispatcher.connection.send_raw(stomp.Frame(stomp.Command.RECEIPT,
headers))
def _remove_subscription(self, subscription):
subs = self._sub_dests[subscription.destination]
if len(subs) == 1:
del self._sub_dests[subscription.destination]
else:
if subscription in subs:
subs.remove(subscription)
def _cmd_send(self, dispatcher, frame):
destination = frame.headers.get(stomp.Headers.DESTINATION, None)
# Get the list of all known subscribers.
subs = self.find_subscribers(destination)
# Forward the message to all explicit subscribers.
for subscription in subs:
self._forward_frame(subscription, frame)
# Is this a command that is meant to be answered
# by the internal implementation?
if any(destination == queue or destination.startswith(queue + ".")
for queue in self.request_queues):
self._handle_internal(dispatcher,
frame.headers.get(stomp.Headers.REPLY_TO),
frame.headers.get(stomp.Headers.FLOW_ID),
frame.body)
return
# This was not a command nor there were any subscribers,
# return an error!
if not subs:
self._send_error("Subscription not available",
dispatcher.connection)
def _forward_frame(self, subscription, frame):
"""
This method creates a new frame with the right body
and updated headers and forwards it to the subscriber.
"""
headers = {stomp.Headers.SUBSCRIPTION: subscription.id}
headers.update(frame.headers)
res = stomp.Frame(
stomp.Command.MESSAGE,
headers,
frame.body
)
subscription.client.send_raw(res)
def _handle_internal(self, dispatcher, req_dest, flow_id, request):
"""
        We need to build a response dictionary which maps a message id
        to its destination. For legacy mode we use the known 3.5 destination;
        for standard mode we use the 'reply-to' header.
"""
try:
self._handle_destination(dispatcher, req_dest, json.loads(request))
except Exception:
# let json server process issue
pass
dispatcher.connection.handleMessage(request, flow_id)
def _handle_destination(self, dispatcher, req_dest, request):
"""
        We could receive a single message or a batch of messages. We need
        to build a response map entry for each message.
"""
if isinstance(request, list):
map(functools.partial(self._handle_destination, dispatcher,
req_dest),
request)
return
self._req_dest[request.get("id")] = req_dest
def handle_frame(self, dispatcher, frame):
try:
self._commands[frame.command](dispatcher, frame)
except KeyError:
self.log.warn("Unknown command %s", frame)
dispatcher.handle_error()
def find_subscribers(self, destination):
"""Return all subscribers that are interested in the destination
or its parents. Hierarchy is defined using dot as the separator.
"""
destination_segments = destination.split(".")
subscriptions = []
for parts in range(len(destination_segments)):
candidate_dest = ".".join(destination_segments[:parts + 1])
if candidate_dest in self._sub_dests:
subscriptions.extend(self._sub_dests[candidate_dest])
return subscriptions
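    # Illustration (added comment, hypothetical destination): for the destination
    # "jms.topic.vdsm_requests" the candidates checked are "jms", "jms.topic" and
    # "jms.topic.vdsm_requests"; subscribers registered on any of those keys
    # receive the frame.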
class _StompConnection(object):
def __init__(self, server, aclient, sock, reactor):
self._reactor = reactor
self._server = server
self._messageHandler = None
self._async_client = aclient
self._dispatcher = reactor.create_dispatcher(
sock, stomp.AsyncDispatcher(self, aclient))
self._client_host = self._dispatcher.addr[0]
self._client_port = self._dispatcher.addr[1]
def send_raw(self, msg):
self._async_client.queue_frame(msg)
self._reactor.wakeup()
def setTimeout(self, timeout):
self._dispatcher.socket.settimeout(timeout)
def connect(self):
pass
def close(self):
self._dispatcher.close()
if hasattr(self._async_client, 'remove_subscriptions'):
self._async_client.remove_subscriptions()
def get_local_address(self):
return self._dispatcher.socket.getsockname()[0]
def set_message_handler(self, msgHandler):
self._messageHandler = msgHandler
self._dispatcher.handle_read_event()
def handleMessage(self, data, flow_id):
if self._messageHandler is not None:
context = api.Context(flow_id, self._client_host,
self._client_port)
self._messageHandler((self._server, self.get_local_address(),
context, data))
def is_closed(self):
return not self._dispatcher.connected
class StompServer(object):
log = logging.getLogger("yajsonrpc.StompServer")
def __init__(self, reactor, subscriptions):
self._reactor = reactor
self._messageHandler = None
self._sub_map = subscriptions
self._req_dest = {}
def add_client(self, sock):
adapter = StompAdapterImpl(self._reactor, self._sub_map,
self._req_dest)
return _StompConnection(self, adapter, sock,
self._reactor)
"""
Sends message to all subscribes that subscribed to destination.
"""
def send(self, message, destination=stomp.SUBSCRIPTION_ID_RESPONSE):
resp = json.loads(message)
if not isinstance(resp, dict):
raise ValueError(
'Provided message %s failed parsing to dictionary' % message)
# pylint: disable=no-member
response_id = resp.get("id")
try:
destination = self._req_dest[response_id]
del self._req_dest[response_id]
except KeyError:
# we could have no reply-to or we could send events (no message id)
pass
try:
connections = self._sub_map[destination]
except KeyError:
self.log.warn("Attempt to reply to unknown destination %s",
destination)
return
for connection in connections:
res = stomp.Frame(
stomp.Command.MESSAGE,
{
stomp.Headers.DESTINATION: destination,
stomp.Headers.CONTENT_TYPE: "application/json",
stomp.Headers.SUBSCRIPTION: connection.id
},
message
)
# we need to check whether the channel is not closed
if not connection.client.is_closed():
connection.client.send_raw(res)
class StompClient(object):
log = logging.getLogger("jsonrpc.AsyncoreClient")
"""
    We create a client by providing a socket used for communication,
    a reactor object responsible for processing I/O, and a flag
    which tells the client whether it should manage the reactor's
    life cycle (owns_reactor, True by default).
"""
def __init__(self, sock, reactor, owns_reactor=True):
self._reactor = reactor
self._owns_reactor = owns_reactor
self._messageHandler = None
self._socket = sock
self._aclient = stomp.AsyncClient()
self._stompConn = _StompConnection(
self,
self._aclient,
sock,
reactor
)
self._aclient.handle_connect()
def setTimeout(self, timeout):
self._stompConn.setTimeout(timeout)
def connect(self):
self._stompConn.connect()
def handle_message(self, sub, frame):
if self._messageHandler is not None:
self._messageHandler((self, frame.body))
def set_message_handler(self, msgHandler):
self._messageHandler = msgHandler
self.check_read()
def check_read(self):
if isinstance(self._socket, SSLSocket) and self._socket.pending() > 0:
self._stompConn._dispatcher.handle_read()
def subscribe(self, *args, **kwargs):
sub = self._aclient.subscribe(*args, **kwargs)
self._reactor.wakeup()
return sub
def unsubscribe(self, sub):
self._aclient.unsubscribe(sub)
def send(self, message, destination=stomp.SUBSCRIPTION_ID_RESPONSE,
headers=None):
self.log.debug("Sending response")
if self._stompConn.is_closed():
raise stomp.Disconnected()
self._aclient.send(
destination,
message,
headers
)
self._reactor.wakeup()
def close(self):
self._stompConn.close()
if self._owns_reactor:
self._reactor.stop()
def StompListener(reactor, server, acceptHandler, connected_socket):
impl = StompListenerImpl(server, acceptHandler, connected_socket)
return Dispatcher(impl, connected_socket, map=reactor._map)
# FIXME: We should go about making a listener wrapper like the client wrapper
# This is not as high priority as users don't interact with listeners
# as much
class StompListenerImpl(object):
log = logging.getLogger("jsonrpc.StompListener")
def __init__(self, server, acceptHandler, connected_socket):
self._socket = connected_socket
self._acceptHandler = acceptHandler
self._server = server
def init(self, dispatcher):
dispatcher.set_reuse_addr()
conn = self._server.add_client(self._socket)
self._acceptHandler(conn)
def writable(self, dispatcher):
return False
class StompReactor(object):
def __init__(self, subs):
self._reactor = Reactor()
self._server = StompServer(self._reactor, subs)
def createListener(self, connected_socket, acceptHandler):
listener = StompListener(
self._reactor,
self._server,
acceptHandler,
connected_socket
)
self._reactor.wakeup()
return listener
@property
def server(self):
return self._server
def createClient(self, connected_socket, owns_reactor=False):
return StompClient(connected_socket, self._reactor,
owns_reactor=owns_reactor)
def process_requests(self):
self._reactor.process_requests()
def stop(self):
self._reactor.stop()
class StompDetector():
log = logging.getLogger("protocoldetector.StompDetector")
NAME = "stomp"
REQUIRED_SIZE = max(len(s) for s in stomp.COMMANDS)
def __init__(self, json_binding):
self.json_binding = json_binding
self._reactor = self.json_binding.reactor
def detect(self, data):
return data.startswith(stomp.COMMANDS)
def handle_socket(self, client_socket, socket_address):
self.json_binding.add_socket(self._reactor, client_socket)
self.log.debug("Stomp detected from %s", socket_address)
class ServerRpcContextAdapter(object):
"""
Adapter is responsible for passing received messages from the broker
to instance of a JsonRpcServer and adds 'reply_to' header to a frame
before sending it.
"""
@classmethod
def subscription_handler(cls, server, address):
def handler(sub, frame):
server.queueRequest(
(
ServerRpcContextAdapter(sub.client, frame, address),
frame.body
)
)
return handler
def __init__(self, client, request_frame, address):
self._address = address
self._client = client
self._reply_to = request_frame.headers.get('reply-to', None)
def get_local_address(self, *args, **kwargs):
return self._address
def send(self, data):
if self._reply_to:
self._client.send(
self._reply_to,
data,
{
"content-type": "application/json",
}
)
class ClientRpcTransportAdapter(object):
def __init__(self, sub, destination, client):
self._sub = sub
sub.set_message_handler(self._handle_message)
self._destination = destination
self._client = client
self._message_handler = lambda arg: None
"""
In order to process message we need to set message
handler which is responsible for processing jsonrpc
content of the message. Currently there are 2 handlers:
JsonRpcClient and JsonRpcServer.
"""
def set_message_handler(self, handler):
self._message_handler = handler
def send(self, data, destination=None):
if not destination:
destination = self._destination
headers = {
"content-type": "application/json",
"reply-to": self._sub.destination,
}
self._client.send(
data,
destination,
headers,
)
def subscribe(self, queue_name):
return self._client.subscribe(queue_name, sub_id=str(uuid4()))
def unsubscribe(self, sub):
self._client.unsubscribe(sub)
def _handle_message(self, sub, frame):
self._message_handler((self, frame.body))
def close(self):
self._sub.unsubscribe()
self._client.close()
def StompRpcClient(stomp_client, request_queue, response_queue):
return JsonRpcClient(
ClientRpcTransportAdapter(
stomp_client.subscribe(response_queue, sub_id=str(uuid4())),
request_queue,
stomp_client,
)
)
def SimpleClient(host, port=54321, ssl=True):
"""
    Returns a JsonRpcClient able to receive jsonrpc messages and notifications.
    It is required to provide the host we want to connect to, the port, and whether
    we want to use ssl (True by default). Other settings use defaults and if
    there is a need to customize, please use StandAloneRpcClient().
"""
sslctx = None
if ssl:
from vdsm.sslutils import SSLContext
sslctx = SSLContext(key_file=constants.KEY_FILE,
cert_file=constants.CERT_FILE,
ca_certs=constants.CA_FILE)
return StandAloneRpcClient(host, port, "jms.topic.vdsm_requests",
str(uuid4()), sslctx, lazy_start=False)
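# Hedged usage sketch (added for illustration; the host names are placeholders):
#
#   client = SimpleClient('vdsm.example.com')                 # TLS using the default vdsm key/cert/CA files
#   plain = SimpleClient('localhost', port=54321, ssl=False)  # plain TCP
#
# Both calls return a JsonRpcClient wired to the "jms.topic.vdsm_requests" queue;
# because lazy_start=False, the reactor thread is started immediately.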
def StandAloneRpcClient(host, port, request_queue, response_queue,
sslctx=None, lazy_start=True):
"""
    Returns a JsonRpcClient able to receive jsonrpc messages and notifications.
    It is required to provide the host and port we want to connect to, and the
    request and response queues that we want to use during communication.
    We can provide an ssl context if we want to secure the connection.
"""
reactor = Reactor()
def start():
thread = concurrent.thread(reactor.process_requests,
name='Client %s:%s' % (host, port))
thread.start()
client = StompClient(utils.create_connected_socket(host, port, sslctx),
reactor)
jsonclient = JsonRpcClient(
ClientRpcTransportAdapter(
client.subscribe(response_queue, sub_id=str(uuid4())),
request_queue,
client)
)
if lazy_start:
setattr(jsonclient, 'start', start)
else:
start()
return jsonclient
def StompRpcServer(bridge, stomp_client, request_queue, address, timeout, cif):
server = JsonRpcServer(bridge, timeout, cif)
return stomp_client.subscribe(
request_queue,
message_handler=ServerRpcContextAdapter.subscription_handler(server,
address)
)
| EdDev/vdsm | lib/yajsonrpc/stompreactor.py | Python | gpl-2.0 | 22,060 | 0 |
# -*- coding: utf-8 -*-
"""Storage for ports. Set defaults here, then :py:mod:`fixtures.portset` will make overrides."""
import sys
from cfme.utils import clear_property_cache
from cfme.utils.log import logger
class Ports(object):
SSH = 22
DB = 5432
TOWER = 54322
logger = logger
@property
def _top(self, m=sys.modules):
mod = m.get('utils.appliance')
return mod and mod.stack.top
def __setattr__(self, attr, value):
super(self.__class__, self).__setattr__(attr, value)
if self._top is not None:
self.logger.info("Invalidating lazy_cache ssh_client current_appliance object")
clear_property_cache(self._top, 'ssh_client')
sys.modules[__name__] = Ports()
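# Illustrative usage (added comment). Because this module replaces itself with a Ports()
# instance, callers treat it as an object; assigning a port attribute triggers the
# lazy-cache invalidation in __setattr__ above. The import path and port value below are
# assumptions for illustration only.
#
#   from cfme.utils import ports
#   ports.SSH = 2222   # also clears the cached ssh_client on the current appliance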
| Yadnyawalkya/integration_tests | cfme/utils/ports.py | Python | gpl-2.0 | 747 | 0.002677 |
import re
import Queue
class AbsAnalyst(object):
"""docstring for AbsAnalyst"""
LOGTIME_REGEXP = re.compile("(?P<log_time>\w{4}-\w{2}-\w{2} \w{2}:\w{2}:\w{2})")
def __init__(self):
        raise NotImplementedError
def isMatch(self, line):
        raise NotImplementedError
def doStatistic(self):
        raise NotImplementedError
def doAnalyse(self):
        raise NotImplementedError
| DrZhang/LogAnalyst | Analysts/AbsAnalyst.py | Python | lgpl-3.0 | 353 | 0.05949 |
# -*- coding: utf-8 -*-
##############################################################
# Module Written For Odoo, Open Source Management Solution
#
# Copyright (c) 2011 Vauxoo - http://www.vauxoo.com
# All Rights Reserved.
# info Vauxoo (info@vauxoo.com)
# coded by: moylop260@vauxoo.com
############################################################################
from . import runbot_repo
from . import runbot_build
| open-synergy/runbot-addons | runbot_language/models/__init__.py | Python | agpl-3.0 | 433 | 0 |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Interact with AWS Redshift clusters."""
import warnings
from airflow.providers.amazon.aws.hooks.redshift_cluster import RedshiftHook
from airflow.providers.amazon.aws.hooks.redshift_sql import RedshiftSQLHook
warnings.warn(
"This module is deprecated. Please use `airflow.providers.amazon.aws.hooks.redshift_cluster` "
"or `airflow.providers.amazon.aws.hooks.redshift_sql` as appropriate.",
DeprecationWarning,
stacklevel=2,
)
__all__ = ["RedshiftHook", "RedshiftSQLHook"]
| Acehaidrey/incubator-airflow | airflow/providers/amazon/aws/hooks/redshift.py | Python | apache-2.0 | 1,282 | 0.00078 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class Ipv6ExpressRouteCircuitPeeringConfig(Model):
"""Contains IPv6 peering config.
:param primary_peer_address_prefix: The primary address prefix.
:type primary_peer_address_prefix: str
:param secondary_peer_address_prefix: The secondary address prefix.
:type secondary_peer_address_prefix: str
:param microsoft_peering_config: The Microsoft peering configuration.
:type microsoft_peering_config:
~azure.mgmt.network.v2017_09_01.models.ExpressRouteCircuitPeeringConfig
:param route_filter: The reference of the RouteFilter resource.
:type route_filter: ~azure.mgmt.network.v2017_09_01.models.RouteFilter
:param state: The state of peering. Possible values are: 'Disabled' and
'Enabled'. Possible values include: 'Disabled', 'Enabled'
:type state: str or
~azure.mgmt.network.v2017_09_01.models.ExpressRouteCircuitPeeringState
"""
_attribute_map = {
'primary_peer_address_prefix': {'key': 'primaryPeerAddressPrefix', 'type': 'str'},
'secondary_peer_address_prefix': {'key': 'secondaryPeerAddressPrefix', 'type': 'str'},
'microsoft_peering_config': {'key': 'microsoftPeeringConfig', 'type': 'ExpressRouteCircuitPeeringConfig'},
'route_filter': {'key': 'routeFilter', 'type': 'RouteFilter'},
'state': {'key': 'state', 'type': 'str'},
}
def __init__(self, primary_peer_address_prefix=None, secondary_peer_address_prefix=None, microsoft_peering_config=None, route_filter=None, state=None):
super(Ipv6ExpressRouteCircuitPeeringConfig, self).__init__()
self.primary_peer_address_prefix = primary_peer_address_prefix
self.secondary_peer_address_prefix = secondary_peer_address_prefix
self.microsoft_peering_config = microsoft_peering_config
self.route_filter = route_filter
self.state = state
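# Illustrative construction (added comment; the address prefixes are sample values only):
#
#   config = Ipv6ExpressRouteCircuitPeeringConfig(
#       primary_peer_address_prefix='3FFE:FFFF:0:CD30::/126',
#       secondary_peer_address_prefix='3FFE:FFFF:0:CD30::4/126',
#       state='Enabled')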
| AutorestCI/azure-sdk-for-python | azure-mgmt-network/azure/mgmt/network/v2017_09_01/models/ipv6_express_route_circuit_peering_config.py | Python | mit | 2,366 | 0.001691 |
from .views import IndexView
from django.conf.urls import url
urlpatterns = [
url(r'^$', IndexView.as_view(), name='index'),
]
| alessandroHenrique/coinpricemonitor | dashboard/urls.py | Python | mit | 134 | 0 |
from pprint import pformat
import boto3
from clint.textui import indent, puts, prompt
def _get_instance_details(instance):
return {
'tags': instance.tags,
'launch_time': instance.launch_time.isoformat(),
'instance_type': instance.instance_type,
'state': instance.state,
'key_name': instance.key_name,
'public_dns_name': instance.public_dns_name,
'private_dns_name': instance.private_dns_name,
'placement': instance.placement,
}
def terminate(args, config):
ec2 = boto3.resource('ec2')
instance_id = args.instance_id
instance = ec2.Instance(instance_id)
puts("Instance details:")
with indent(4):
puts(pformat(_get_instance_details(instance)))
confirm = prompt.query("Terminate instance {}? (y/n)".format(instance_id), validators=[])
    if confirm == "y":
instance.terminate()
puts("termination request issued")
else:
puts("aborted")
def _register_terminate(subparsers):
parser_terminate = subparsers.add_parser('terminate', help='terminate help')
parser_terminate.add_argument('instance_id',
type=str,
help='id of the instance to terminate')
| lchi/blimp | blimp/commands/terminate.py | Python | mit | 1,260 | 0.001587 |
#
# This file is part of Gruvi. Gruvi is free software available under the
# terms of the MIT license. See the file "LICENSE" that was provided
# together with this source file for the licensing terms.
#
# Copyright (c) 2012-2017 the Gruvi authors. See the file "AUTHORS" for a
# complete list.
from __future__ import absolute_import, print_function
import os
import json
import unittest
import six
import gruvi
from gruvi import jsonrpc
from gruvi.jsonrpc import JsonRpcError, JsonRpcVersion
from gruvi.jsonrpc import JsonRpcProtocol, JsonRpcClient, JsonRpcServer
from gruvi.jsonrpc_ffi import ffi as _ffi, lib as _lib
from gruvi.transports import TransportError
from support import UnitTest, MockTransport
_keepalive = None
def set_buffer(ctx, buf):
global _keepalive # See note in JsonRpcProtocol
_keepalive = ctx.buf = _ffi.from_buffer(buf)
ctx.buflen = len(buf)
ctx.offset = 0
def split_string(s):
ctx = _ffi.new('struct split_context *')
set_buffer(ctx, s)
_lib.json_split(ctx)
return ctx
JsonRpcProtocol.default_version = '1.0'
class TestJsonSplitter(UnitTest):
def test_simple(self):
r = b'{ "foo": "bar" }'
ctx = split_string(r)
self.assertEqual(ctx.error, 0)
self.assertEqual(ctx.offset, len(r))
def test_leading_whitespace(self):
r = b' { "foo": "bar" }'
ctx = split_string(r)
self.assertEqual(ctx.error, 0)
self.assertEqual(ctx.offset, len(r))
r = b' \t\n{ "foo": "bar" }'
ctx = split_string(r)
self.assertEqual(ctx.error, 0)
self.assertEqual(ctx.offset, len(r))
def test_trailing_whitespace(self):
r = b'{ "foo": "bar" } '
ctx = split_string(r)
self.assertEqual(ctx.error, 0)
self.assertEqual(ctx.offset, len(r)-1)
error = _lib.json_split(ctx)
        self.assertEqual(error, ctx.error)
        self.assertEqual(error, _lib.INCOMPLETE)
self.assertEqual(ctx.offset, len(r))
def test_brace_in_string(self):
r = b'{ "foo": "b{r" }'
ctx = split_string(r)
self.assertEqual(ctx.error, 0)
self.assertEqual(ctx.offset, len(r))
r = b'{ "foo": "b}r" }'
ctx = split_string(r)
self.assertEqual(ctx.error, 0)
self.assertEqual(ctx.offset, len(r))
def test_string_escape(self):
r = b'{ "foo": "b\\"}" }'
ctx = split_string(r)
self.assertEqual(ctx.error, 0)
self.assertEqual(ctx.offset, len(r))
def test_error(self):
r = b' x { "foo": "bar" }'
ctx = split_string(r)
self.assertEqual(ctx.error, _lib.ERROR)
self.assertEqual(ctx.offset, 1)
r = b'[ { "foo": "bar" } ]'
ctx = split_string(r)
self.assertEqual(ctx.error, _lib.ERROR)
self.assertEqual(ctx.offset, 0)
def test_multiple(self):
r = b'{ "foo": "bar" } { "baz": "qux" }'
ctx = split_string(r)
self.assertEqual(ctx.error, 0)
self.assertEqual(ctx.offset, 16)
error = _lib.json_split(ctx)
        self.assertEqual(error, ctx.error)
        self.assertEqual(error, 0)
self.assertEqual(ctx.offset, len(r))
def test_incremental(self):
r = b'{ "foo": "bar" }'
ctx = _ffi.new('struct split_context *')
for i in range(len(r)-1):
set_buffer(ctx, r[i:i+1])
error = _lib.json_split(ctx)
            self.assertEqual(error, ctx.error)
            self.assertEqual(error, _lib.INCOMPLETE)
self.assertEqual(ctx.offset, 1)
set_buffer(ctx, r[-1:])
error = _lib.json_split(ctx)
        self.assertEqual(error, ctx.error)
        self.assertEqual(error, 0)
self.assertEqual(ctx.offset, 1)
class TestJsonRpcV1(UnitTest):
def setUp(self):
super(TestJsonRpcV1, self).setUp()
self.version = JsonRpcVersion.create('1.0')
def test_check_request(self):
v = self.version
msg = {'id': 1, 'method': 'foo', 'params': []}
self.assertEqual(v.check_message(msg), jsonrpc.REQUEST)
msg = {'id': None, 'method': 'foo', 'params': []}
self.assertEqual(v.check_message(msg), jsonrpc.REQUEST)
def test_check_request_missing_id(self):
v = self.version
msg = {'method': 'foo', 'params': []}
self.assertRaises(ValueError, v.check_message, msg)
def test_check_request_missing_method(self):
v = self.version
msg = {'id': 1, 'params': []}
self.assertRaises(ValueError, v.check_message, msg)
def test_check_request_illegal_method(self):
v = self.version
msg = {'id': 1, 'method': None, 'params': []}
self.assertRaises(ValueError, v.check_message, msg)
msg = {'id': 1, 'method': 1, 'params': []}
self.assertRaises(ValueError, v.check_message, msg)
msg = {'id': 1, 'method': {}, 'params': []}
self.assertRaises(ValueError, v.check_message, msg)
msg = {'id': 1, 'method': [], 'params': []}
self.assertRaises(ValueError, v.check_message, msg)
msg = {'id': 1, 'method': [1], 'params': []}
self.assertRaises(ValueError, v.check_message, msg)
def test_check_request_missing_params(self):
v = self.version
msg = {'id': 1, 'method': 'foo'}
self.assertRaises(ValueError, v.check_message, msg)
def test_check_request_illegal_params(self):
v = self.version
msg = {'id': 1, 'method': 'foo', 'params': None}
self.assertRaises(ValueError, v.check_message, msg)
msg = {'id': 1, 'method': 'foo', 'params': 1}
self.assertRaises(ValueError, v.check_message, msg)
msg = {'id': 1, 'method': 'foo', 'params': 'foo'}
self.assertRaises(ValueError, v.check_message, msg)
        msg = {'id': 1, 'method': 'foo', 'params': {}}
        self.assertRaises(ValueError, v.check_message, msg)
def test_check_request_extraneous_fields(self):
v = self.version
msg = {'id': 1, 'method': 'foo', 'params': [], 'bar': 'baz'}
self.assertRaises(ValueError, v.check_message, msg)
def test_check_response(self):
v = self.version
msg = {'id': 1, 'result': 'foo', 'error': None}
self.assertEqual(v.check_message(msg), jsonrpc.RESPONSE)
def test_check_response_null_result(self):
v = self.version
msg = {'id': 1, 'result': None, 'error': None}
self.assertEqual(v.check_message(msg), jsonrpc.RESPONSE)
def test_check_response_error(self):
v = self.version
msg = {'id': 1, 'result': None, 'error': {'code': 1}}
self.assertEqual(v.check_message(msg), jsonrpc.RESPONSE)
def test_check_response_missing_id(self):
v = self.version
msg = {'result': 'foo', 'error': None}
self.assertRaises(ValueError, v.check_message, msg)
def test_check_response_missing_result(self):
v = self.version
msg = {'id': 1, 'error': None}
self.assertRaises(ValueError, v.check_message, msg)
def test_check_response_missing_error(self):
v = self.version
msg = {'id': 1, 'result': None}
self.assertRaises(ValueError, v.check_message, msg)
def test_check_response_illegal_error(self):
v = self.version
msg = {'id': 1, 'result': None, 'error': 1}
self.assertRaises(ValueError, v.check_message, msg)
msg = {'id': 1, 'result': None, 'error': 'foo'}
self.assertRaises(ValueError, v.check_message, msg)
msg = {'id': 1, 'result': None, 'error': []}
self.assertRaises(ValueError, v.check_message, msg)
def test_check_response_result_error_both_set(self):
v = self.version
msg = {'id': 1, 'result': 1, 'error': 0}
self.assertRaises(ValueError, v.check_message, msg)
def test_check_response_extraneous_fields(self):
v = self.version
msg = {'id': 1, 'result': 1, 'error': None, 'bar': 'baz'}
self.assertRaises(ValueError, v.check_message, msg)
def test_create_request(self):
v = self.version
msg = v.create_request('foo', [])
self.assertIsInstance(msg['id'], six.string_types)
self.assertEqual(msg['method'], 'foo')
self.assertEqual(msg['params'], [])
self.assertEqual(len(msg), 3)
def test_create_request_notification(self):
v = self.version
msg = v.create_request('foo', [], notification=True)
self.assertIsNone(msg['id'])
self.assertEqual(msg['method'], 'foo')
self.assertEqual(msg['params'], [])
self.assertEqual(len(msg), 3)
def test_create_response(self):
v = self.version
req = {'id': 'gruvi.0'}
msg = v.create_response(req, 1)
self.assertEqual(msg['id'], req['id'])
self.assertEqual(msg['result'], 1)
self.assertIsNone(msg['error'])
self.assertEqual(len(msg), 3)
def test_create_response_null_result(self):
v = self.version
req = {'id': 'gruvi.0'}
msg = v.create_response(req, None)
self.assertEqual(msg['id'], req['id'])
self.assertIsNone(msg['result'])
self.assertIsNone(msg['error'])
self.assertEqual(len(msg), 3)
def test_create_response_error(self):
v = self.version
req = {'id': 'gruvi.0'}
msg = v.create_response(req, error={'code': 1})
self.assertEqual(msg['id'], req['id'])
self.assertIsNone(msg['result'])
self.assertEqual(msg['error'], {'code': 1})
self.assertEqual(len(msg), 3)
class TestJsonRpcV2(UnitTest):
def setUp(self):
super(TestJsonRpcV2, self).setUp()
self.version = JsonRpcVersion.create('2.0')
def test_check_request(self):
v = self.version
msg = {'jsonrpc': '2.0', 'id': 1, 'method': 'foo', 'params': []}
self.assertEqual(v.check_message(msg), jsonrpc.REQUEST)
msg = {'jsonrpc': '2.0', 'id': 1, 'method': 'foo', 'params': {}}
self.assertEqual(v.check_message(msg), jsonrpc.REQUEST)
def test_check_request_notification(self):
v = self.version
msg = {'jsonrpc': '2.0', 'method': 'foo', 'params': []}
self.assertEqual(v.check_message(msg), jsonrpc.REQUEST)
msg = {'jsonrpc': '2.0', 'method': 'foo', 'params': {}}
self.assertEqual(v.check_message(msg), jsonrpc.REQUEST)
def test_check_request_missing_version(self):
v = self.version
msg = {'id': 1, 'method': 'foo', 'params': []}
self.assertRaises(ValueError, v.check_message, msg)
def test_check_request_missing_method(self):
v = self.version
msg = {'jsonrpc': '2.0', 'id': 1, 'params': []}
self.assertRaises(ValueError, v.check_message, msg)
def test_check_request_illegal_method(self):
v = self.version
msg = {'jsonrpc': '2.0', 'id': 1, 'method': None, 'params': []}
self.assertRaises(ValueError, v.check_message, msg)
msg = {'jsonrpc': '2.0', 'id': 1, 'method': 1, 'params': []}
self.assertRaises(ValueError, v.check_message, msg)
msg = {'jsonrpc': '2.0', 'id': 1, 'method': {}, 'params': []}
self.assertRaises(ValueError, v.check_message, msg)
msg = {'jsonrpc': '2.0', 'id': 1, 'method': [], 'params': []}
self.assertRaises(ValueError, v.check_message, msg)
msg = {'jsonrpc': '2.0', 'id': 1, 'method': [1], 'params': []}
self.assertRaises(ValueError, v.check_message, msg)
def test_check_request_missing_params(self):
v = self.version
msg = {'jsonrpc': '2.0', 'id': 1, 'method': 'foo'}
self.assertEqual(v.check_message(msg), jsonrpc.REQUEST)
def test_check_request_illegal_params(self):
v = self.version
msg = {'jsonrpc': '2.0', 'id': 1, 'method': 'foo', 'params': None}
self.assertRaises(ValueError, v.check_message, msg)
msg = {'jsonrpc': '2.0', 'id': 1, 'method': 'foo', 'params': 1}
self.assertRaises(ValueError, v.check_message, msg)
        msg = {'jsonrpc': '2.0', 'id': 1, 'method': 'foo', 'params': 'foo'}
        self.assertRaises(ValueError, v.check_message, msg)
def test_check_request_extraneous_fields(self):
v = self.version
msg = {'jsonrpc': '2.0', 'id': 1, 'method': 'foo', 'params': [], 'bar': 'baz'}
self.assertRaises(ValueError, v.check_message, msg)
def test_check_response(self):
v = self.version
msg = {'jsonrpc': '2.0', 'id': 1, 'result': 'foo'}
self.assertEqual(v.check_message(msg), jsonrpc.RESPONSE)
def test_check_response_null_result(self):
v = self.version
msg = {'jsonrpc': '2.0', 'id': 1, 'result': None}
self.assertEqual(v.check_message(msg), jsonrpc.RESPONSE)
def test_check_response_error(self):
v = self.version
msg = {'jsonrpc': '2.0', 'id': 1, 'error': {'code': 1}}
self.assertEqual(v.check_message(msg), jsonrpc.RESPONSE)
def test_check_response_missing_id(self):
v = self.version
msg = {'jsonrpc': '2.0', 'result': 'foo'}
self.assertRaises(ValueError, v.check_message, msg)
def test_check_response_null_id(self):
v = self.version
msg = {'jsonrpc': '2.0', 'id': None, 'result': 'foo'}
self.assertEqual(v.check_message(msg), jsonrpc.RESPONSE)
def test_check_response_error_missing_id(self):
v = self.version
msg = {'jsonrpc': '2.0', 'error': {'code': 10}}
self.assertRaises(ValueError, v.check_message, msg)
def test_check_response_error_null_id(self):
v = self.version
msg = {'jsonrpc': '2.0', 'id': None, 'error': {'code': 1}}
self.assertEqual(v.check_message(msg), jsonrpc.RESPONSE)
def test_check_response_missing_result_and_error(self):
v = self.version
msg = {'jsonrpc': '2.0', 'id': 1}
self.assertRaises(ValueError, v.check_message, msg)
def test_check_response_illegal_error(self):
v = self.version
msg = {'jsonrpc': '2.0', 'id': 1, 'error': 1}
self.assertRaises(ValueError, v.check_message, msg)
msg = {'jsonrpc': '2.0', 'id': 1, 'error': 'foo'}
self.assertRaises(ValueError, v.check_message, msg)
msg = {'jsonrpc': '2.0', 'id': 1, 'error': []}
self.assertRaises(ValueError, v.check_message, msg)
def test_check_response_result_error_both_present(self):
v = self.version
msg = {'jsonrpc': '2.0', 'id': 1, 'result': None, 'error': None}
self.assertRaises(ValueError, v.check_message, msg)
msg = {'jsonrpc': '2.0', 'id': 1, 'result': 1, 'error': None}
self.assertRaises(ValueError, v.check_message, msg)
msg = {'jsonrpc': '2.0', 'id': 1, 'result': None, 'error': {'code': 10}}
self.assertRaises(ValueError, v.check_message, msg)
def test_check_response_extraneous_fields(self):
v = self.version
msg = {'jsonrpc': '2.0', 'id': 1, 'result': 1, 'error': None, 'bar': 'baz'}
self.assertRaises(ValueError, v.check_message, msg)
def test_create_request(self):
v = self.version
msg = v.create_request('foo', [])
self.assertEqual(msg['jsonrpc'], '2.0')
self.assertIsInstance(msg['id'], six.string_types)
self.assertEqual(msg['method'], 'foo')
self.assertEqual(msg['params'], [])
self.assertEqual(len(msg), 4)
def test_create_request_notification(self):
v = self.version
msg = v.create_request('foo', [], notification=True)
self.assertEqual(msg['jsonrpc'], '2.0')
self.assertNotIn('id', msg)
self.assertEqual(msg['method'], 'foo')
self.assertEqual(msg['params'], [])
self.assertEqual(len(msg), 3)
def test_create_response(self):
v = self.version
req = {'id': 'gruvi.0'}
msg = v.create_response(req, 1)
self.assertEqual(msg['jsonrpc'], '2.0')
self.assertEqual(msg['id'], req['id'])
self.assertEqual(msg['result'], 1)
self.assertNotIn('error', msg)
self.assertEqual(len(msg), 3)
def test_create_response_null_result(self):
v = self.version
req = {'id': 'gruvi.0'}
msg = v.create_response(req, None)
self.assertEqual(msg['jsonrpc'], '2.0')
self.assertEqual(msg['id'], req['id'])
self.assertIsNone(msg['result'])
self.assertNotIn('error', msg)
self.assertEqual(len(msg), 3)
def test_create_response_error(self):
v = self.version
req = {'id': 'gruvi.0'}
msg = v.create_response(req, error={'code': 1})
self.assertEqual(msg['jsonrpc'], '2.0')
self.assertEqual(msg['id'], req['id'])
self.assertNotIn('result', msg)
self.assertEqual(msg['error'], {'code': 1})
self.assertEqual(len(msg), 3)
class TestJsonRpcProtocol(UnitTest):
def setUp(self):
super(TestJsonRpcProtocol, self).setUp()
self.transport = MockTransport()
self.protocol = JsonRpcProtocol(self.message_handler)
self.transport.start(self.protocol)
self.messages = []
self.protocols = []
def message_handler(self, message, transport, protocol):
self.messages.append(message)
self.protocols.append(protocol)
def get_messages(self):
# run dispatcher thread so that it calls our message handler
gruvi.sleep(0)
return self.messages
def test_simple(self):
m = b'{ "id": "1", "method": "foo", "params": [] }'
proto = self.protocol
proto.data_received(m)
mm = self.get_messages()
self.assertEqual(len(mm), 1)
self.assertIsInstance(mm[0], dict)
self.assertEqual(mm[0], {'id': '1', 'method': 'foo', 'params': []})
pp = self.protocols
self.assertEqual(len(pp), 1)
self.assertIs(pp[0], proto)
def test_multiple(self):
m = b'{ "id": "1", "method": "foo", "params": [] }' \
b'{ "id": "2", "method": "bar", "params": [] }'
proto = self.protocol
proto.data_received(m)
mm = self.get_messages()
self.assertEqual(len(mm), 2)
self.assertEqual(mm[0], {'id': '1', 'method': 'foo', 'params': []})
self.assertEqual(mm[1], {'id': '2', 'method': 'bar', 'params': []})
pp = self.protocols
self.assertEqual(len(pp), 2)
self.assertIs(pp[0], proto)
self.assertIs(pp[1], proto)
def test_whitespace(self):
m = b' { "id": "1", "method": "foo", "params": [] }' \
b' { "id": "2", "method": "bar", "params": [] }'
proto = self.protocol
proto.data_received(m)
mm = self.get_messages()
self.assertEqual(len(mm), 2)
self.assertEqual(mm[0], {'id': '1', 'method': 'foo', 'params': []})
self.assertEqual(mm[1], {'id': '2', 'method': 'bar', 'params': []})
def test_incremental(self):
m = b'{ "id": "1", "method": "foo", "params": [] }'
proto = self.protocol
for i in range(len(m)-1):
proto.data_received(m[i:i+1])
self.assertEqual(self.get_messages(), [])
proto.data_received(m[-1:])
mm = self.get_messages()
self.assertEqual(len(mm), 1)
self.assertEqual(mm[0], {'id': '1', 'method': 'foo', "params": []})
def test_framing_error(self):
m = b'xxx'
proto = self.protocol
proto.data_received(m)
self.assertEqual(self.get_messages(), [])
self.assertIsInstance(proto._error, JsonRpcError)
def test_encoding_error(self):
m = b'{ xxx\xff }'
proto = self.protocol
proto.data_received(m)
self.assertEqual(self.get_messages(), [])
self.assertIsInstance(proto._error, JsonRpcError)
def test_illegal_json(self):
m = b'{ "xxxx" }'
proto = self.protocol
proto.data_received(m)
self.assertEqual(self.get_messages(), [])
self.assertIsInstance(proto._error, JsonRpcError)
def test_illegal_jsonrpc(self):
m = b'{ "xxxx": "yyyy" }'
proto = self.protocol
proto.data_received(m)
self.assertEqual(self.get_messages(), [])
self.assertIsInstance(proto._error, JsonRpcError)
def test_maximum_message_size_exceeded(self):
proto = self.protocol
proto.max_message_size = 100
message = {'id': 1, 'method': 'foo', 'params': ['x'*100]}
message = json.dumps(message).encode('utf8')
self.assertGreater(len(message), proto.max_message_size)
proto.data_received(message)
self.assertEqual(self.get_messages(), [])
self.assertIsInstance(proto._error, JsonRpcError)
def test_flow_control(self):
# Write more messages than the protocol is willing to pipeline. Flow
# control should kick in and alternate scheduling of the producer and
# the consumer.
proto, trans = self.protocol, self.transport
self.assertTrue(trans._reading)
proto.max_pipeline_size = 10
message = b'{ "id": 1, "method": "foo", "params": [] }'
interrupted = 0
for i in range(1000):
proto.data_received(message)
if not trans._reading:
interrupted += 1
gruvi.sleep(0) # run dispatcher
self.assertTrue(trans._reading)
mm = self.get_messages()
self.assertEqual(len(mm), 1000)
self.assertEqual(interrupted, 100)
message = json.loads(message.decode('utf8'))
for m in mm:
self.assertEqual(m, message)
def echo_app(message, transport, protocol):
if message.get('method') != 'echo':
protocol.send_response(message, error={'code': jsonrpc.METHOD_NOT_FOUND})
else:
protocol.send_response(message, message['params'])
def reflect_app(message, transport, protocol):
if message.get('method') != 'echo':
return
value = protocol.call_method('echo', *message['params'])
protocol.send_response(message, value)
def notification_app():
notifications = []
def application(message, transport, protocol):
if message.get('id') is None:
notifications.append((message['method'], message['params']))
elif message['method'] == 'get_notifications':
protocol.send_response(message, notifications)
return application
class TestJsonRpc(UnitTest):
def test_errno(self):
code = jsonrpc.SERVER_ERROR
self.assertIsInstance(code, int)
name = jsonrpc.errorcode[code]
self.assertIsInstance(name, str)
self.assertEqual(getattr(jsonrpc, name), code)
desc = jsonrpc.strerror(code)
self.assertIsInstance(desc, str)
def test_call_method_tcp(self):
server = JsonRpcServer(echo_app)
server.listen(('localhost', 0))
addr = server.addresses[0]
client = JsonRpcClient()
client.connect(addr)
result = client.call_method('echo', 'foo')
self.assertEqual(result, ['foo'])
server.close()
client.close()
def test_call_method_pipe(self):
server = JsonRpcServer(echo_app)
server.listen(self.pipename(abstract=True))
addr = server.addresses[0]
client = JsonRpcClient()
client.connect(addr)
result = client.call_method('echo', 'foo')
self.assertEqual(result, ['foo'])
server.close()
client.close()
def test_call_method_ssl(self):
server = JsonRpcServer(echo_app)
server.listen(('localhost', 0), **self.ssl_s_args)
addr = server.addresses[0]
client = JsonRpcClient()
client.connect(addr, **self.ssl_c_args)
result = client.call_method('echo', 'foo')
self.assertEqual(result, ['foo'])
server.close()
client.close()
def test_call_method_no_args(self):
server = JsonRpcServer(echo_app)
server.listen(('127.0.0.1', 0))
addr = server.addresses[0]
client = JsonRpcClient()
client.connect(addr)
result = client.call_method('echo')
self.assertEqual(result, [])
server.close()
client.close()
def test_call_method_multiple_args(self):
server = JsonRpcServer(echo_app)
server.listen(('127.0.0.1', 0))
addr = server.addresses[0]
client = JsonRpcClient()
client.connect(addr)
result = client.call_method('echo', 'foo', 'bar')
self.assertEqual(result, ['foo', 'bar'])
server.close()
client.close()
def test_call_method_error(self):
server = JsonRpcServer(echo_app)
server.listen(('127.0.0.1', 0))
addr = server.addresses[0]
client = JsonRpcClient()
client.connect(addr)
exc = self.assertRaises(JsonRpcError, client.call_method, 'echo2')
self.assertIsInstance(exc, JsonRpcError)
self.assertIsInstance(exc.error, dict)
self.assertEqual(exc.error['code'], jsonrpc.METHOD_NOT_FOUND)
server.close()
client.close()
def test_send_notification(self):
server = JsonRpcServer(notification_app())
server.listen(('127.0.0.1', 0))
addr = server.addresses[0]
client = JsonRpcClient()
client.connect(addr)
client.send_notification('notify_foo', 'foo')
notifications = client.call_method('get_notifications')
self.assertEqual(notifications, [['notify_foo', ['foo']]])
server.close()
client.close()
def test_call_method_ping_pong(self):
server = JsonRpcServer(reflect_app)
server.listen(('127.0.0.1', 0))
addr = server.addresses[0]
client = JsonRpcClient(echo_app)
client.connect(addr)
result = client.call_method('echo', 'foo')
self.assertEqual(result, ['foo'])
server.close()
client.close()
def test_send_evil(self):
server = JsonRpcServer(echo_app)
server.listen(('127.0.0.1', 0))
addr = server.addresses[0]
client = JsonRpcClient()
client.connect(addr)
exc = None
try:
chunk = b'{' * 1024
while True:
client.transport.write(chunk)
gruvi.sleep(0)
except Exception as e:
exc = e
self.assertIsInstance(exc, TransportError)
server.close()
client.close()
def test_send_whitespace(self):
server = JsonRpcServer(echo_app)
server.listen(('127.0.0.1', 0))
addr = server.addresses[0]
client = JsonRpcClient()
client.connect(addr)
exc = None
try:
chunk = b' ' * 1024
while True:
client.transport.write(chunk)
gruvi.sleep(0)
except Exception as e:
exc = e
self.assertIsInstance(exc, TransportError)
server.close()
client.close()
def test_send_random(self):
server = JsonRpcServer(echo_app)
server.listen(('127.0.0.1', 0))
addr = server.addresses[0]
client = JsonRpcClient()
client.connect(addr)
exc = None
try:
while True:
chunk = os.urandom(1024)
client.transport.write(chunk)
gruvi.sleep(0)
except Exception as e:
exc = e
self.assertIsInstance(exc, TransportError)
server.close()
client.close()
def test_connection_limit(self):
server = JsonRpcServer(echo_app)
server.listen(('127.0.0.1', 0))
addr = server.addresses[0]
server.max_connections = 2
clients = []
exc = None
try:
for i in range(3):
client = JsonRpcClient(timeout=2)
client.connect(addr)
client.call_method('echo')
clients.append(client)
except Exception as e:
exc = e
self.assertIsInstance(exc, TransportError)
self.assertEqual(len(server.connections), server.max_connections)
for client in clients:
client.close()
server.close()
if __name__ == '__main__':
unittest.main()
| geertj/gruvi | tests/test_jsonrpc.py | Python | mit | 27,989 | 0.000322 |
from rpython.translator.tool.cbuild import ExternalCompilationInfo
from rpython.rtyper.tool import rffi_platform as platform
from rpython.rtyper.lltypesystem import rffi, lltype
from hippy.tool.platform import get_gmake
import subprocess
import py
LIBDIR = py.path.local(__file__).join('..', 'lib', 'joaat/')
subprocess.check_call([get_gmake(), '-C', str(LIBDIR)])
eci = ExternalCompilationInfo(
includes=['joaat.h'],
library_dirs=[str(LIBDIR)],
libraries=['joaat1'],
testonly_libraries=['joaat'],
include_dirs=[str(LIBDIR)])
class CConfig:
_compilation_info_ = eci
JOAAT_CTX = platform.Struct('JOAAT_CTX', [])
globals().update(platform.configure(CConfig))
def external(name, args, result):
return rffi.llexternal(name, args, result,
compilation_info=eci, releasegil=False)
PTR_JOAAT_CTX = lltype.Ptr(JOAAT_CTX)
c_JOAATInit = external('JOAATInit',
[PTR_JOAAT_CTX], lltype.Void)
c_JOAATUpdate = external('JOAATUpdate',
[PTR_JOAAT_CTX,
rffi.CCHARP,
rffi.UINT],
lltype.Void)
c_JOAATFinal = external('JOAATFinal',
[rffi.CCHARP,
PTR_JOAAT_CTX],
lltype.Void)
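# Illustrative usage sketch (added commentary, not part of the original
# module): the bindings follow the usual init/update/final hashing cycle.
# The 4-byte digest size below is an assumption based on joaat being a
# 32-bit hash.
#
#     ctx = lltype.malloc(JOAAT_CTX, flavor='raw')
#     digest = lltype.malloc(rffi.CCHARP.TO, 4, flavor='raw')
#     c_JOAATInit(ctx)
#     buf = rffi.str2charp(data)
#     c_JOAATUpdate(ctx, buf, rffi.cast(rffi.UINT, len(data)))
#     rffi.free_charp(buf)
#     c_JOAATFinal(digest, ctx)
#     hexdigest = ''.join(['%02x' % ord(digest[i]) for i in range(4)])
#     lltype.free(digest, flavor='raw')
#     lltype.free(ctx, flavor='raw')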
| xhava/hippyvm | hippy/module/hash/cjoaat.py | Python | mit | 1,324 | 0.001511 |
"""
By specifying the 'proxy' Meta attribute, model subclasses can specify that
they will take data directly from the table of their base class rather than
using a new table of their own. This allows them to act as simple proxies,
providing a modified interface to the data from the base class.
"""
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
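# Illustrative usage sketch (added commentary, not used by the tests below):
# a proxy shares its concrete parent's database table, so for the
# Person/MyPerson pair defined in this module both classes read and write the
# same rows; only the Python class of the returned instances differs.
#
#     Person.objects.create(name="bob")
#     MyPerson.objects.get(name="bob")   # same row, returned as a MyPerson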
# A couple of managers for testing manager overriding in proxy model cases.
class PersonManager(models.Manager):
def get_queryset(self):
return super(PersonManager, self).get_queryset().exclude(name="fred")
class SubManager(models.Manager):
def get_queryset(self):
return super(SubManager, self).get_queryset().exclude(name="wilma")
@python_2_unicode_compatible
class Person(models.Model):
"""
A simple concrete base class.
"""
name = models.CharField(max_length=50)
objects = PersonManager()
def __str__(self):
return self.name
class Abstract(models.Model):
"""
A simple abstract base class, to be used for error checking.
"""
data = models.CharField(max_length=10)
class Meta:
abstract = True
class MyPerson(Person):
"""
    A proxy subclass; this should not get a new table. Overrides the default
manager.
"""
class Meta:
proxy = True
ordering = ["name"]
permissions = (
("display_users", "May display users information"),
)
objects = SubManager()
other = PersonManager()
def has_special_name(self):
return self.name.lower() == "special"
class ManagerMixin(models.Model):
excluder = SubManager()
class Meta:
abstract = True
class OtherPerson(Person, ManagerMixin):
"""
    A class with the default manager from Person, plus a secondary manager.
"""
class Meta:
proxy = True
ordering = ["name"]
class StatusPerson(MyPerson):
"""
    A non-proxy subclass of a proxy; it should get a new table.
"""
status = models.CharField(max_length=80)
# We can even have proxies of proxies (and subclass of those).
class MyPersonProxy(MyPerson):
class Meta:
proxy = True
class LowerStatusPerson(MyPersonProxy):
status = models.CharField(max_length=80)
@python_2_unicode_compatible
class User(models.Model):
name = models.CharField(max_length=100)
def __str__(self):
return self.name
class UserProxy(User):
class Meta:
proxy = True
class UserProxyProxy(UserProxy):
class Meta:
proxy = True
# We can still use `select_related()` to include related models in our querysets.
class Country(models.Model):
name = models.CharField(max_length=50)
@python_2_unicode_compatible
class State(models.Model):
name = models.CharField(max_length=50)
country = models.ForeignKey(Country)
def __str__(self):
return self.name
class StateProxy(State):
class Meta:
proxy = True
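# Usage sketch for the select_related() note above (illustrative only; the
# 'Texas' value is made up): the related model is pulled in even when the
# query goes through the proxy.
#
#     StateProxy.objects.select_related('country').get(name='Texas')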
# Proxy models still work with filters (on related fields)
# and select_related, even when mixed with model inheritance.
@python_2_unicode_compatible
class BaseUser(models.Model):
name = models.CharField(max_length=255)
def __str__(self):
return ':'.join((self.__class__.__name__, self.name,))
class TrackerUser(BaseUser):
status = models.CharField(max_length=50)
class ProxyTrackerUser(TrackerUser):
class Meta:
proxy = True
@python_2_unicode_compatible
class Issue(models.Model):
summary = models.CharField(max_length=255)
assignee = models.ForeignKey(TrackerUser)
def __str__(self):
return ':'.join((self.__class__.__name__, self.summary,))
class Bug(Issue):
version = models.CharField(max_length=50)
reporter = models.ForeignKey(BaseUser)
class ProxyBug(Bug):
"""
Proxy of an inherited class
"""
class Meta:
proxy = True
class ProxyProxyBug(ProxyBug):
"""
    A proxy of a proxy model with a related field
"""
class Meta:
proxy = True
class Improvement(Issue):
"""
    A model that has a relation to a proxy model
    or to a proxy of a proxy model
"""
version = models.CharField(max_length=50)
reporter = models.ForeignKey(ProxyTrackerUser)
associated_bug = models.ForeignKey(ProxyProxyBug)
class ProxyImprovement(Improvement):
class Meta:
proxy = True
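# Usage sketch (illustrative only; filter values are made up): filters and
# select_related() traverse relations that point at proxy models just as they
# would for the concrete classes.
#
#     Improvement.objects.filter(associated_bug__summary__icontains='crash')
#     ProxyImprovement.objects.select_related('reporter')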
| denisenkom/django | tests/proxy_models/models.py | Python | bsd-3-clause | 4,379 | 0.005481 |