| text | repo_name | path | language | license | size | score |
|---|---|---|---|---|---|---|
| stringlengths 6–947k | stringlengths 5–100 | stringlengths 4–231 | stringclasses (1 value) | stringclasses (15 values) | int64 6–947k | float64 0–0.34 |
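The records below are raw code files carrying the metadata fields listed above. As a minimal sketch of how rows with this schema could be consumed, assuming each record is stored as one JSON object per line (the file name `dump.jsonl` and the filter thresholds are illustrative, not part of this dump):

```python
import json

def iter_records(path):
    # Each line is assumed to be one JSON object with the columns shown above:
    # text, repo_name, path, language, license, size, score.
    with open(path, encoding="utf-8") as handle:
        for line in handle:
            yield json.loads(line)

def select(path, min_score=0.005, licenses=("mit", "bsd-3-clause")):
    # Keep permissively licensed Python files above an (arbitrary) score threshold.
    for rec in iter_records(path):
        if rec["language"] == "Python" and rec["license"] in licenses and rec["score"] >= min_score:
            yield rec["repo_name"], rec["path"], rec["size"]

if __name__ == "__main__":
    for repo, file_path, size in select("dump.jsonl"):
        print(repo, file_path, size)
```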
#! /usr/bin/env python
# -*- coding: utf-8 -*-
import cv2
import numpy as np
import pymeanshift as pms
from blobs.BlobResult import CBlobResult
from blobs.Blob import CBlob # Note: This must be imported in order to destroy blobs and use other methods
#############################################################################
# so, here is the main part of the program
if __name__ == '__main__':
import sys
import os
blob_overlay = True
file_name = "plates/license1.png"
if len(sys.argv) != 1:
file_name = sys.argv[1]
base_name = os.path.basename(file_name)
fname_prefix = ".".join(base_name.split(".")[:-1])
print fname_prefix
# Image load & conversion to cvmat
license_plate = cv2.imread(file_name, cv2.CV_LOAD_IMAGE_COLOR)
# Segment
segmented, labels, regions = pms.segment(license_plate, 3, 3, 50)
print "Segmentation results"
print "%s: %s" % ("labels", labels)
print "%s: %s" % ("regions", regions)
cv2.imwrite('%s_segmented.png' % fname_prefix, segmented)
license_plate = cv2.imread('%s_segmented.png' % fname_prefix, cv2.CV_LOAD_IMAGE_COLOR)
license_plate_size = (license_plate.shape[1], license_plate.shape[0])
license_plate_cvmat = cv2.cv.fromarray(license_plate)
license_plate_ipl = cv2.cv.CreateImage(license_plate_size, cv2.cv.IPL_DEPTH_8U, 3)
cv2.cv.SetData(
license_plate_ipl,
license_plate.tostring(),
license_plate.dtype.itemsize * 3 * license_plate.shape[1])
license_plate_white_ipl = cv2.cv.CreateImage(license_plate_size, cv2.cv.IPL_DEPTH_8U, 1)
cv2.cv.Set(license_plate_white_ipl, 255)
# Grayscale conversion
inverted_license_plate_grayscale_ipl = cv2.cv.CreateImage(
license_plate_size,
cv2.cv.IPL_DEPTH_8U, 1)
license_plate_grayscale_ipl = cv2.cv.CreateImage(
license_plate_size,
cv2.cv.IPL_DEPTH_8U, 1)
cv2.cv.CvtColor(
license_plate_cvmat,
license_plate_grayscale_ipl,
cv2.COLOR_RGB2GRAY);
license_plate_grayscale_np = np.asarray(license_plate_grayscale_ipl[:,:])
# We can also use cv.saveimage
# cv2.cv.SaveImage('license1_grayscale.png', license_plate_grayscale_ipl)
cv2.imwrite('%s_grayscale.png' % fname_prefix, license_plate_grayscale_np)
# Thresholding or binarization of images
(threshold_value, thresh_image) = cv2.threshold(
license_plate_grayscale_np,
128,
255,
cv2.THRESH_BINARY | cv2.THRESH_OTSU)
print "Thresholding complete. Partition value is %d" % threshold_value
cv2.imwrite('%s_threshold.png' % fname_prefix, thresh_image)
# Create a mask that will cover the entire image
mask = cv2.cv.CreateImage (license_plate_size, 8, 1)
cv2.cv.Set(mask, 1)
#if not blob_overlay:
# # Convert black-and-white version back into three-color representation
# cv2.cv.CvtColor(my_grayscale, frame_cvmat, cv2.COLOR_GRAY2RGB);
# Blob detection
thresh_image_ipl = cv2.cv.CreateImage(license_plate_size, cv2.cv.IPL_DEPTH_8U, 1)
cv2.cv.SetData(
thresh_image_ipl,
thresh_image.tostring(),
thresh_image.dtype.itemsize * 1 * thresh_image.shape[1])
cv2.cv.Not(thresh_image_ipl, inverted_license_plate_grayscale_ipl)
# Min blob size and Max blob size
    min_blob_size = 100  # Minimum blob area, in pixels
max_blob_size = 10000
threshold = 100
# Plate area as % of image area:
max_plate_to_image_ratio = 0.3
min_plate_to_image_ratio = 0.01
image_area = license_plate_size[0] * license_plate_size[1]
# Mask - Blob extracted where mask is set to 1
# Third parameter is threshold value to apply prior to blob detection
# Boolean indicating whether we find moments
myblobs = CBlobResult(thresh_image_ipl, mask, threshold, True)
myblobs.filter_blobs(min_blob_size, max_blob_size)
blob_count = myblobs.GetNumBlobs()
    print "Found %d blob[s] between size %d and %d using threshold %d" % (
blob_count, min_blob_size, max_blob_size, threshold)
for i in range(blob_count):
my_enumerated_blob = myblobs.GetBlob(i)
# print "%d: Area = %d" % (i, my_enumerated_blob.Area())
my_enumerated_blob.FillBlob(
license_plate_grayscale_ipl,
#license_plate_ipl,
#cv2.cv.Scalar(255, 0, 0),
cv2.cv.CV_RGB(255, 0, 0),
0, 0)
my_enumerated_blob.FillBlob(
license_plate_white_ipl,
#license_plate_ipl,
#cv2.cv.Scalar(255, 0, 0),
cv2.cv.CV_RGB(255, 255, 255),
0, 0)
# we can now save the image
#annotated_image = np.asarray(license_plate_ipl[:,:])
blob_image = np.asarray(license_plate_grayscale_ipl[:,:])
cv2.imwrite("%s_blobs.png" % fname_prefix, blob_image)
blob_white_image = np.asarray(license_plate_white_ipl[:,:])
cv2.imwrite("%s_white_blobs.png" % fname_prefix, blob_white_image)
# Looking for a rectangle - Plates are rectangular
# Thresholding image, the find contours then approxPolyDP
(threshold_value, blob_threshold_image) = cv2.threshold(
blob_white_image,
128,
255,
cv2.THRESH_BINARY | cv2.THRESH_OTSU)
print "Thresholding complete. Partition value is %d" % threshold_value
cv2.imwrite('%s_blob_threshold.png' % fname_prefix, blob_threshold_image)
# Blur to reduce noise?
#blurred_plate = cv2.GaussianBlur(blob_threshold_image, (5,5), 0)
#blob_threshold_image = blurred_plate
# Erode then dilate to reduce noise
blob_threshold_image_invert = cv2.bitwise_not(blob_threshold_image)
cv2.imwrite("%s_pre_dilated_and_eroded.png" % fname_prefix, blob_threshold_image_invert)
eroded_white_blobs = cv2.erode(blob_threshold_image_invert, None, iterations=4);
cv2.imwrite("%s_eroded_image.png" % fname_prefix, eroded_white_blobs)
dilated_white_blobs = cv2.dilate(eroded_white_blobs, None, iterations=4);
cv2.imwrite("%s_dilated.png" % fname_prefix, dilated_white_blobs)
blob_threshold_image = cv2.bitwise_not(blob_threshold_image_invert)
cv2.imwrite("%s_dilated_and_eroded.png" % fname_prefix, blob_threshold_image)
blob_threshold_image_invert = cv2.bitwise_not(blob_threshold_image)
contours, hierarchy = cv2.findContours(
blob_threshold_image,
cv2.RETR_LIST,
cv2.CHAIN_APPROX_SIMPLE)
#print "Contours: ", contours
# We now have contours. Approximate the polygon shapes
largest_rectangle_idx = 0
largest_rectangle_area = 0
rectangles = []
colours = ( (255,0,0), (0,255,0), (0,0,255), (255,255,0), (0,255,255))
for idx, contour in enumerate(contours):
print "Contour: %d" % idx
contour_area = cv2.contourArea(contour)
if float(contour_area / image_area) < min_plate_to_image_ratio:
            print "Contour %d under threshold. Contour Area: %f" % (idx, contour_area)
continue
elif float(contour_area / image_area) > max_plate_to_image_ratio:
            print "Contour %d over threshold. Contour Area: %f" % (idx, contour_area)
continue
approx = cv2.approxPolyDP(
contour,
0.02 * cv2.arcLength(contour, True),
True)
print "\n -"
        print "%d. Contour Area: %f, Arclength: %f, Polygon %d colour:%s" % (idx,
contour_area,
cv2.arcLength(contour, True),
len(approx),
colours[idx%len(colours)])
minarea_rectangle = cv2.minAreaRect(contour)
minarea_box = cv2.cv.BoxPoints(minarea_rectangle)
print "> ", minarea_rectangle
print ">> ", minarea_box
centre, width_and_height, theta = minarea_rectangle
aspect_ratio = float(max(width_and_height) / min(width_and_height))
print " aspect ratio: %f for %s " % (aspect_ratio, width_and_height)
minarea_box = np.int0(minarea_box)
cv2.drawContours(license_plate, [minarea_box], 0, (255,0,255), 2)
cv2.drawContours(
license_plate,
[contours[idx]],
0,
colours[idx%len(colours)])
# Aspect ratio removal
if aspect_ratio < 3 or aspect_ratio > 5:
print " Aspect ratio bounds fails"
continue
# Rectangles have polygon shape 4
if len(approx) == 4:
# Select the largest rect
rectangles.append(contour)
if contour_area > largest_rectangle_area :
largest_rectangle_area = contour_area
largest_rectangle_idx = idx
print "Probable plate hit is %d" % largest_rectangle_idx
cv2.drawContours(
license_plate,
[contours[largest_rectangle_idx]],
0,
colours[0],
idx + 1)
cv2.imwrite("%s_contours_colored.png" % fname_prefix, license_plate)
# Create a mask for the detected plate
#hull = cv2.convexHull(contours[largest_rectangle_idx])
# This bounding rectangle does not consider rotation
license_plate = cv2.imread(file_name, cv2.CV_LOAD_IMAGE_COLOR)
bounding_rectangle = cv2.boundingRect(contours[largest_rectangle_idx])
b_rect_x, b_rect_y, b_rect_w, b_rect_h = bounding_rectangle
plate_rectangle = (b_rect_x, b_rect_y, b_rect_w, b_rect_h)
print "Plate rectangle is: ", plate_rectangle
cv2.rectangle(license_plate, (b_rect_x, b_rect_y), (b_rect_x + b_rect_w, b_rect_y + b_rect_h), (0, 255, 0), 2)
cv2.imwrite("%s_bounding_box.png" % fname_prefix, license_plate)
license_plate = cv2.imread(file_name, cv2.CV_LOAD_IMAGE_COLOR)
minarea_rectangle = cv2.minAreaRect(contours[largest_rectangle_idx])
minarea_box = cv2.cv.BoxPoints(minarea_rectangle)
minarea_box = np.int0(minarea_box)
cv2.drawContours(license_plate, [minarea_box], 0, (0,0,255), 2)
cv2.imwrite("%s_bounding_box_minarea.png" % fname_prefix, license_plate)
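The script above depends on the legacy `cv2.cv` module and constants such as `cv2.CV_LOAD_IMAGE_COLOR`, which exist only in OpenCV 2.x. As a hedged sketch (an assumed port, not part of the original program), the final min-area-rectangle step might look like this against the modern OpenCV 3.x/4.x API:

```python
import cv2
import numpy as np

def draw_min_area_box(image_path, contour, out_path):
    # Modern equivalents: cv2.IMREAD_COLOR replaces cv2.CV_LOAD_IMAGE_COLOR,
    # and cv2.boxPoints replaces cv2.cv.BoxPoints.
    image = cv2.imread(image_path, cv2.IMREAD_COLOR)
    rect = cv2.minAreaRect(contour)                 # ((cx, cy), (w, h), angle)
    box = cv2.boxPoints(rect).astype(np.int32)      # four integer corner points
    cv2.drawContours(image, [box], 0, (0, 0, 255), 2)
    cv2.imwrite(out_path, image)
    return rect
```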
| xmnlab/minilab | ia/ocr/alpr.py | Python | gpl-3.0 | 10,141 | 0.008776 |
#!/usr/bin/env python
from __future__ import print_function
from builtins import input
import sys
import pmagpy.pmag as pmag
def main():
"""
NAME
gofish.py
DESCRIPTION
calculates fisher parameters from dec inc data
INPUT FORMAT
takes dec/inc as first two columns in space delimited file
SYNTAX
gofish.py [options] [< filename]
OPTIONS
-h prints help message and quits
-i for interactive filename entry
-f FILE, specify input file
-F FILE, specifies output file name
< filename for reading from standard input
OUTPUT
mean dec, mean inc, N, R, k, a95, csd
"""
if '-h' in sys.argv: # check if help is needed
print(main.__doc__)
sys.exit() # graceful quit
if '-i' in sys.argv: # ask for filename
file=input("Enter file name with dec, inc data: ")
f=open(file,'r')
data=f.readlines()
elif '-f' in sys.argv:
dat=[]
ind=sys.argv.index('-f')
file=sys.argv[ind+1]
f=open(file,'r')
data=f.readlines()
else:
data = sys.stdin.readlines() # read from standard input
ofile = ""
if '-F' in sys.argv:
ind = sys.argv.index('-F')
ofile= sys.argv[ind+1]
        out = open(ofile, 'w')
DIs= [] # set up list for dec inc data
for line in data: # read in the data from standard input
if '\t' in line:
rec=line.split('\t') # split each line on space to get records
else:
rec=line.split() # split each line on space to get records
DIs.append((float(rec[0]),float(rec[1])))
#
fpars=pmag.fisher_mean(DIs)
outstring='%7.1f %7.1f %i %10.4f %8.1f %7.1f %7.1f'%(fpars['dec'],fpars['inc'],fpars['n'],fpars['r'],fpars['k'],fpars['alpha95'], fpars['csd'])
if ofile == "":
print(outstring)
else:
out.write(outstring+'\n')
#
if __name__ == "__main__":
main()
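gofish.py is a thin wrapper around `pmag.fisher_mean`, which, as the output formatting above shows, returns a dict with the keys dec, inc, n, r, k, alpha95 and csd. A small usage sketch, assuming PmagPy is installed; the direction values are made up for illustration:

```python
import pmagpy.pmag as pmag

# A few illustrative dec/inc pairs (degrees); not real measurements.
directions = [(350.0, 55.0), (352.5, 58.0), (347.0, 54.0), (355.0, 57.5)]

fpars = pmag.fisher_mean(directions)
print("%7.1f %7.1f %i %10.4f %8.1f %7.1f %7.1f" % (
    fpars['dec'], fpars['inc'], fpars['n'], fpars['r'],
    fpars['k'], fpars['alpha95'], fpars['csd']))
```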
| Caoimhinmg/PmagPy | programs/gofish.py | Python | bsd-3-clause | 1,976 | 0.0167 |
""" This file mainly exists to allow python setup.py test to work. """
import os
import sys
from django.conf import settings
settings.configure(
DEBUG=True,
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'testdb',
}
},
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'userattributes',),
USERATTRIBUTE_FILTER_COUNT=3
)
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from django.test.utils import get_runner
def runtests():
""" Runs test.py """
TestRunner = get_runner(settings)
test_runner = TestRunner(verbosity=1, interactive=True)
failures = test_runner.run_tests(['userattributes'])
sys.exit(bool(failures))
if __name__ == '__main__':
runtests()
| cschwede/django-userattributes | userattributes/runtests.py | Python | mit | 846 | 0.002364 |
# -*- coding: utf-8 -*-
"""
project............: meat-a
description........: web application for sharing meta information
date...............: 04/2013
copyright..........: Sebastian Fedrau
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
##
# @file controller.py
# Controller classes.
## @package controller
# Controller classes.
import config, app, view, exception, util, template, factory, re, sys, inspect, os, logger, mimetypes
from base64 import b64decode, b64encode
## Converts an exception to a view.JSONView.
# @param e an exception
# @return a view.JSONView instance
def exception_to_json_view(e):
if not isinstance(e, exception.BaseException):
e = exception.InternalFailureException(str(e))
m = {}
m["message"] = e.message
if isinstance(e, exception.InvalidParameterException) or isinstance(e, exception.MissingParameterException):
m["field"] = e.parameter
v = view.JSONView(e.http_status)
v.bind(m)
if e.http_status == 401:
v.headers["WWW-Authenticate"] = "Basic realm=\"%s\"" % (b64encode(config.REALM))
return v
## Converts an exception to a view.HTMLTemplateView.
# @param e an exception
# @return a view.HTMLTemplateView instance
def exception_to_html_view(e):
if not isinstance(e, exception.BaseException):
e = exception.InternalFailureException(str(e))
v = view.HTMLTemplateView(e.http_status, template.MessagePage, config.DEFAULT_LANGUAGE)
v.bind({"title": "Exception", "message": e.message})
if e.http_status == 401:
v.headers["WWW-Authenticate"] = "Basic realm=\"%s\"" % (b64encode(config.REALM))
return v
## Controller base class.
class Controller:
def __init__(self, exception_handler=exception_to_json_view):
## An app.Application instance.
self.app = app.Application()
## Function to convert exceptions to a view.View instance.
self.__exception_handler = exception_handler
## A logger.
self.log = None
## Handles an HTTP request.
# @param request_id id for identifying the request
# @param method the HTTP method (post, get, put or delete)
# @param env a dictionary providing environment details
# @param kwargs received parameters
# @return a view.View instance with a binded model
def handle_request(self, request_id, method, env, **kwargs):
try:
self.log = logger.get_logger(request_id)
if method == "OPTIONS":
return self.__options__()
m = {"post": self.__post__, "get": self.__get__, "put": self.__put__, "delete": self.__delete__}
self.__start_process__(env, **kwargs)
self.__check_rate_limit__(env)
f = m[method.lower()]
# get function argument names:
spec = inspect.getargspec(f)
argnames = spec[0][2:]
# get argument values from kwargs:
values = util.select_values(kwargs, argnames)
# set default values:
defaults = spec[3]
if not defaults is None:
diff = len(values) - len(defaults)
for i in range(len(values)):
if values[i] is None and i >= diff:
values[i] = defaults[diff - i]
# test required parameters:
if hasattr(f, "__required__"):
for k, v in dict(zip(argnames, values)).items():
if k in f.__required__ and v is None:
raise exception.MissingParameterException(k)
# merge argument list:
args = [env] + values
# call method:
v = apply(f, args)
# default headers:
if not v.headers.has_key("Cache-Control"):
v.headers["Cache-Control"] = "no-cache"
v.headers["Access-Control-Allow-Origin"] = "*"
v.headers["Access-Control-Allow-Headers"] = "accept, authorization"
except:
self.log.error("Couldn't handle request: %s", sys.exc_info()[1])
v = self.__exception_handler(sys.exc_info()[1])
return v
def __start_process__(self, env, **kwargs):
pass
def __check_rate_limit__(self, env):
self.log.debug("Checking rate limit.")
if not config.LIMIT_REQUESTS_BY_IP:
return
address = env["REMOTE_ADDR"]
with factory.create_db_connection() as conn:
with conn.enter_scope() as scope:
db = factory.create_request_db()
count = db.count_requests_by_ip(scope, address, 3600)
self.log.debug("'%s' has made %d of %d allowed requests.", address, count, config.IP_REQUESTS_PER_HOUR)
if count > config.IP_REQUESTS_PER_HOUR:
raise exception.HTTPException(402, "IP request limit reached.")
db.add_request(scope, address)
scope.complete()
def __method_not_supported__(self):
return self.__exception_handler(exception.MethodNotSupportedException())
def __options__(self):
methods = ["OPTIONS"]
for m in ["__get__", "__post__", "__delete__", "__put__"]:
f = getattr(self, m).__func__
b = getattr(Controller, m).__func__
if not f is b:
methods.append(m[2:-2].upper())
v = view.View("text/plain", 200)
v.headers["Access-Control-Allow-Methods"] = ", ".join(methods)
v.headers["Access-Control-Allow-Origin"] = "*"
v.headers["Access-Control-Allow-Headers"] = "accept, authorization"
return v
def __post__(self, env, *args):
return self.__method_not_supported__()
def __get__(self, env, *args):
return self.__method_not_supported__()
def __put__(self, env, *args):
return self.__method_not_supported__()
def __delete__(self, env, *args):
return self.__method_not_supported__()
## A controller with HTTP basic authentication support.
class AuthorizedController(Controller):
def __init__(self ):
Controller.__init__(self)
self.username = None
def __start_process__(self, env, **kwargs):
# get & decode Authorization header:
try:
self.log.debug( "Starting HTTP basic authentication.")
header = env["HTTP_AUTHORIZATION"]
self.log.debug("Found Authorization header: '%s'", header)
m = re.match("^Basic ([a-zA-Z0-9=/_\-]+)$", header)
auth = b64decode(m.group(1))
index = auth.find(":")
if index == -1:
raise exception.HTTPException(400, "Bad request. Authorization header is malformed.")
self.username, password = auth[:index], auth[index + 1:]
self.log.debug("Parsed Authorization header: '%s:%s'", self.username, password)
except KeyError:
raise exception.AuthenticationFailedException()
except:
raise exception.HTTPException(400, "Bad request: Authorization header is malformed.")
# validate password:
authenticated = False
try:
authenticated = self.app.validate_password(self.username, password)
except exception.UserNotFoundException:
pass
except exception.UserIsBlockedException:
pass
except:
raise sys.exc_info()[1]
if not authenticated:
raise exception.NotAuthorizedException()
def __check_rate_limit__(self, env):
self.log.debug("Checking rate limit.")
if not config.LIMIT_REQUESTS_BY_IP and not config.LIMIT_REQUESTS_BY_USER:
return
address = env["REMOTE_ADDR"]
with factory.create_db_connection() as conn:
with conn.enter_scope() as scope:
request_db = factory.create_request_db()
user_db = factory.create_user_db()
if config.LIMIT_REQUESTS_BY_IP:
count = request_db.count_requests_by_ip(scope, address, 3600)
self.log.debug("'%s' has made %d of %d allowed requests.", address, count, config.IP_REQUESTS_PER_HOUR)
if count > config.IP_REQUESTS_PER_HOUR:
raise exception.HTTPException(402, "IP request limit reached.")
user_id = user_db.map_username(scope, self.username)
if config.LIMIT_REQUESTS_BY_USER:
count = request_db.count_requests_by_user_id(scope, user_id, 3600)
self.log.debug("'%s' (%d) has made %d of %d allowed requests.", self.username, user_id, count, config.USER_REQUESTS_PER_HOUR)
if count > config.USER_REQUESTS_PER_HOUR:
raise exception.HTTPException(402, "User request limit reached.")
request_db.add_request(scope, address, user_id)
scope.complete()
## Requests new user accounts.
class AccountRequest(Controller):
def __init__(self):
Controller.__init__(self)
## Requests a user account.
# @param env environment data
# @param username name of the requested user account
# @param email email address of the requested user account
# @return URL of the registration website
def __post__(self, env, username, email):
id, code = self.app.request_account(username, email)
url = util.build_url("/html/registration/%s", config.WEBSITE_URL, id)
v = view.JSONView(201)
v.headers["Location"] = url
v.headers["ETag"] = util.hash(url)
m = {"Location": url}
v.bind(m)
return v
__post__.__required__ = ["username", "email"]
## Activates requested user account with corresponding id & code.
class AccountActivation(Controller):
def __init__(self):
Controller.__init__(self, exception_to_html_view)
## User activation website.
# @param env environment data
# @param id request id
# @param code activation code (optional)
# @return a website
def __get__(self, env, id, code):
with factory.create_db_connection() as connection:
db = factory.create_user_db()
with connection.enter_scope() as scope:
if not db.user_request_id_exists(scope, id):
raise exception.NotFoundException("Request id not found.")
v = view.HTMLTemplateView(200, template.AccountActivationPage, config.DEFAULT_LANGUAGE)
v.bind({"id": id, "code": code, "error_field": None})
return v
__get__.__required__ = ["id"]
## Activates a user account.
# @param env environment data
# @param id request id
# @param code activation code
# @param new_password1 new password to set
# @param new_password2 repeated password
# @return a website displaying a success message or a website for entering the request code
def __post__(self, env, id, code, new_password1, new_password2):
tpl = template.AccountActivatedPage
status = 200
try:
username, email, _ = self.app.activate_user(id, code)
m = {"username": username}
except exception.InvalidRequestCodeException as e:
tpl = template.AccountActivationPage
status = e.http_status
m = {"id": id, "code": code, "error_field": "code"}
v = view.HTMLTemplateView(status, tpl, config.DEFAULT_LANGUAGE)
v.bind(m)
return v
__post__.__required__ = ["id", "code"]
## Updates user password.
class UserPassword(AuthorizedController):
def __init__(self):
AuthorizedController.__init__(self)
## Updates the user password.
# @param env environment data
# @param old_password the current password
# @param new_password1 new password to set
# @param new_password2 repeated new password
# @return the new password (on success)
def __post__(self, env, old_password, new_password1, new_password2):
self.app.change_password(self.username, old_password, new_password1, new_password2)
v = view.JSONView(200)
m = {"password": new_password1}
v.bind(m)
return v
__post__.__required__ = ["old_password", "new_password1", "new_password2"]
## Requests a new password.
class PasswordRequest(Controller):
def __init__(self):
Controller.__init__(self)
## Requests a new password.
# @param env environment data
# @param username name of the user who wants to set a new password
# @param email the user's email address
# @return location of the generated resource to change the password
def __post__(self, env, username, email):
id, code = self.app.request_new_password(username, email)
url = util.build_url("/html/user/%s/password/reset/%s", config.WEBSITE_URL, username, id)
v = view.JSONView(201)
v.headers["Location"] = url
v.headers["ETag"] = util.hash(url)
m = {"Location": url}
v.bind(m)
return v
__post__.__required__ = ["username", "email"]
## Resets a password using a corresponding code & id.
class PasswordChange(Controller):
def __init__(self):
Controller.__init__(self, exception_to_html_view)
## A website to change the user's password.
# @param env environment data
# @param id password change request id
# @param code a related code (optional)
# @return a website
def __get__(self, env, id, code):
with factory.create_db_connection() as connection:
db = factory.create_user_db()
with connection.enter_scope() as scope:
if not db.password_request_id_exists(scope, id):
raise exception.NotFoundException("Request id not found.")
v = view.HTMLTemplateView(200, template.ChangePasswordPage, config.DEFAULT_LANGUAGE)
v.bind({"id": id, "code": code, "error_field": None})
return v
__get__.__required__ = ["id"]
## Sets a new password.
# @param env environment data
# @param id password change request id
# @param code a related code
# @param new_password1 new password to set
# @param new_password2 repeated password
# @return a website displaying a success message or a website for entering the new password and request code
def __post__(self, env, id, code, new_password1, new_password2):
tpl = template.PasswordChangedPage
status = 200
try:
username, _ = self.app.reset_password(id, code, new_password1, new_password2)
m = {"username": username}
except exception.BaseException as e:
tpl = template.ChangePasswordPage
status = e.http_status
m = {"id": id, "code": code}
if isinstance(e, exception.InvalidRequestCodeException):
m["error_field"] = "code"
elif isinstance(e, exception.InvalidParameterException):
m["error_field"] = e.parameter
else:
raise e
v = view.HTMLTemplateView(status, tpl, config.DEFAULT_LANGUAGE)
v.bind(m)
return v
__post__.__required__ = ["id", "code"]
## Updates, gets or deletes a user account.
class UserAccount(AuthorizedController):
def __init__(self):
AuthorizedController.__init__(self)
## Changes user details.
# @param env environment data
# @param email email address to set
# @param firstname firstname to set
# @param lastname lastname to set
# @param gender gender to set
# @param language language to set
# @param protected protected status to set
# @return new user details
def __post__(self, env, email, firstname, lastname, gender, language, protected):
self.app.update_user_details(self.username, email, firstname, lastname, gender, language, util.to_bool(protected))
v = view.JSONView(200)
m = self.app.get_full_user_details(self.username)
v.bind(m)
return v
__post__.__required__ = ["email"]
## Gets user details.
# @param env environment data
# @param username name of the user to get details from
# @return user details
def __get__(self, env, username):
if username.lower() == self.username.lower():
m = self.app.get_full_user_details(username)
else:
m = self.app.get_user_details(self.username, username)
v = view.JSONView(200)
v.bind(m)
return v
__get__.__required = ["username"]
## Disables a user account.
# @param env environment data
# @param username name of the user to deactivate
# @return no content (status 204)
def __delete__(self, env, username):
if not username.lower() == username:
raise exception.NotAuthorizedException()
self.app.disable_user(username)
return view.EmptyView(204)
__delete__.__required = ["username"]
## Updates or downloads an avatar.
# @todo not implemented yet
class Avatar(AuthorizedController):
def __init__(self):
AuthorizedController.__init__(self)
def __post__(self, env, filename, file):
name = self.app.update_avatar(self.username, filename, file)
v = view.JSONView(200)
m = {"filename": name}
v.bind(m)
return v
__post__.__required__ = ["filename", "file"]
def __get__(self, env, username):
details = self.app.get_user_details(self.username, username)
try:
avatar = details["avatar"]
if avatar is None:
raise exception.NotFoundException("Avatar not found.")
# build path & get mime type:
path = os.path.join(config.AVATAR_DIR, avatar)
mime = mimetypes.guess_type(path)[0]
if not os.path.isfile(path):
raise exception.NotFoundException("Avatar not found.")
# send base64 encoded image?
if "text/plain" in env["HTTP_ACCEPT"]:
filename, _ = os.path.splitext(path)
path = "%s.b64" % filename
mime = "text/plain"
v = view.FileView(200, mime)
v.headers["Cache-Control"] = "max-age=900"
v.bind({"filename": path})
return v
except KeyError:
raise exception.NotAuthorizedException()
__get__.__required__ = ["username"]
## Searches the user database.
class Search(AuthorizedController):
def __init__(self):
AuthorizedController.__init__(self)
## Searches the user database.
# @param env environment data
# @param query search query
# @return a list with found usernames
def __get__(self, env, query):
m = self.app.find_user(self.username, query)
v = view.JSONView(200)
v.bind(m)
return v
## Updates or gets friendship details.
class Friendship(AuthorizedController):
def __init__(self):
AuthorizedController.__init__(self)
## Gets friendship details.
# @param env environment data
# @param username user to get friendship status from
# @return friendship details
def __get__(self, env, username):
return self.__get_friendship__(username)
__get__.__required__ = ["username"]
## Follows a user.
# @param env environment data
# @param username user to follow
# @return friendship details
def __put__(self, env, username):
return self.__change_friendship__(username, True)
__put__.__required__ = ["username"]
## Unfollows a user.
# @param env environment data
# @param username user to unfollow
# @return friendship details
def __delete__(self, env, username):
return self.__change_friendship__(username, False)
__delete__.__required__ = ["username"]
def __change_friendship__(self, username, friendship):
try:
self.app.follow(self.username, username, friendship)
except exception.ConflictException:
pass
except exception.NotFoundException:
pass
return self.__get_friendship__(username)
def __get_friendship__(self, username):
m = self.app.get_friendship(self.username, username)
v = view.JSONView(200)
v.bind(m)
return v
## Gets messages.
class Messages(AuthorizedController):
def __init__(self):
AuthorizedController.__init__(self)
## Gets messages.
# @param env environment data
# @param limit maximum number of received messages
# @param after only get messages created after the given timestamp
# @return messages sent to the user account
def __get__(self, env, limit=50, after=None):
m = self.app.get_messages(self.username, int(limit), after)
v = view.JSONView(200)
v.bind(m)
return v
## Gets public messages.
class PublicMessages(AuthorizedController):
def __init__(self):
AuthorizedController.__init__(self)
## Gets public messages.
# @param env environment data
# @param limit maximum number of messages to receive
# @param after only get messages created after the given timestamp
# @return public messages
def __get__(self, env, limit=50, after=None):
m = self.app.get_public_messages(self.username, int(limit), after)
v = view.JSONView(200)
v.bind(m)
return v
## Gets objects (ordered by timestamp).
class Objects(AuthorizedController):
def __init__(self):
AuthorizedController.__init__(self)
## Gets objects.
# @param env environment data
# @param page page index
# @param page_size page size
# @return objects ordered by timestamp (descending)
def __get__(self, env, page=0, page_size=10):
m = self.app.get_objects(int(page), int(page_size))
v = view.JSONView(200)
v.bind(m)
return v
## Gets random objects.
class RandomObjects(AuthorizedController):
def __init__(self):
AuthorizedController.__init__(self)
## Gets random objects.
# @param env environment data
# @param page_size page size
# @return random objects
def __get__(self, env, page_size=10):
m = self.app.get_random_objects(int(page_size))
v = view.JSONView(200)
v.bind(m)
return v
## Gets popular objects.
class PopularObjects(AuthorizedController):
def __init__(self):
AuthorizedController.__init__(self)
## Gets popular objects.
# @param env environment data
# @param page page index
# @param page_size page size
# @return objects ordered by popularity
def __get__(self, env, page=0, page_size=10):
m = self.app.get_popular_objects(int(page), int(page_size))
v = view.JSONView(200)
v.bind(m)
return v
## Gets objects filtered by tag.
class TaggedObjects(AuthorizedController):
def __init__(self):
AuthorizedController.__init__(self)
## Gets objects assigned to a tag.
# @param env environment data
# @param tag a tag
# @param page page index
# @param page_size page size
# @return objects assigned to a tag
def __get__(self, env, tag, page=0, page_size=10):
m = self.app.get_tagged_objects(tag, int(page), int(page_size))
v = view.JSONView(200)
v.bind(m)
return v
## Gets tag cloud.
class TagCloud(AuthorizedController):
def __init__(self):
AuthorizedController.__init__(self)
## Gets a tag cloud.
# @param env environment data
# @return a tag cloud
def __get__(self, env):
m = self.app.get_tag_cloud()
v = view.JSONView(200)
v.bind(m)
return v
## Gets object details.
class Object(AuthorizedController):
def __init__(self):
AuthorizedController.__init__(self)
## Gets object details.
# @param env environment data
# @param guid guid of the object to get details from
# @return object details
def __get__(self, env, guid):
m = self.app.get_object(guid)
v = view.JSONView(200)
v.bind(m)
return v
__get__.__required__ = ["guid"]
## Gets or sets object tag(s).
class ObjectTags(AuthorizedController):
def __init__(self):
AuthorizedController.__init__(self)
## Gets the tags assigned to an object.
# @param env environment data
# @param guid guid of the object to get tags from
# @return a tag list
def __get__(self, env, guid):
return self.__get_tags__(guid)
__get__.__required__ = ["guid"]
## Assigns tags to an object.
# @param env environment data
# @param guid guid of the object to tag
# @param tags comma-separated list of tags
# @return a tag list
def __put__(self, env, guid, tags):
tags = list(util.split_strip_set(tags, ","))
if len(tags) == 0:
raise exception.HTTPException(400, "tag list cannot be empty.")
self.app.add_tags(guid, self.username, tags)
return self.__get_tags__(guid)
__put__.__required__ = ["guid", "tags"]
def __get_tags__(self, guid):
obj = self.app.get_object(guid)
m = obj["tags"]
v = view.JSONView(200)
v.bind(m)
return v
## Votes object.
class Voting(AuthorizedController):
def __init__(self):
AuthorizedController.__init__(self)
## Gets the user's vote.
# @param env environment data
# @param guid object guid
# @return the vote
def __get__(self, env, guid):
return self.__get_voting__(guid)
__get__.__required__ = ["guid"]
## Votes an object.
# @param env environment data
# @param guid object guid
# @param up up or downvote flag
# @return the vote
def __post__(self, env, guid, up):
self.app.vote(self.username, guid, util.to_bool(up))
return self.__get_voting__(guid)
__post__.__required__ = ["guid", "up"]
def __get_voting__(self, guid):
up = self.app.get_voting(self.username, guid)
m = { "up": up }
v = view.JSONView(200)
v.bind(m)
return v
## Gets or adds comment(s).
class Comments(AuthorizedController):
def __init__(self):
AuthorizedController.__init__(self)
## Gets object comments.
# @param env environment data
# @param guid guid of an object
# @param page page index
# @param page_size page size
# @return object comments
def __get__(self, env, guid, page=0, page_size=50):
return self.__get_comments__(guid, page, page_size)
__get__.__required__ = ["guid"]
## Adds a comment to an object.
# @param env environment data
# @param guid guid of an object
# @param text the comment
# @return object comments
def __post__(self, env, guid, text):
self.app.add_comment(guid, self.username, text)
return self.__get_comments__(guid)
__post__.__required__ = ["guid", "text"]
def __get_comments__(self, guid, page=0, page_size=50):
m = self.app.get_comments(guid, self.username, page, page_size)
v = view.JSONView(200)
v.bind(m)
return v
## Gets a single comment.
class Comment(AuthorizedController):
def __init__(self):
AuthorizedController.__init__(self)
## Gets a single comment.
# @param env environment data
# @param id id of the comment to get
# @return a comment
def __get__(self, env, id):
self.__test_required_parameters__(id)
m = self.app.get_comment(int(id), self.username)
v = view.JSONView(200)
v.bind(m)
return v
__get__.__required__ = ["id"]
## Favorite base methods.
class FavoriteBase(AuthorizedController):
def __init__(self):
AuthorizedController.__init__(self)
def __change_favorite__(self, guid, favorite):
try:
self.app.favor(self.username, guid, favorite)
except exception.ConflictException:
pass
except exception.NotFoundException:
pass
return self.__get_favorites__()
def __get_favorites__(self):
m = self.app.get_favorites(self.username)
v = view.JSONView(200)
v.bind(m)
return v
## Gets favorites.
class Favorites(FavoriteBase):
def __init__(self):
FavoriteBase.__init__(self)
## Gets the favorites of the user.
# @param env environment data
# @return the user's favorite list
def __get__(self, env):
return self.__get_favorites__()
## Add/remove favorite.
class Favorite(FavoriteBase):
def __init__(self):
FavoriteBase.__init__(self)
## Adds an object to the user's favorite list.
# @param env environment data
# @param guid guid of the object to add
# @return the user's favorite list
def __put__(self, env, guid):
return self.__change_favorite__(guid, True)
__put__.__required__ = ["guid"]
## Removes an object from the user's favorite list.
# @param env environment data
# @param guid guid of the object to remove
# @return the user's favorite list
def __delete__(self, env, guid):
return self.__change_favorite__(guid, False)
__delete__.__required__ = ["guid"]
## Gets recommendations.
class Recommendations(AuthorizedController):
def __init__(self):
AuthorizedController.__init__(self)
## Gets objects recommended to the user.
# @param env environment data
# @param page page index
# @param page_size page size
# @return recommended objects
def __get__(self, env, page=0, page_size=10):
m = self.app.get_recommendations(self.username, int(page), int(page_size))
v = view.JSONView(200)
v.bind(m)
return v
## Recommends an object.
class Recommendation(AuthorizedController):
def __init__(self):
AuthorizedController.__init__(self)
## Recommends an object to other users.
# @param env environment data
# @param guid guid of the object to recommend
# @param receivers comma-separated list of users to recommend the object to
# @return users the object has been recommended to
def __put__(self, env, guid, receivers):
receivers = list(util.split_strip_set(receivers, ","))
if len(receivers) == 0:
raise exception.HTTPException(400, "receiver list cannot be empty.")
self.app.recommend(self.username, receivers, guid)
m = {"guid": guid, "receivers": receivers}
v = view.JSONView(200)
v.bind(m)
return v
__put__.__required__ = ["guid", "receivers"]
## Flags object abused.
class ReportAbuse(AuthorizedController):
def __init__(self):
AuthorizedController.__init__(self)
## Flags an object abused.
# @param env environment data
# @param guid of the object to flag
# @return abuse status
def __put__(self, env, guid):
self.__test_required_parameters__(guid)
self.app.report_abuse(guid)
m = { "guid": guid, "reported": True }
v = view.JSONView(200)
v.bind(m)
return v
__put__.__required__ = ["guid"]
"""
## Serve static file.
class Base64Image(AuthorizedController):
def __init__(self):
AuthorizedController.__init__(self)
def __get_file__(self, basedir, filename):
path = os.path.join(basedir, filename)
# search & serve file:
self.log.debug("Searching file: '%s'", path)
if not os.path.isfile(path):
raise exception.NotFoundException("File not found.")
v = view.FileView(200, "text/plain")
v.headers["Cache-Control"] = "max-age=31536000"
v.bind({"filename": path})
return v
## Gets a base64 encoded image file.
class Image(Base64Image):
def __init__(self):
Base64Image.__init__(self)
def __get__(self, env, filename):
index = filename.rfind(".")
return self.__get_file__(config.IMAGE_LIBRARY_BASE64_PATH, "%s.base64" % filename[:index])
## Gets a thumbnail.
class Thumbnail(Base64Image):
def __init__(self):
Base64Image.__init__(self)
def __get__(self, env, filename):
index = filename.rfind(".")
return self.__get_file__(config.IMAGE_LIBRARY_BASE64_PATH, "%s.thumbnail.base64" % filename[:index])
"""
| 20centaurifux/meat-a | controller.py | Python | agpl-3.0 | 29,635 | 0.02902 |
#!/usr/bin/python
# -*- encoding: utf-8; py-indent-offset: 4 -*-
# +------------------------------------------------------------------+
# | ____ _ _ __ __ _ __ |
# | / ___| |__ ___ ___| | __ | \/ | |/ / |
# | | | | '_ \ / _ \/ __| |/ / | |\/| | ' / |
# | | |___| | | | __/ (__| < | | | | . \ |
# | \____|_| |_|\___|\___|_|\_\___|_| |_|_|\_\ |
# | |
# | Copyright Mathias Kettner 2014 mk@mathias-kettner.de |
# +------------------------------------------------------------------+
#
# This file is part of Check_MK.
# The official homepage is at http://mathias-kettner.de/check_mk.
#
# check_mk is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation in version 2. check_mk is distributed
# in the hope that it will be useful, but WITHOUT ANY WARRANTY; with-
# out even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE. See the GNU General Public License for more de-
# tails. You should have received a copy of the GNU General Public
# License along with GNU Make; see the file COPYING. If not, write
# to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
# Boston, MA 02110-1301 USA.
import re
import defaults
from lib import *
class MKGuitestFailed(MKException):
def __init__(self, errors):
self.errors = errors
MKException.__init__(self, _("GUI Test failed"))
class GUITester:
def __init__(self):
self.guitest = None
self.replayed_guitest_step = None
self.guitest_repair_step = None
def init_guitests(self):
if self.myfile == "guitest":
self.replay_guitest()
elif self.guitest_recording_active():
self.begin_guitest_recording()
def begin_guitest_recording(self):
self.guitest = {
"variables" : self.vars.copy(),
"filename" : self.myfile,
"output" : {},
}
# Fix transaction ID: We are just interested in whether it is valid or not
if "_transid" in self.vars:
if self.transaction_valid():
self.guitest["variables"]["_transid"] = "valid"
else:
self.guitest["variables"]["_transid"] = "invalid"
self.add_status_icon("guitest", _("GUI test recording is active"))
def end_guitest_recording(self):
if self.guitest != None:
self.guitest["user"] = self.user
self.guitest["elapsed_time"] = time.time() - self.start_time
self.save_guitest_step(self.guitest)
def save_guitest_step(self, step):
path = defaults.var_dir + "/guitests/RECORD"
if not os.path.exists(path):
test_steps = []
else:
test_steps = eval(file(path).read())
if self.guitest_repair_step != None:
mod_step = test_steps[self.guitest_repair_step]
mod_step["output"] = step["output"]
mod_step["user"] = step["user"]
mod_step["elapsed_time"] = step["elapsed_time"]
else:
test_steps.append(step)
file(path, "w").write("%s\n" % pprint.pformat(test_steps))
def load_guitest(self, name):
path = defaults.var_dir + "/guitests/" + name + ".mk"
try:
return eval(file(path).read())
except IOError, e:
raise MKGeneralException(_("Cannot load GUI test file %s: %s") % (self.attrencode(path), e))
def replay_guitest(self):
test_name = self.var("test")
if not test_name:
raise MKGuitestFailed([_("Missing the name of the GUI test to run (URL variable 'test')")])
guitest = self.load_guitest(test_name)
step_nr_text = self.var("step")
try:
step_nr = int(step_nr_text)
except:
raise MKGuitestFailed([_("Invalid or missing test step number (URL variable 'step')")])
if step_nr >= len(guitest) or step_nr < 0:
raise MKGuitestFailed([_("Invalid test step number %d (only 0...%d)") % (step_nr, len(guitest)-1)])
repair = self.var("repair") == "1"
if repair:
self.guitest_repair_step = step_nr
self.begin_guitest_recording()
self.replayed_guitest_step = guitest[step_nr]
self.replayed_guitest_step["replay"] = {}
self.myfile = self.replayed_guitest_step["filename"]
self.guitest_fake_login(self.replayed_guitest_step["user"])
self.vars = self.replayed_guitest_step["variables"]
if "_transid" in self.vars and self.vars["_transid"] == "valid":
self.vars["_transid"] = self.get_transid()
self.store_new_transids()
def guitest_recording_active(self):
        # Activated by symbolic link pointing to recording file
return os.path.lexists(defaults.var_dir + "/guitests/RECORD") and not \
self.myfile in self.guitest_ignored_pages()
def guitest_ignored_pages(self):
return [ "run_cron", "index", "side", "sidebar_snapin", "dashboard", "dashboard_dashlet", "login" ]
def guitest_record_output(self, key, value):
if self.guitest:
self.guitest["output"].setdefault(key, []).append(value)
elif self.replayed_guitest_step:
self.replayed_guitest_step["replay"].setdefault(key, []).append(value)
def finalize_guitests(self):
if self.guitest:
self.end_guitest_recording()
if self.replayed_guitest_step:
try:
self.end_guitest_replay()
except MKGuitestFailed, e:
self.write("\n[[[GUITEST FAILED]]]\n%s" % ("\n".join(e.errors)))
def end_guitest_replay(self):
if self.replayed_guitest_step and self.guitest_repair_step == None:
errors = []
for varname in self.replayed_guitest_step["output"].keys():
method = self.guitest_test_method(varname)
errors += [ "%s: %s" % (varname, error)
for error in method(
self.replayed_guitest_step["output"][varname],
self.replayed_guitest_step["replay"].get(varname, [])) ]
if errors:
raise MKGuitestFailed(errors)
def guitest_test_method(self, varname):
if varname == "data_tables":
return guitest_check_datatables
elif varname == "page_title":
return guitest_check_single_value
else:
return guitest_check_element_list
def guitest_check_single_value(reference, reality):
    errors = []
    if len(reference) > 1:
        errors.append("More than one reference value: %s" % ", ".join(reference))
    if len(reality) > 1:
        errors.append("More than one value: %s" % ", ".join(reality))
    diff_text = guitest_check_text(reference[0], reality[0])
    if diff_text:
        errors.append(diff_text)
    return errors
def guitest_check_element_list(reference, reality):
errors = []
one_missing = False
for entry in reference:
if not guitest_entry_in_reference_list(entry, reality):
errors.append("missing entry %r" % (entry,))
one_missing = True
if one_missing:
for entry in reality:
if not guitest_entry_in_reference_list(entry, reference):
errors.append("exceeding entry %r" % (entry,))
return errors
def guitest_entry_in_reference_list(entry, ref_list):
for ref_entry in ref_list:
if guitest_entries_match(ref_entry, entry):
return True
return False
def guitest_entries_match(ref, real):
if type(ref) in (list, tuple):
return len(ref) == len(real) and \
map(guitest_drop_dynamic_ids, ref) == map(guitest_drop_dynamic_ids, real)
else:
return guitest_drop_dynamic_ids(ref) == guitest_drop_dynamic_ids(real)
def guitest_check_datatables(reference, reality):
if len(reference) != len(reality):
return [ _("Expected %d data tables, but got %d") % (len(reference), len(reality)) ]
errors = []
for ref_table, real_table in zip(reference, reality):
errors += guitest_check_datatable(ref_table, real_table)
return errors
def guitest_check_datatable(ref_table, real_table):
if ref_table["id"] != real_table["id"]:
return [ "Table id %s expected, but got %s" % (ref_table["id"], real_table["id"]) ]
if len(ref_table["rows"]) != len(real_table["rows"]):
return [ "Table %s: expected %d rows, but got %d" % (
ref_table["id"], len(ref_table["rows"]), len(real_table["rows"])) ]
for row_nr, (ref_row, real_row) in enumerate(zip(ref_table["rows"], real_table["rows"])):
if len(ref_row) != len(real_row):
return [ "Table %s, row %d: expected %d columns, but got %d" % (
ref_table["id"], row_nr+1, len(ref_row), len(real_row)) ]
# Note: Rows are tuples. The first component is the list of cells
for cell_nr, (ref_cell, real_cell) in enumerate(zip(ref_row[0], real_row[0])):
# Note: cell is a triple. The first component contains the text
diff_text = guitest_check_text(ref_cell[0], real_cell[0])
if diff_text:
return [ "Row %d, Column %d: %s" % (row_nr, cell_nr, diff_text) ]
return []
def guitest_check_text(ref, real):
ref_clean = guitest_drop_dynamic_ids(ref)
real_clean = guitest_drop_dynamic_ids(real)
if ref_clean == real_clean:
return ""
prefix, ref_rest, real_rest = find_common_prefix(ref_clean, real_clean)
return "expected %s[[[%s]]], got %s[[[%s]]]" % (prefix, ref_rest, prefix, real_rest)
def find_common_prefix(a, b):
if len(a) > len(b) and a.startswith(b):
        return b, a[len(b):], ""
if len(b) > len(a) and b.startswith(a):
        return a, "", b[len(a):]
for i in range(min(len(a), len(b))):
if a[i] != b[i]:
return a[:i], a[i:], b[i:]
return a, "", ""
def guitest_drop_dynamic_ids(text):
return re.sub("selection(%3d|=)[a-f0-9---]{36}", "selection=*",
re.sub("_transid=1[4-6][0-9]{8}/[0-9]+", "_transid=TRANSID", text))
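The text-diff helpers at the end of the module normalise dynamic tokens before comparing recorded and replayed output. A short illustration of their intended behaviour, assuming the module's own imports (`defaults`, `lib`) resolve as they do inside a Check_MK web environment; the literal strings are made up:

```python
# Illustration only; guitester's imports require the Check_MK web environment.
from guitester import find_common_prefix, guitest_drop_dynamic_ids

# Common prefix plus the differing remainder of each side.
print(find_common_prefix("Host status", "Host summary"))
# -> ('Host s', 'tatus', 'ummary')

# Dynamic transaction ids are replaced before comparison.
print(guitest_drop_dynamic_ids("view.py?_transid=1412345678/42&x=1"))
# -> 'view.py?_transid=TRANSID&x=1'
```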
| oposs/check_mk_mirror | web/htdocs/guitester.py | Python | gpl-2.0 | 10,454 | 0.005452 |
from strings_anagrams import are_anagrams
def test_funny_anagrams():
assert are_anagrams("the eyes", "they see")
assert are_anagrams("Allahu Akbar, Obama", "Aha bub, koala alarm")
assert are_anagrams("Donald Trump", "Damp Old Runt")
def test_same_is_anagram():
assert are_anagrams("foo", "Foo")
assert are_anagrams(" ", " ")
def test_wrong():
assert not are_anagrams("mary", "cow")
assert not are_anagrams("123", "12345")
def test_explosion():
assert not are_anagrams(None, "")
assert not are_anagrams(321, 123)
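These tests exercise `are_anagrams`, but the implementation itself is not part of this record. One possible implementation consistent with the tests above (case-insensitive, ignoring whitespace and punctuation, and returning False for non-string input) is sketched below; it is an assumption, not the repository's actual strings_anagrams module:

```python
from collections import Counter

def are_anagrams(a, b):
    # Non-strings (None, ints, ...) are simply not anagrams; never raise.
    if not isinstance(a, str) or not isinstance(b, str):
        return False
    # Compare case-insensitively, keeping only letters and digits.
    def normalize(s):
        return Counter(ch for ch in s.lower() if ch.isalnum())
    return normalize(a) == normalize(b)
```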
| elli0ttB/problems | sorting/test_are_anagrams.py | Python | mit | 556 | 0.008993 |
# encoding: utf-8
# module PyQt4.QtGui
# from /usr/lib/python3/dist-packages/PyQt4/QtGui.cpython-34m-x86_64-linux-gnu.so
# by generator 1.135
# no doc
# imports
import PyQt4.QtCore as __PyQt4_QtCore
from .QGraphicsLayoutItem import QGraphicsLayoutItem
class QGraphicsLayout(QGraphicsLayoutItem):
""" QGraphicsLayout(QGraphicsLayoutItem parent=None) """
def activate(self): # real signature unknown; restored from __doc__
""" QGraphicsLayout.activate() """
pass
def addChildLayoutItem(self, QGraphicsLayoutItem): # real signature unknown; restored from __doc__
""" QGraphicsLayout.addChildLayoutItem(QGraphicsLayoutItem) """
pass
def count(self): # real signature unknown; restored from __doc__
""" QGraphicsLayout.count() -> int """
return 0
def getContentsMargins(self): # real signature unknown; restored from __doc__
""" QGraphicsLayout.getContentsMargins() -> (float, float, float, float) """
pass
def invalidate(self): # real signature unknown; restored from __doc__
""" QGraphicsLayout.invalidate() """
pass
def isActivated(self): # real signature unknown; restored from __doc__
""" QGraphicsLayout.isActivated() -> bool """
return False
def itemAt(self, p_int): # real signature unknown; restored from __doc__
""" QGraphicsLayout.itemAt(int) -> QGraphicsLayoutItem """
return QGraphicsLayoutItem
def removeAt(self, p_int): # real signature unknown; restored from __doc__
""" QGraphicsLayout.removeAt(int) """
pass
def setContentsMargins(self, p_float, p_float_1, p_float_2, p_float_3): # real signature unknown; restored from __doc__
""" QGraphicsLayout.setContentsMargins(float, float, float, float) """
pass
def updateGeometry(self): # real signature unknown; restored from __doc__
""" QGraphicsLayout.updateGeometry() """
pass
def widgetEvent(self, QEvent): # real signature unknown; restored from __doc__
""" QGraphicsLayout.widgetEvent(QEvent) """
pass
def __init__(self, QGraphicsLayoutItem_parent=None): # real signature unknown; restored from __doc__
pass
def __len__(self, *args, **kwargs): # real signature unknown
""" Return len(self). """
pass
| ProfessorX/Config | .PyCharm30/system/python_stubs/-1247971765/PyQt4/QtGui/QGraphicsLayout.py | Python | gpl-2.0 | 2,337 | 0.009414 |
# -*- coding: utf-8 -*-
"""Tests for aiohttp/client.py"""
import asyncio
import gc
import unittest
from unittest import mock
import aiohttp
from aiohttp.client import ClientSession
from aiohttp.multidict import MultiDict, CIMultiDict
from aiohttp.connector import BaseConnector, TCPConnector
class TestClientSession(unittest.TestCase):
maxDiff = None
def setUp(self):
self.loop = asyncio.new_event_loop()
asyncio.set_event_loop(None)
self.run = self.loop.run_until_complete
def tearDown(self):
self.loop.close()
gc.collect()
def make_open_connector(self):
conn = BaseConnector(loop=self.loop)
transp = unittest.mock.Mock()
conn._conns['a'] = [(transp, 'proto', 123)]
return conn
def test_init_headers_simple_dict(self):
session = ClientSession(
headers={
"h1": "header1",
"h2": "header2"
}, loop=self.loop)
self.assertEqual(
sorted(session._default_headers.items()),
([("H1", "header1"),
("H2", "header2")]))
session.close()
def test_init_headers_list_of_tuples(self):
session = ClientSession(
headers=[("h1", "header1"),
("h2", "header2"),
("h3", "header3")],
loop=self.loop)
self.assertEqual(
session._default_headers,
CIMultiDict([("h1", "header1"),
("h2", "header2"),
("h3", "header3")]))
session.close()
def test_init_headers_MultiDict(self):
session = ClientSession(
headers=MultiDict(
[("h1", "header1"),
("h2", "header2"),
("h3", "header3")]),
loop=self.loop)
self.assertEqual(
session._default_headers,
CIMultiDict([("H1", "header1"),
("H2", "header2"),
("H3", "header3")]))
session.close()
def test_init_headers_list_of_tuples_with_duplicates(self):
session = ClientSession(
headers=[("h1", "header11"),
("h2", "header21"),
("h1", "header12")],
loop=self.loop)
self.assertEqual(
session._default_headers,
CIMultiDict([("H1", "header11"),
("H2", "header21"),
("H1", "header12")]))
session.close()
def test_init_cookies_with_simple_dict(self):
session = ClientSession(
cookies={
"c1": "cookie1",
"c2": "cookie2"
}, loop=self.loop)
self.assertEqual(set(session.cookies), {'c1', 'c2'})
self.assertEqual(session.cookies['c1'].value, 'cookie1')
self.assertEqual(session.cookies['c2'].value, 'cookie2')
session.close()
def test_init_cookies_with_list_of_tuples(self):
session = ClientSession(
cookies=[("c1", "cookie1"),
("c2", "cookie2")],
loop=self.loop)
self.assertEqual(set(session.cookies), {'c1', 'c2'})
self.assertEqual(session.cookies['c1'].value, 'cookie1')
self.assertEqual(session.cookies['c2'].value, 'cookie2')
session.close()
def test_merge_headers(self):
# Check incoming simple dict
session = ClientSession(
headers={
"h1": "header1",
"h2": "header2"
}, loop=self.loop)
headers = session._prepare_headers({
"h1": "h1"
})
self.assertIsInstance(headers, CIMultiDict)
self.assertEqual(headers, CIMultiDict([
("h2", "header2"),
("h1", "h1")
]))
session.close()
def test_merge_headers_with_multi_dict(self):
session = ClientSession(
headers={
"h1": "header1",
"h2": "header2"
}, loop=self.loop)
headers = session._prepare_headers(MultiDict([("h1", "h1")]))
self.assertIsInstance(headers, CIMultiDict)
self.assertEqual(headers, CIMultiDict([
("h2", "header2"),
("h1", "h1")
]))
session.close()
def test_merge_headers_with_list_of_tuples(self):
session = ClientSession(
headers={
"h1": "header1",
"h2": "header2"
}, loop=self.loop)
headers = session._prepare_headers([("h1", "h1")])
self.assertIsInstance(headers, CIMultiDict)
self.assertEqual(headers, CIMultiDict([
("h2", "header2"),
("h1", "h1")
]))
session.close()
def test_merge_headers_with_list_of_tuples_duplicated_names(self):
session = ClientSession(
headers={
"h1": "header1",
"h2": "header2"
}, loop=self.loop)
headers = session._prepare_headers([("h1", "v1"),
("h1", "v2")])
self.assertIsInstance(headers, CIMultiDict)
self.assertEqual(headers, CIMultiDict([
("H2", "header2"),
("H1", "v1"),
("H1", "v2"),
]))
session.close()
def _make_one(self, **kwargs):
session = ClientSession(loop=self.loop, **kwargs)
params = dict(
headers={"Authorization": "Basic ..."},
max_redirects=2,
encoding="latin1",
version=aiohttp.HttpVersion10,
compress="deflate",
chunked=True,
expect100=True,
read_until_eof=False)
return session, params
@mock.patch("aiohttp.client.ClientSession._request")
def test_http_GET(self, patched):
session, params = self._make_one()
session.get(
"http://test.example.com",
params={"x": 1},
**params)
self.assertTrue(patched.called, "`ClientSession._request` not called")
self.assertEqual(
list(patched.call_args),
[("GET", "http://test.example.com",),
dict(
params={"x": 1},
allow_redirects=True,
**params)])
session.close()
@mock.patch("aiohttp.client.ClientSession._request")
def test_http_OPTIONS(self, patched):
session, params = self._make_one()
session.options(
"http://opt.example.com",
params={"x": 2},
**params)
self.assertTrue(patched.called, "`ClientSession._request` not called")
self.assertEqual(
list(patched.call_args),
[("OPTIONS", "http://opt.example.com",),
dict(
params={"x": 2},
allow_redirects=True,
**params)])
session.close()
@mock.patch("aiohttp.client.ClientSession._request")
def test_http_HEAD(self, patched):
session, params = self._make_one()
session.head(
"http://head.example.com",
params={"x": 2},
**params)
self.assertTrue(patched.called, "`ClientSession._request` not called")
self.assertEqual(
list(patched.call_args),
[("HEAD", "http://head.example.com",),
dict(
params={"x": 2},
allow_redirects=False,
**params)])
session.close()
@mock.patch("aiohttp.client.ClientSession._request")
def test_http_POST(self, patched):
session, params = self._make_one()
session.post(
"http://post.example.com",
params={"x": 2},
data="Some_data",
**params)
self.assertTrue(patched.called, "`ClientSession._request` not called")
self.assertEqual(
list(patched.call_args),
[("POST", "http://post.example.com",),
dict(
params={"x": 2},
data="Some_data",
**params)])
session.close()
@mock.patch("aiohttp.client.ClientSession._request")
def test_http_PUT(self, patched):
session, params = self._make_one()
session.put(
"http://put.example.com",
params={"x": 2},
data="Some_data",
**params)
self.assertTrue(patched.called, "`ClientSession._request` not called")
self.assertEqual(
list(patched.call_args),
[("PUT", "http://put.example.com",),
dict(
params={"x": 2},
data="Some_data",
**params)])
session.close()
@mock.patch("aiohttp.client.ClientSession._request")
def test_http_PATCH(self, patched):
session, params = self._make_one()
session.patch(
"http://patch.example.com",
params={"x": 2},
data="Some_data",
**params)
self.assertTrue(patched.called, "`ClientSession._request` not called")
self.assertEqual(
list(patched.call_args),
[("PATCH", "http://patch.example.com",),
dict(
params={"x": 2},
data="Some_data",
**params)])
session.close()
@mock.patch("aiohttp.client.ClientSession._request")
def test_http_DELETE(self, patched):
session, params = self._make_one()
session.delete(
"http://delete.example.com",
params={"x": 2},
**params)
self.assertTrue(patched.called, "`ClientSession._request` not called")
self.assertEqual(
list(patched.call_args),
[("DELETE", "http://delete.example.com",),
dict(
params={"x": 2},
**params)])
session.close()
def test_close(self):
conn = self.make_open_connector()
session = ClientSession(loop=self.loop, connector=conn)
session.close()
self.assertIsNone(session.connector)
self.assertTrue(conn.closed)
def test_closed(self):
session = ClientSession(loop=self.loop)
self.assertFalse(session.closed)
session.close()
self.assertTrue(session.closed)
def test_connector(self):
connector = TCPConnector(loop=self.loop)
session = ClientSession(connector=connector, loop=self.loop)
self.assertIs(session.connector, connector)
session.close()
def test_connector_loop(self):
loop = asyncio.new_event_loop()
connector = TCPConnector(loop=loop)
with self.assertRaisesRegex(
ValueError,
"loop argument must agree with connector"):
ClientSession(connector=connector, loop=self.loop)
connector.close()
loop.close()
def test_cookies_are_readonly(self):
session = ClientSession(loop=self.loop)
with self.assertRaises(AttributeError):
session.cookies = 123
session.close()
def test_detach(self):
session = ClientSession(loop=self.loop)
conn = session.connector
self.assertFalse(conn.closed)
session.detach()
self.assertIsNone(session.connector)
self.assertTrue(session.closed)
self.assertFalse(conn.closed)
conn.close()
def test_request_closed_session(self):
@asyncio.coroutine
def go():
session = ClientSession(loop=self.loop)
session.close()
with self.assertRaises(RuntimeError):
yield from session.request('get', '/')
self.loop.run_until_complete(go())
def test_close_flag_for_closed_connector(self):
session = ClientSession(loop=self.loop)
conn = session.connector
self.assertFalse(session.closed)
conn.close()
self.assertTrue(session.closed)
def test_double_close(self):
conn = self.make_open_connector()
session = ClientSession(loop=self.loop, connector=conn)
session.close()
self.assertIsNone(session.connector)
session.close()
self.assertTrue(session.closed)
self.assertTrue(conn.closed)
def test_del(self):
conn = self.make_open_connector()
session = ClientSession(loop=self.loop, connector=conn)
self.loop.set_exception_handler(lambda loop, ctx: None)
with self.assertWarns(ResourceWarning):
del session
gc.collect()
def test_context_manager(self):
conn = self.make_open_connector()
with ClientSession(loop=self.loop, connector=conn) as session:
pass
self.assertTrue(session.closed)
def test_borrow_connector_loop(self):
conn = self.make_open_connector()
session = ClientSession(connector=conn)
self.assertIs(session._loop, self.loop)
session.close()
def test_reraise_os_error(self):
@asyncio.coroutine
def go():
err = OSError(1, "permission error")
req = mock.Mock()
req_factory = mock.Mock(return_value=req)
req.send = mock.Mock(side_effect=err)
session = ClientSession(loop=self.loop, request_class=req_factory)
@asyncio.coroutine
def create_connection(req):
# return self.transport, self.protocol
return mock.Mock(), mock.Mock()
session._connector._create_connection = create_connection
with self.assertRaises(aiohttp.ClientOSError) as ctx:
yield from session.request('get', 'http://example.com')
e = ctx.exception
self.assertEqual(e.errno, err.errno)
self.assertEqual(e.strerror, err.strerror)
self.loop.run_until_complete(go())
| flying-sheep/aiohttp | tests/test_client_session.py | Python | apache-2.0 | 13,926 | 0 |
import unittest
from BTrees.OOBTree import OOBTree
from arche.interfaces import IObjectUpdatedEvent
from arche.interfaces import IWillLoginEvent
from arche.interfaces import IUser
from arche.testing import barebone_fixture
from pyramid import testing
from zope.interface.verify import verifyObject
from zope.interface.verify import verifyClass
from arche.api import User
from pyramid.request import apply_request_extensions
from pyramid.request import Request
from arche_pas.interfaces import IProviderData
from arche_pas.interfaces import IPASProvider
from arche_pas.exceptions import ProviderConfigError
class ProviderDataTests(unittest.TestCase):
def setUp(self):
self.config = testing.setUp()
def tearDown(self):
testing.tearDown()
@property
def _cut(self):
from arche_pas.models import ProviderData
return ProviderData
def test_verify_object(self):
context = User()
self.failUnless(verifyObject(IProviderData, self._cut(context)))
def test_verify_class(self):
self.failUnless(verifyClass(IProviderData, self._cut))
def test_setitem(self):
context = User()
obj = self._cut(context)
obj['one'] = {'one': 1}
self.assertIsInstance(obj['one'], OOBTree)
class PASProviderTests(unittest.TestCase):
def setUp(self):
self.config = testing.setUp()
def tearDown(self):
testing.tearDown()
@property
def _cut(self):
from arche_pas.models import PASProvider
return PASProvider
def _dummy_provider(self):
class DummyProvider(self._cut):
name = 'dummy'
title = 'Wakka'
settings = None
id_key = 'dummy_key'
default_settings = {'one': 1}
return DummyProvider
def test_verify_object(self):
request = testing.DummyRequest()
self.failUnless(verifyObject(IPASProvider, self._cut(request)))
def test_verify_class(self):
self.failUnless(verifyClass(IPASProvider, self._cut))
def test_settings(self):
factory = self._dummy_provider()
factory.update_settings({'two': 2}, three=3)
obj = factory(testing.DummyModel())
self.assertEqual(obj.settings, {'one': 1, 'two': 2, 'three': 3})
def test_settings_update_provider(self):
factory = self._dummy_provider()
factory.update_settings({'two': 2, 'provider': {'title': 'Hello'}})
obj = factory(testing.DummyModel())
self.assertEqual(obj.title, 'Hello')
def test_validate_settings_error(self):
factory = self._dummy_provider()
factory.update_settings(one=2)
self.assertRaises(ProviderConfigError, factory.validate_settings)
def test_validate_settings_default(self):
factory = self._dummy_provider()
factory.update_settings({
'client_id': 'client_id',
'auth_uri': 'auth_uri',
'token_uri': 'token_uri',
'client_secret': 'client_secret'
})
self.assertEqual(factory.validate_settings(), None)
def test_callback_url(self):
self.config.include('betahaus.viewcomponent')
self.config.include('arche_pas.views')
factory = self._dummy_provider()
request = Request.blank('/')
obj = factory(request)
self.assertEqual(obj.callback_url(), 'http://localhost/pas_callback/dummy')
def test_get_id(self):
self.config.include('arche_pas.models')
user = User()
provider_data = IProviderData(user)
provider_data['dummy'] = {'dummy_key': 'very_secret'}
obj = self._dummy_provider()(testing.DummyModel())
self.assertEqual(obj.get_id(user), 'very_secret')
def test_get_user(self):
self.config.include('arche.testing')
self.config.include('arche.testing.catalog')
self.config.include('arche_pas.catalog')
self.config.include('arche_pas.models')
root = barebone_fixture(self.config)
request = testing.DummyRequest()
self.config.begin(request)
apply_request_extensions(request)
request.root = root
user = User()
provider_data = IProviderData(user)
provider_data['dummy'] = {'dummy_key': 'very_secret'}
provider = self._dummy_provider()
self.config.registry.registerAdapter(provider, name=provider.name)
root['users']['jane'] = user
query = "pas_ident == ('dummy', 'very_secret')"
docids = root.catalog.query(query)[1]
self.assertEqual(tuple(request.resolve_docids(docids))[0], user)
obj = provider(request)
self.assertEqual(obj.get_user('very_secret'), user)
# def test_build_reg_case_params(self):
# request = testing.DummyRequest()
# factory = self._dummy_provider()
# obj = factory(request)
# data = {
#
# }
# obj.build_reg_case_params(data)
# def prepare_register(self, request, data):
#
# def login(self, user, request, first_login = False, came_from = None):
#
def test_login(self):
from arche.resources import User
request = testing.DummyRequest()
root = barebone_fixture(self.config)
root['users']['jane'] = user = User()
L = []
def subscriber(event):
L.append(event)
self.config.add_subscriber(subscriber, IWillLoginEvent)
factory = self._dummy_provider()
obj = factory(request)
obj.login(user)
self.assertEqual(L[0].user, user)
def test_store(self):
self.config.include('arche.testing')
self.config.include('arche.testing.catalog')
self.config.include('arche_pas.catalog')
self.config.include('arche_pas.models')
root = barebone_fixture(self.config)
request = testing.DummyRequest()
apply_request_extensions(request)
request.root = root
self.config.begin(request)
user = User()
provider_data = IProviderData(user)
provider_data['dummy'] = {'dummy_key': 'very_secret'}
provider = self._dummy_provider()
self.config.registry.registerAdapter(provider, name=provider.name)
root['users']['jane'] = user
obj = provider(request)
L = []
def subsc(obj, event):
L.append(event)
self.config.add_subscriber(subsc, [IUser, IObjectUpdatedEvent])
obj.store(user, {'hello': 'world', 1: 2})
self.assertIn('pas_ident', L[0].changed)
def test_store_saves_new_keys(self):
self.config.include('arche.testing')
self.config.include('arche.testing.catalog')
self.config.include('arche_pas.models')
self.config.include('arche_pas.catalog')
root = barebone_fixture(self.config)
request = testing.DummyRequest()
apply_request_extensions(request)
request.root = root
self.config.begin(request)
user = User()
provider_data = IProviderData(user)
provider_data['dummy'] = {'dummy_key': 'very_secret'}
provider = self._dummy_provider()
self.config.registry.registerAdapter(provider, name=provider.name)
root['users']['jane'] = user
obj = provider(request)
self.assertEqual(obj.store(user, {'hello': 'world', 1: 2}), set(['hello', 1]))
self.assertEqual(obj.store(user, {'hello': 'world', 1: 2}), set())
# hello removed
self.assertEqual(obj.store(user, {1: 2}), set())
self.assertNotIn('hello', provider_data['dummy'])
# 1 was updated
self.assertEqual(obj.store(user, {1: 3}), set([1]))
class AddPASTests(unittest.TestCase):
def setUp(self):
self.config = testing.setUp()
def tearDown(self):
testing.tearDown()
@property
def _fut(self):
from arche_pas.models import add_pas
return add_pas
# FIXME: Proper tests for add_pas
class RegistrationCaseTests(unittest.TestCase):
def setUp(self):
self.config = testing.setUp()
def tearDown(self):
testing.tearDown()
@property
def _cut(self):
from arche_pas.models import RegistrationCase
return RegistrationCase
def test_cmp_crit(self):
def hello():
pass
one = self._cut('one', callback=hello)
two = self._cut('two', callback=hello)
self.assertRaises(ValueError, one.cmp_crit, two)
class GetRegisterCaseTests(unittest.TestCase):
def setUp(self):
self.config = testing.setUp()
self.config.include('arche_pas.models')
self.config.include('arche_pas.registration_cases')
def tearDown(self):
testing.tearDown()
@property
def _fut(self):
from arche_pas.models import get_register_case
return get_register_case
def test_case_1(self):
match_params = dict(
require_authenticated=None, # Irrelevant alternative
email_validated_provider=True,
email_validated_locally=True,
user_exist_locally=True, # Irrelevant alternative, must always exist
email_from_provider=True,
provider_validation_trusted=True,
)
util = self._fut(registry=self.config.registry, **match_params)
self.assertEqual(util.name, 'case1')
match_params['require_authenticated'] = False
util = self._fut(registry=self.config.registry, **match_params)
self.assertEqual(util.name, 'case1')
match_params['require_authenticated'] = True
util = self._fut(registry=self.config.registry, **match_params)
self.assertEqual(util.name, 'case1')
match_params['user_exist_locally'] = True # Shouldn't matter
util = self._fut(registry=self.config.registry, **match_params)
self.assertEqual(util.name, 'case1')
def test_case_2(self):
match_params = dict(
require_authenticated=True,
email_validated_provider=True,
email_validated_locally=False,
user_exist_locally=True,
email_from_provider=True,
provider_validation_trusted=True,
)
util = self._fut(registry=self.config.registry, **match_params)
self.assertEqual(util.name, 'case2')
def test_case_3(self):
match_params = dict(
require_authenticated=False,
email_validated_provider=True,
email_validated_locally=False,
user_exist_locally=True,
email_from_provider=True,
provider_validation_trusted=True,
)
util = self._fut(registry=self.config.registry, **match_params)
self.assertEqual(util.name, 'case3')
def test_case_4(self):
match_params = dict(
require_authenticated=False,
email_validated_provider=True,
# email_validated_locally=None, #Irrelevant, since user shouldn't exist
user_exist_locally=False,
email_from_provider=True,
provider_validation_trusted=True,
)
util = self._fut(registry=self.config.registry, **match_params)
self.assertEqual(util.name, 'case4')
match_params['email_validated_locally'] = False # Shouldn't matter
util = self._fut(registry=self.config.registry, **match_params)
self.assertEqual(util.name, 'case4')
def test_case_5(self):
match_params = dict(
require_authenticated=True,
email_validated_provider=True,
# email_validated_locally=None, #Shouldn't matter, since user didn't match
user_exist_locally=False,
email_from_provider=True,
provider_validation_trusted=True,
)
util = self._fut(registry=self.config.registry, **match_params)
self.assertEqual(util.name, 'case5')
match_params['email_validated_locally'] = False # Shouldn't matter
util = self._fut(registry=self.config.registry, **match_params)
self.assertEqual(util.name, 'case5')
match_params['email_validated_locally'] = True # Shouldn't matter
util = self._fut(registry=self.config.registry, **match_params)
self.assertEqual(util.name, 'case5')
def test_case_6(self):
match_params = dict(
require_authenticated=True,
email_validated_provider=False,
email_validated_locally=True,
user_exist_locally=True,
email_from_provider=True,
provider_validation_trusted=False,
)
util = self._fut(registry=self.config.registry, **match_params)
self.assertEqual(util.name, 'case6')
def test_case_7(self):
match_params = dict(
require_authenticated=False,
email_validated_provider=False,
email_validated_locally=True,
user_exist_locally=True,
email_from_provider=True,
provider_validation_trusted=False,
)
util = self._fut(registry=self.config.registry, **match_params)
self.assertEqual(util.name, 'case7')
def test_case_8(self):
match_params = dict(
require_authenticated=True,
email_validated_provider=True, # Irrelevant
email_validated_locally=False,
user_exist_locally=True,
email_from_provider=True,
provider_validation_trusted=False,
)
util = self._fut(registry=self.config.registry, **match_params)
self.assertEqual(util.name, 'case8')
match_params['email_validated_provider'] = False # Shouldn't matter
util = self._fut(registry=self.config.registry, **match_params)
self.assertEqual(util.name, 'case8')
def test_case_9(self):
match_params = dict(
require_authenticated=True,
# email_validated_provider=True,
email_validated_locally=False,
user_exist_locally=False,
email_from_provider=True,
provider_validation_trusted=False,
)
util = self._fut(registry=self.config.registry, **match_params)
self.assertEqual(util.name, 'case9')
def test_case_10(self):
match_params = dict(
require_authenticated=False,
email_validated_provider=True, # Irrelevant
email_validated_locally=False,
user_exist_locally=True,
email_from_provider=True,
provider_validation_trusted=False,
)
util = self._fut(registry=self.config.registry, **match_params)
self.assertEqual(util.name, 'case10')
match_params['email_validated_provider'] = False # Shouldn't matter
util = self._fut(registry=self.config.registry, **match_params)
self.assertEqual(util.name, 'case10')
def test_case_11(self):
match_params = dict(
require_authenticated=False,
email_validated_provider=False, # Irrelevant
email_validated_locally=False,
user_exist_locally=False,
email_from_provider=True,
provider_validation_trusted=False, # Should work regardless
)
util = self._fut(registry=self.config.registry, **match_params)
self.assertEqual(util.name, 'case11')
match_params['email_validated_provider'] = True # Shouldn't matter
util = self._fut(registry=self.config.registry, **match_params)
self.assertEqual(util.name, 'case11')
def test_case_12(self):
match_params = dict(
require_authenticated=True,
email_validated_provider=False, # Irrelevant
email_validated_locally=False, # Check both
user_exist_locally=False,
email_from_provider=False,
provider_validation_trusted=False, # Should work regardless
)
util = self._fut(registry=self.config.registry, **match_params)
self.assertEqual(util.name, 'case12')
# Check all irrelevant options
match_params_alt = match_params.copy()
match_params_alt['email_validated_provider'] = True
util = self._fut(registry=self.config.registry, **match_params_alt)
self.assertEqual(util.name, 'case12')
match_params_alt = match_params.copy()
match_params_alt['provider_validation_trusted'] = True
util = self._fut(registry=self.config.registry, **match_params_alt)
self.assertEqual(util.name, 'case12')
def test_case_13(self):
match_params = dict(
require_authenticated=False,
email_validated_provider=False, # Irrelevant
email_validated_locally=False, # Check both
user_exist_locally=False,
email_from_provider=False,
provider_validation_trusted=False, # Should work regardless
)
util = self._fut(registry=self.config.registry, **match_params)
self.assertEqual(util.name, 'case13')
# Check all irrelevant options
match_params_alt = match_params.copy()
match_params_alt['email_validated_provider'] = True
util = self._fut(registry=self.config.registry, **match_params_alt)
self.assertEqual(util.name, 'case13')
match_params_alt = match_params.copy()
match_params_alt['provider_validation_trusted'] = True
util = self._fut(registry=self.config.registry, **match_params_alt)
self.assertEqual(util.name, 'case13')
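# A minimal sketch of calling the matching utility exercised above outside the
# tests (the registry and parameter values are illustrative; this combination
# mirrors test_case_1):
#
#     case = get_register_case(
#         registry=config.registry,
#         require_authenticated=False,
#         email_validated_provider=True,
#         email_validated_locally=True,
#         user_exist_locally=True,
#         email_from_provider=True,
#         provider_validation_trusted=True,
#     )
#     assert case.name == 'case1'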
| ArcheProject/arche_pas | arche_pas/tests/test_models.py | Python | gpl-2.0 | 17,498 | 0.000343 |
import os
import pickle
import socket
import sys
import threading
import struct
import time
import yaml
# from gen_scraper import scraper
def handle_slave(slaveconn_info, config_info, work_info):
# print(v_chunk)
"""
handles a slave connection, sending config, work, or receiving more results.
params:
slavesocket: <socket> connection to slave
address: IP addr, port of slave
config: <dict> config
v_list: <list> of places to visit and partially scraped stuff.
"""
# recv type of communication
slaveconn = slaveconn_info['conn']
address = slaveconn_info['address']
# print(address)
config = config_info['config']
config_dump = config_info['config_dump']
v_list = work_info['v_list']
work_loc = work_info['work_loc']
r_lock = work_info['r_lock']
print('HANDLING SLAVE', address)
comm = slaveconn.recv(4)
comm = comm.decode()
print(comm)
# if asking for config, send initial files to client.
if comm == 'CNFG':
# send config
# convert config to bytes (will have to use pickle)
byte_len = len(config_dump)
# print(byte_len)
byte_len = struct.pack('!I', byte_len)
# print(byte_len)
slaveconn.send(byte_len)
slaveconn.send(config_dump)
# if asking for work:
elif comm == 'WORK':
# send queue chunk.
# in dict, track IP : (chunk, time)
# TODO: check work_loc for work which has been out for too long. Assign that instead of new chunk if so.
chunk_len = min([config['batch size'], len(v_list)]) # length is either batch size or size of queue
# TODO: must lock this.
r_lock.acquire()
v_chunk = [v_list.pop(0) for _ in range(chunk_len)] # get a chunk
work_loc[address] = (v_chunk, time.time()) # add chunk to work loc with time
r_lock.release()
chunk_pickle = pickle.dumps(v_chunk)
byte_len = len(chunk_pickle)
# print(byte_len)
byte_len = struct.pack('!I', byte_len)
slaveconn.send(byte_len)
slaveconn.send(chunk_pickle)
# if sending back results:
elif comm == 'RSLT':
# append should be thread safe
# append results to queue
# send new chunk
# update IP dict
res = slaveconn.recv(1024)
print(res.decode())
print('DONE WITH SLAVE', address)
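# A minimal sketch of the receiving side of the length-prefixed framing used
# above (a 4-byte big-endian length from struct.pack('!I', ...), then the
# pickled payload). The slave-side socket code is not part of this file, so
# recv_exact/recv_pickled are illustrative names only.
#
#     import pickle
#     import struct
#
#     def recv_exact(sock, n):
#         # read exactly n bytes, looping over short reads
#         buf = b''
#         while len(buf) < n:
#             chunk = sock.recv(n - len(buf))
#             if not chunk:
#                 raise ConnectionError('socket closed before %d bytes arrived' % n)
#             buf += chunk
#         return buf
#
#     def recv_pickled(sock):
#         (length,) = struct.unpack('!I', recv_exact(sock, 4))
#         return pickle.loads(recv_exact(sock, length))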
def get_config(directory='', config_file_loc=''):
print('STARTING MASTER')
# print(directory)
# config file location
config_file_loc = os.path.join(directory, config_file_loc)
# master reads and parses config file, creates objects for scraping
with open(config_file_loc, 'rb') as config_file:
config = yaml.safe_load(config_file)  # safe_load avoids constructing arbitrary Python objects from the config
# grab user's scraper class and put its methods into our scraper
scraper_loc = os.path.join(directory, config['scraper']['loc'])
'''
u_module = SourceFileLoader(config['scraper']['module'], scraper_loc).load_module()
u_scraper = getattr(u_module, config['scraper']['class'])
u_processor = getattr(u_module, config['processor']['class'])
print(dir(u_module))
print(dir(u_scraper))
print(dir(u_processor))
'''
with open(scraper_loc) as u_file:
scraper_script = u_file.read()
scraper_loc = os.path.join(directory, config['processor']['loc'])
with open(scraper_loc) as u_file:
processor_script = u_file.read()
config_dump = pickle.dumps([config, scraper_script, processor_script])
return config, config_dump
# send entire user-created file over socket.
# slave uses cmdline to exec file.
# u_scraper_attr = [x for x in dir(u_scraper) if '__' not in x]
# u_processor_attr = [x for x in dir(u_processor) if '__' not in x]
# u_scraper.test()
# print(config)
# master generates initial queue
def main(argv=sys.argv):
# main entry point for the master
# argv[1] is cwd, argv[2] is config file loc
config, config_dump = get_config(directory=argv[1], config_file_loc=argv[2])
run(config, config_dump)
def run(config, config_dump):
v_list = [] # list of pages to visit and their data of form -> function_name, args, data{}
work_loc = {} # dict of work location of form -> (address): (work, time)
r_lock = threading.Lock()
# populate v_list with 1 initial entry
# data is passed with dict w/ function_name, args, data{}
for x in config['start']:
init_bundle = {'function':x['function'], 'args':x['args'], 'data':{}}
v_list.append(init_bundle)
# print(v_list)
# receives connection
slave_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
slave_sock.bind(('', 666))
slave_sock.listen(5)
# enters main loop
while True:
slaveconn, address = slave_sock.accept()
# print('Got conn from', address)
# now do something with the clientsocket
# in this case, we'll pretend this is a threaded server
# construct v_chunk
slaveconn_info = {'conn':slaveconn, 'address':address}
config_info = {'config':config, 'config_dump': config_dump}
work_info = {'v_list':v_list, 'work_loc': work_loc, 'r_lock':r_lock}
print(v_list)
ct = threading.Thread(target=handle_slave, args=[slaveconn_info, config_info, work_info])
ct.start()
if __name__ == '__main__':
test = ['', 'C:\\Users\\setzer\\Documents\\GitHub\\gen_scraper\\example', 'example_config.yml']
main()
| puddl3glum/gen_scraper | build/lib/gen_scraper/gen_master.py | Python | mit | 5,572 | 0.006999 |
# -*- coding: utf-8 -*-
"""
.. module:: utils
"""
from django.contrib.auth.models import User
from django.shortcuts import get_object_or_404
from django.shortcuts import redirect
from django.utils.text import slugify
from apps.volontulo.models import UserProfile
# Offer statuses dictionary with meaningful names.
# todo: remove dependency
OFFERS_STATUSES = {
'NEW': 'Nowa',
'ACTIVE': 'Aktywna',
'FINISHED': 'Zakończona',
'SUSPENDED': 'Zawieszona',
'CLOSED': 'Zamknięta',
}
def get_administrators_emails():
"""Get all administrators emails or superuser email
Format returned:
emails = {
1: 'admin1@example.com',
2: 'admin2@example.com',
}
"""
administrators = UserProfile.objects.filter(is_administrator=True)
emails = {}
for admin in administrators:
emails[str(admin.user.id)] = admin.user.email
if not emails:
administrators = User.objects.filter(is_superuser=True)
for admin in administrators:
emails[str(admin.id)] = admin.email
return emails
def correct_slug(model_class, view_name, slug_field):
"""Decorator that is reposponsible for redirect to url with correct slug.
It is used by url for offers, organizations and users.
"""
def decorator(wrapped_func):
"""Decorator function for correcting slugs."""
def wrapping_func(request, slug, id_):
"""Wrapping function for correcting slugs."""
obj = get_object_or_404(model_class, id=id_)
if slug != slugify(getattr(obj, slug_field)):
return redirect(
view_name,
slug=slugify(getattr(obj, slug_field)),
id_=id_
)
return wrapped_func(request, slug, id_)
return wrapping_func
return decorator
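# A minimal usage sketch for correct_slug (the Offer model, view name and slug
# field below are assumptions, not taken from this module): the decorator
# redirects to the canonical slug before running the wrapped view.
#
#     @correct_slug(Offer, 'offers_view', 'title')
#     def offers_view(request, slug, id_):
#         ...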
| magul/volontulo | backend/apps/volontulo/utils.py | Python | mit | 1,857 | 0 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.template import Library
from ..models import Counter
register = Library()
@register.assignment_tag
def counter_for_object(name, obj, default=0):
"""Returns the counter value for the given name and instance."""
try:
return Counter.objects.get_for_object(name, obj).value
except Exception:
return default
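# A minimal template usage sketch (the counter name "views" and the `article`
# context variable are assumptions): the assignment tag stores the counter
# value in a template variable, falling back to `default` when no counter exists.
#
#     {% load counter_tags %}
#     {% counter_for_object "views" article as view_count %}
#     {{ view_count }}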
| natural/django-objectcounters | objectcounters/templatetags/counter_tags.py | Python | bsd-3-clause | 398 | 0 |
from collections import defaultdict
import paddle.v2.framework.framework as framework
from paddle.v2.framework.backward import append_backward_ops
from paddle.v2.framework.regularizer import append_regularization_ops
__all__ = [
'SGDOptimizer', 'MomentumOptimizer', 'AdagradOptimizer', 'AdamOptimizer',
'AdamaxOptimizer'
]
class Optimizer(object):
"""Optimizer Base class.
Define the common interface of an optimizer.
User should not use this class directly,
but should use one of its implementations.
"""
def __init__(self, global_step=None):
self._global_step = global_step
# Dictionary of accumulators. Some optimizer subclasses need to
# allocate and manage extra variables associated with the parameters
# to train. These variables are called accumulators.
# {accum_name : { parameter_name : accumulator_for_parameter, ...}, ...}
self._accumulators = defaultdict(lambda: dict())
def _append_optimize_op(self, block, param_and_grad):
""" append optimize operator to block and return all the added optimize_op
"""
raise NotImplementedError()
def _initialize_tensors(self, block):
"""Create all necessary tensors, that will be shared for all parameter updates.
Tensors like learning rate should be initialized here.
Args:
block: the block in which the loss variable is present
"""
pass
def _create_accumulators(self, block, parameters):
"""Create all accumulators needed by the parameters
Args:
block: the block in which the loss variable is present
parameters: list of parameter variables for the optimizer
"""
pass
def _finish_update(self, block):
"""Finish any custom updates needed
before completing an optimization step
Args:
block: the block in which the loss variable is present
parameters: list of parameter variables for the optimizer
Returns:
list of finish ops or None
"""
pass
def _add_accumulator(self, block, name, param, dtype=None, fill_value=0.0):
"""Utility function to add an accumulator for a parameter
Args:
block: the block in which the loss variable is present
name: name of the accumulator
param: parameter variable for which accumulator is to be added
dtype: data type of the accumulator variable
fill_value: value to initialize the accumulator variable
"""
if (name in self._accumulators and
param.name in self._accumulators[name]):
raise Exception("Accumulator {} already exists for parmeter {}".
format(name, param.name))
global_block = block.program.global_block()
param_shape = list(param.shape)
param_acc = global_block.create_var(
dtype=dtype, shape=param_shape, lod_level=0)
# Initialize the accumulator with fill_value
# FIXME: Fix when Initialization design has been implemented
# https://github.com/PaddlePaddle/Paddle/pull/4852
global_block.append_op(
type="fill_constant",
outputs={"Out": param_acc},
attrs={"shape": param_shape,
"value": fill_value})
# Add to accumulators dict
self._accumulators[name][param.name] = param_acc
def _get_accumulator(self, name, param):
"""Utility function to fetch an accumulator for a parameter
Args:
name: name of the accumulator
param: parameter variable for which accumulator is to be fetched
Returns:
accumulator variable for the parameter
"""
if (name not in self._accumulators or
param.name not in self._accumulators[name]):
raise Exception("Accumulator {} does not exist for parameter {}".
format(name, param.name))
return self._accumulators[name][param.name]
def _increment_global_step(self, block):
"""Increment the global step by 1 after every iteration
Args:
block: the block in which the loss variable is present
Returns:
list with global_step increment op as its only element
"""
assert isinstance(block, framework.Block)
assert self._global_step is not None
# create the increment op
increment_op = block.append_op(
type="increment",
inputs={"X": self._global_step},
outputs={"Out": self._global_step},
attrs={"step": 1.0})
return increment_op
def create_optimization_pass(self, parameters_and_grads, loss):
"""Add optimization operators to update gradients to variables.
Args:
loss: the target that this optimization is for.
parameters_and_grads: a list of (variable, gradient) pair to update.
Returns:
return_op_list: a list of operators that will complete one step of
optimization. This will include parameter update ops, global step
update ops and any other custom ops required by subclasses to manage
their internal state.
"""
# This is a default implementation of create_optimization_pass that
# can be shared by most optimizers. This implementation assumes that
# the subclass will implement the _append_optimize_op method and the
# _initialize_tensors method. The subclass can extend the
# _create_accumulators method if it needs to create accumulators
# for parameters and extend _finish_update method to add custom ops.
# Create any accumulators
self._create_accumulators(loss.block,
[p[0] for p in parameters_and_grads])
# Create any necessary tensors
self._initialize_tensors(loss.block)
optimize_ops = []
for param_and_grad in parameters_and_grads:
if param_and_grad[1] is not None:
optimize_op = self._append_optimize_op(loss.block,
param_and_grad)
optimize_ops.append(optimize_op)
# Returned list of ops can include more ops in addition
# to optimization ops
return_ops = optimize_ops
# Get custom finish ops for subclasses
# FIXME: Need to fix this once we figure out how to handle dependencies
finish_ops = self._finish_update(loss.block)
if finish_ops is not None:
return_ops += finish_ops
if self._global_step is not None:
return_ops.append(self._increment_global_step(loss.block))
return return_ops
def minimize(self, loss, parameter_list=None, no_grad_set=None):
"""Add operations to minimize `loss` by updating `parameter_list`.
This method combines interface `append_backward_ops()` and
`create_optimization_pass()` into one.
"""
params_grads = append_backward_ops(loss, parameter_list, no_grad_set or
set())
# Add regularization if any
params_grads = append_regularization_ops(params_grads)
optimize_ops = self.create_optimization_pass(params_grads, loss)
return optimize_ops
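# A minimal usage sketch (the avg_loss variable and surrounding program are
# assumptions, not defined in this module): a concrete optimizer appends the
# backward pass, regularization and parameter-update ops for a loss in one call.
#
#     sgd = SGDOptimizer(learning_rate=0.01)
#     opt_ops = sgd.minimize(avg_loss)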
class SGDOptimizer(Optimizer):
""" Simple SGD optimizer without any state.
"""
def __init__(self, learning_rate, global_step=None):
assert learning_rate is not None
super(SGDOptimizer, self).__init__(global_step)
self.type = "sgd"
self._learning_rate = learning_rate
def _initialize_tensors(self, block):
assert isinstance(block, framework.Block)
lr_shape = [1]
# create a variable for learning_rate
self._lr = block.create_var(
dtype="float32", shape=lr_shape, lod_level=0)
# create an op to init the learning_rate
# FIXME: Fix when Initialization design has been implemented
# https://github.com/PaddlePaddle/Paddle/pull/4852
block.append_op(
type="fill_constant",
outputs={"Out": self._lr},
attrs={"shape": lr_shape,
"value": self._learning_rate})
def _append_optimize_op(self, block, param_and_grad):
assert isinstance(block, framework.Block)
# create the optimize op
sgd_op = block.append_op(
type=self.type,
inputs={
"Param": param_and_grad[0],
"Grad": param_and_grad[1],
"LearningRate": self._lr
},
outputs={"ParamOut": param_and_grad[0]})
return sgd_op
class MomentumOptimizer(Optimizer):
"""Simple Momentum optimizer with velocity state
"""
_velocity_acc_str = "velocity"
def __init__(self,
learning_rate,
momentum,
use_nesterov=False,
global_step=None):
assert learning_rate is not None
assert momentum is not None
super(MomentumOptimizer, self).__init__(global_step)
self.type = "momentum"
self._learning_rate = learning_rate
self._momentum = momentum
self._use_nesterov = bool(use_nesterov)
def _initialize_tensors(self, block):
assert isinstance(block, framework.Block)
lr_shape = [1]
# create a variable for learning_rate
self._lr = block.create_var(
dtype="float32", shape=lr_shape, lod_level=0)
# create an op to init the learning_rate
# FIXME: Fix when Initialization design has been implemented
# https://github.com/PaddlePaddle/Paddle/pull/4852
block.append_op(
type="fill_constant",
outputs={"Out": self._lr},
attrs={"shape": lr_shape,
"value": self._learning_rate})
def _create_accumulators(self, block, parameters):
assert isinstance(block, framework.Block)
for p in parameters:
self._add_accumulator(block, self._velocity_acc_str, p, 'float32')
def _append_optimize_op(self, block, param_and_grad):
assert isinstance(block, framework.Block)
velocity_acc = self._get_accumulator(self._velocity_acc_str,
param_and_grad[0])
# create the momentum optimize op
momentum_op = block.append_op(
type=self.type,
inputs={
"Param": param_and_grad[0],
"Grad": param_and_grad[1],
"Velocity": velocity_acc,
"LearningRate": self._lr
},
outputs={
"ParamOut": param_and_grad[0],
"VelocityOut": velocity_acc
},
attrs={"mu": self._momentum,
"useNesterov": self._use_nesterov})
return momentum_op
class AdagradOptimizer(Optimizer):
"""Simple Adagrad optimizer with moment state
"""
_moment_acc_str = "moment"
def __init__(self, learning_rate, epsilon=1.0e-6, global_step=None):
assert learning_rate is not None
assert epsilon is not None
super(AdagradOptimizer, self).__init__(global_step)
self.type = "adagrad"
self._learning_rate = learning_rate
self._epsilon = epsilon
def _initialize_tensors(self, block):
assert isinstance(block, framework.Block)
lr_shape = [1]
# create a variable for learning_rate
self._lr = block.create_var(
dtype="float32", shape=lr_shape, lod_level=0)
# create an op to init the learning_rate
# FIXME: Fix when Initialization design has been implemented
# https://github.com/PaddlePaddle/Paddle/pull/4852
block.append_op(
type="fill_constant",
outputs={"Out": self._lr},
attrs={"shape": lr_shape,
"value": self._learning_rate})
def _create_accumulators(self, block, parameters):
assert isinstance(block, framework.Block)
for p in parameters:
self._add_accumulator(block, self._moment_acc_str, p, 'float32')
def _append_optimize_op(self, block, param_and_grad):
assert isinstance(block, framework.Block)
moment_acc = self._get_accumulator(self._moment_acc_str,
param_and_grad[0])
# create the adagrad optimizer op
adagrad_op = block.append_op(
type=self.type,
inputs={
"Param": param_and_grad[0],
"Grad": param_and_grad[1],
"Moment": moment_acc,
"LearningRate": self._lr
},
outputs={"ParamOut": param_and_grad[0],
"MomentOut": moment_acc},
attrs={"epsilon": self._epsilon})
return adagrad_op
class AdamOptimizer(Optimizer):
"""Implements the Adam Optimizer
"""
_moment1_acc_str = "moment1"
_moment2_acc_str = "moment2"
def __init__(self,
learning_rate=0.001,
beta1=0.9,
beta2=0.999,
epsilon=1e-8,
global_step=None):
assert learning_rate is not None
assert beta1 is not None
assert beta2 is not None
assert epsilon is not None
super(AdamOptimizer, self).__init__(global_step)
self.type = "adam"
self._learning_rate = learning_rate
self._beta1 = beta1
self._beta2 = beta2
self._epsilon = epsilon
def _initialize_tensors(self, block):
assert isinstance(block, framework.Block)
lr_shape = [1]
# create a variable for learning_rate
self._lr = block.create_var(
dtype="float32", shape=lr_shape, lod_level=0)
# create an op to init the learning_rate
# FIXME: Fix when Initialization design has been implemented
# https://github.com/PaddlePaddle/Paddle/pull/4852
block.append_op(
type="fill_constant",
outputs={"Out": self._lr},
attrs={"shape": lr_shape,
"value": self._learning_rate})
def _create_accumulators(self, block, parameters):
assert isinstance(block, framework.Block)
global_block = block.program.global_block()
# Create beta1 and beta2 power tensors
beta_shape = [1]
# Create variables for beta1 and beta2 powers
self._beta1_pow_acc = global_block.create_var(
dtype="float32", shape=beta_shape, lod_level=0)
self._beta2_pow_acc = global_block.create_var(
dtype="float32", shape=beta_shape, lod_level=0)
# Initialize beta1 and beta2 power accumulators
# FIXME: Fix when Initialization design has been implemented
# https://github.com/PaddlePaddle/Paddle/pull/4852
global_block.append_op(
type="fill_constant",
outputs={"Out": self._beta1_pow_acc},
attrs={"shape": beta_shape,
"value": self._beta1})
global_block.append_op(
type="fill_constant",
outputs={"Out": self._beta2_pow_acc},
attrs={"shape": beta_shape,
"value": self._beta2})
# Create accumulator tensors for first and second moments
for p in parameters:
self._add_accumulator(block, self._moment1_acc_str, p, 'float32')
self._add_accumulator(block, self._moment2_acc_str, p, 'float32')
def _append_optimize_op(self, block, param_and_grad):
assert isinstance(block, framework.Block)
moment1 = self._get_accumulator(self._moment1_acc_str,
param_and_grad[0])
moment2 = self._get_accumulator(self._moment2_acc_str,
param_and_grad[0])
# create the adam optimize op
adam_op = block.append_op(
type=self.type,
inputs={
"Param": param_and_grad[0],
"Grad": param_and_grad[1],
"LearningRate": self._lr,
"Moment1": moment1,
"Moment2": moment2,
"Beta1Pow": self._beta1_pow_acc,
"Beta2Pow": self._beta2_pow_acc
},
outputs={
"ParamOut": param_and_grad[0],
"Moment1Out": moment1,
"Moment2Out": moment2
},
attrs={
"beta1": self._beta1,
"beta2": self._beta2,
"epsilon": self._epsilon
})
return adam_op
def _finish_update(self, block):
"""Update Beta1 and Beta2 Power accumulators
"""
assert isinstance(block, framework.Block)
global_block = block.program.global_block()
scale_beta1 = global_block.append_op(
type="scale",
inputs={"X": self._beta1_pow_acc},
outputs={"Out": self._beta1_pow_acc},
attrs={"scale": self._beta1})
scale_beta2 = global_block.append_op(
type="scale",
inputs={"X": self._beta2_pow_acc},
outputs={"Out": self._beta2_pow_acc},
attrs={"scale": self._beta2})
return [scale_beta1, scale_beta2]
class AdamaxOptimizer(Optimizer):
"""Implements the Adamax Optimizer
"""
_moment_acc_str = "moment"
_inf_norm_acc_str = "inf_norm"
def __init__(self,
learning_rate=0.001,
beta1=0.9,
beta2=0.999,
epsilon=1e-8,
global_step=None):
assert learning_rate is not None
assert beta1 is not None
assert beta2 is not None
assert epsilon is not None
super(AdamaxOptimizer, self).__init__()
self.type = "adamax"
self._learning_rate = learning_rate
self._beta1 = beta1
self._beta2 = beta2
self._epsilon = epsilon
def _initialize_tensors(self, block):
assert isinstance(block, framework.Block)
lr_shape = [1]
# create a variable for learning_rate
self._lr = block.create_var(
dtype="float32", shape=lr_shape, lod_level=0)
# create an op to init the learning_rate
# FIXME: Fix when Initialization design has been implemented
# https://github.com/PaddlePaddle/Paddle/pull/4852
block.append_op(
type="fill_constant",
outputs={"Out": self._lr},
attrs={"shape": lr_shape,
"value": self._learning_rate})
def _create_accumulators(self, block, parameters):
assert isinstance(block, framework.Block)
global_block = block.program.global_block()
# Create beta1 power accumulator tensor
beta_shape = [1]
self._beta1_pow_acc = global_block.create_var(
dtype="float32", shape=beta_shape, lod_level=0)
# Initialize beta1 power accumulator
# FIXME: Fix when Initialization design has been implemented
# https://github.com/PaddlePaddle/Paddle/pull/4852
global_block.append_op(
type="fill_constant",
outputs={"Out": self._beta1_pow_acc},
attrs={"shape": beta_shape,
"value": self._beta1})
# Create accumulator tensors for first moment and infinity norm
for p in parameters:
self._add_accumulator(block, self._moment_acc_str, p, 'float32')
self._add_accumulator(block, self._inf_norm_acc_str, p, 'float32')
def _append_optimize_op(self, block, param_and_grad):
assert isinstance(block, framework.Block)
moment = self._get_accumulator(self._moment_acc_str, param_and_grad[0])
inf_norm = self._get_accumulator(self._inf_norm_acc_str,
param_and_grad[0])
# create the adamax optimize op
adamax_op = block.append_op(
type=self.type,
inputs={
"Param": param_and_grad[0],
"Grad": param_and_grad[1],
"LearningRate": self._lr,
"Moment": moment,
"InfNorm": inf_norm,
"Beta1Pow": self._beta1_pow_acc
},
outputs={
"ParamOut": param_and_grad[0],
"MomentOut": moment,
"InfNormOut": inf_norm
},
attrs={
"beta1": self._beta1,
"beta2": self._beta2,
"epsilon": self._epsilon
})
return adamax_op
def _finish_update(self, block):
"""Update Beta1 Power accumulator
"""
assert isinstance(block, framework.Block)
global_block = block.program.global_block()
scale_beta1 = global_block.append_op(
type="scale",
inputs={"X": self._beta1_pow_acc},
outputs={"Out": self._beta1_pow_acc},
attrs={"scale": self._beta1})
return [scale_beta1]
| pengli09/Paddle | python/paddle/v2/framework/optimizer.py | Python | apache-2.0 | 21,289 | 0.000141 |
import logging
from pyvirtualdisplay.display import Display
from pyvirtualdisplay.about import __version__
log = logging.getLogger(__name__)
log.debug('version=%s', __version__)
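# A minimal usage sketch for the re-exported Display class (the size and
# visibility values are illustrative):
#
#     from pyvirtualdisplay import Display
#
#     display = Display(visible=0, size=(1024, 768))
#     display.start()
#     # ... run code that needs an X display, e.g. headless browser automation ...
#     display.stop()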
| suninsky/ReceiptOCR | Python/server/lib/python2.7/site-packages/pyvirtualdisplay/__init__.py | Python | mit | 216 | 0 |
from __future__ import with_statement, absolute_import
from django.contrib import admin
from django.contrib.admin.options import IncorrectLookupParameters
from django.contrib.admin.views.main import ChangeList, SEARCH_VAR, ALL_VAR
from django.contrib.auth.models import User
from django.template import Context, Template
from django.test import TestCase
from django.test.client import RequestFactory
from .admin import (ChildAdmin, QuartetAdmin, BandAdmin, ChordsBandAdmin,
GroupAdmin, ParentAdmin, DynamicListDisplayChildAdmin,
DynamicListDisplayLinksChildAdmin, CustomPaginationAdmin,
FilteredChildAdmin, CustomPaginator, site as custom_site,
SwallowAdmin)
from .models import (Child, Parent, Genre, Band, Musician, Group, Quartet,
Membership, ChordsMusician, ChordsBand, Invitation, Swallow,
UnorderedObject, OrderedObject)
class ChangeListTests(TestCase):
urls = "regressiontests.admin_changelist.urls"
def setUp(self):
self.factory = RequestFactory()
def _create_superuser(self, username):
return User.objects.create(username=username, is_superuser=True)
def _mocked_authenticated_request(self, url, user):
request = self.factory.get(url)
request.user = user
return request
def test_select_related_preserved(self):
"""
Regression test for #10348: ChangeList.get_query_set() shouldn't
overwrite a custom select_related provided by ModelAdmin.queryset().
"""
m = ChildAdmin(Child, admin.site)
request = self.factory.get('/child/')
cl = ChangeList(request, Child, m.list_display, m.list_display_links,
m.list_filter, m.date_hierarchy, m.search_fields,
m.list_select_related, m.list_per_page, m.list_max_show_all, m.list_editable, m)
self.assertEqual(cl.query_set.query.select_related, {'parent': {'name': {}}})
def test_result_list_empty_changelist_value(self):
"""
Regression test for #14982: EMPTY_CHANGELIST_VALUE should be honored
for relationship fields
"""
new_child = Child.objects.create(name='name', parent=None)
request = self.factory.get('/child/')
m = ChildAdmin(Child, admin.site)
list_display = m.get_list_display(request)
list_display_links = m.get_list_display_links(request, list_display)
cl = ChangeList(request, Child, list_display, list_display_links,
m.list_filter, m.date_hierarchy, m.search_fields,
m.list_select_related, m.list_per_page, m.list_max_show_all, m.list_editable, m)
cl.formset = None
template = Template('{% load admin_list %}{% spaceless %}{% result_list cl %}{% endspaceless %}')
context = Context({'cl': cl})
table_output = template.render(context)
row_html = '<tbody><tr class="row1"><th><a href="%d/">name</a></th><td class="nowrap">(None)</td></tr></tbody>' % new_child.id
self.assertFalse(table_output.find(row_html) == -1,
'Failed to find expected row element: %s' % table_output)
def test_result_list_html(self):
"""
Verifies that inclusion tag result_list generates a table when with
default ModelAdmin settings.
"""
new_parent = Parent.objects.create(name='parent')
new_child = Child.objects.create(name='name', parent=new_parent)
request = self.factory.get('/child/')
m = ChildAdmin(Child, admin.site)
list_display = m.get_list_display(request)
list_display_links = m.get_list_display_links(request, list_display)
cl = ChangeList(request, Child, list_display, list_display_links,
m.list_filter, m.date_hierarchy, m.search_fields,
m.list_select_related, m.list_per_page, m.list_max_show_all, m.list_editable, m)
cl.formset = None
template = Template('{% load admin_list %}{% spaceless %}{% result_list cl %}{% endspaceless %}')
context = Context({'cl': cl})
table_output = template.render(context)
row_html = '<tbody><tr class="row1"><th><a href="%d/">name</a></th><td class="nowrap">Parent object</td></tr></tbody>' % new_child.id
self.assertFalse(table_output.find(row_html) == -1,
'Failed to find expected row element: %s' % table_output)
def test_result_list_editable_html(self):
"""
Regression tests for #11791: Inclusion tag result_list generates a
table and this checks that the items are nested within the table
element tags.
Also a regression test for #13599, verifies that hidden fields
when list_editable is enabled are rendered in a div outside the
table.
"""
new_parent = Parent.objects.create(name='parent')
new_child = Child.objects.create(name='name', parent=new_parent)
request = self.factory.get('/child/')
m = ChildAdmin(Child, admin.site)
# Test with list_editable fields
m.list_display = ['id', 'name', 'parent']
m.list_display_links = ['id']
m.list_editable = ['name']
cl = ChangeList(request, Child, m.list_display, m.list_display_links,
m.list_filter, m.date_hierarchy, m.search_fields,
m.list_select_related, m.list_per_page, m.list_max_show_all, m.list_editable, m)
FormSet = m.get_changelist_formset(request)
cl.formset = FormSet(queryset=cl.result_list)
template = Template('{% load admin_list %}{% spaceless %}{% result_list cl %}{% endspaceless %}')
context = Context({'cl': cl})
table_output = template.render(context)
# make sure that hidden fields are in the correct place
hiddenfields_div = '<div class="hiddenfields"><input type="hidden" name="form-0-id" value="%d" id="id_form-0-id" /></div>' % new_child.id
self.assertFalse(table_output.find(hiddenfields_div) == -1,
'Failed to find hidden fields in: %s' % table_output)
# make sure that list editable fields are rendered in divs correctly
editable_name_field = '<input name="form-0-name" value="name" class="vTextField" maxlength="30" type="text" id="id_form-0-name" />'
self.assertFalse(table_output.find('<td>%s</td>' % editable_name_field) == -1,
'Failed to find "name" list_editable field in: %s' % table_output)
def test_result_list_editable(self):
"""
Regression test for #14312: list_editable with pagination
"""
new_parent = Parent.objects.create(name='parent')
for i in range(200):
new_child = Child.objects.create(name='name %s' % i, parent=new_parent)
request = self.factory.get('/child/', data={'p': -1}) # Anything outside range
m = ChildAdmin(Child, admin.site)
# Test with list_editable fields
m.list_display = ['id', 'name', 'parent']
m.list_display_links = ['id']
m.list_editable = ['name']
self.assertRaises(IncorrectLookupParameters, lambda: \
ChangeList(request, Child, m.list_display, m.list_display_links,
m.list_filter, m.date_hierarchy, m.search_fields,
m.list_select_related, m.list_per_page, m.list_max_show_all, m.list_editable, m))
def test_custom_paginator(self):
new_parent = Parent.objects.create(name='parent')
for i in range(200):
new_child = Child.objects.create(name='name %s' % i, parent=new_parent)
request = self.factory.get('/child/')
m = CustomPaginationAdmin(Child, admin.site)
cl = ChangeList(request, Child, m.list_display, m.list_display_links,
m.list_filter, m.date_hierarchy, m.search_fields,
m.list_select_related, m.list_per_page, m.list_max_show_all, m.list_editable, m)
cl.get_results(request)
self.assertIsInstance(cl.paginator, CustomPaginator)
def test_distinct_for_m2m_in_list_filter(self):
"""
Regression test for #13902: When using a ManyToMany in list_filter,
results shouldn't appear more than once. Basic ManyToMany.
"""
blues = Genre.objects.create(name='Blues')
band = Band.objects.create(name='B.B. King Review', nr_of_members=11)
band.genres.add(blues)
band.genres.add(blues)
m = BandAdmin(Band, admin.site)
request = self.factory.get('/band/', data={'genres': blues.pk})
cl = ChangeList(request, Band, m.list_display,
m.list_display_links, m.list_filter, m.date_hierarchy,
m.search_fields, m.list_select_related, m.list_per_page,
m.list_max_show_all, m.list_editable, m)
cl.get_results(request)
# There's only one Group instance
self.assertEqual(cl.result_count, 1)
def test_distinct_for_through_m2m_in_list_filter(self):
"""
Regression test for #13902: When using a ManyToMany in list_filter,
results shouldn't appear more than once. With an intermediate model.
"""
lead = Musician.objects.create(name='Vox')
band = Group.objects.create(name='The Hype')
Membership.objects.create(group=band, music=lead, role='lead voice')
Membership.objects.create(group=band, music=lead, role='bass player')
m = GroupAdmin(Group, admin.site)
request = self.factory.get('/group/', data={'members': lead.pk})
cl = ChangeList(request, Group, m.list_display,
m.list_display_links, m.list_filter, m.date_hierarchy,
m.search_fields, m.list_select_related, m.list_per_page,
m.list_max_show_all, m.list_editable, m)
cl.get_results(request)
# There's only one Group instance
self.assertEqual(cl.result_count, 1)
def test_distinct_for_inherited_m2m_in_list_filter(self):
"""
Regression test for #13902: When using a ManyToMany in list_filter,
results shouldn't appear more than once. Model managed in the
admin inherits from the one that defines the relationship.
"""
lead = Musician.objects.create(name='John')
four = Quartet.objects.create(name='The Beatles')
Membership.objects.create(group=four, music=lead, role='lead voice')
Membership.objects.create(group=four, music=lead, role='guitar player')
m = QuartetAdmin(Quartet, admin.site)
request = self.factory.get('/quartet/', data={'members': lead.pk})
cl = ChangeList(request, Quartet, m.list_display,
m.list_display_links, m.list_filter, m.date_hierarchy,
m.search_fields, m.list_select_related, m.list_per_page,
m.list_max_show_all, m.list_editable, m)
cl.get_results(request)
# There's only one Quartet instance
self.assertEqual(cl.result_count, 1)
def test_distinct_for_m2m_to_inherited_in_list_filter(self):
"""
Regression test for #13902: When using a ManyToMany in list_filter,
results shouldn't appear more than once. Target of the relationship
inherits from another.
"""
lead = ChordsMusician.objects.create(name='Player A')
three = ChordsBand.objects.create(name='The Chords Trio')
Invitation.objects.create(band=three, player=lead, instrument='guitar')
Invitation.objects.create(band=three, player=lead, instrument='bass')
m = ChordsBandAdmin(ChordsBand, admin.site)
request = self.factory.get('/chordsband/', data={'members': lead.pk})
cl = ChangeList(request, ChordsBand, m.list_display,
m.list_display_links, m.list_filter, m.date_hierarchy,
m.search_fields, m.list_select_related, m.list_per_page,
m.list_max_show_all, m.list_editable, m)
cl.get_results(request)
# There's only one ChordsBand instance
self.assertEqual(cl.result_count, 1)
def test_distinct_for_non_unique_related_object_in_list_filter(self):
"""
Regression tests for #15819: If a field listed in list_filter
is a non-unique related object, distinct() must be called.
"""
parent = Parent.objects.create(name='Mary')
# Two children with the same name
Child.objects.create(parent=parent, name='Daniel')
Child.objects.create(parent=parent, name='Daniel')
m = ParentAdmin(Parent, admin.site)
request = self.factory.get('/parent/', data={'child__name': 'Daniel'})
cl = ChangeList(request, Parent, m.list_display, m.list_display_links,
m.list_filter, m.date_hierarchy, m.search_fields,
m.list_select_related, m.list_per_page,
m.list_max_show_all, m.list_editable, m)
# Make sure distinct() was called
self.assertEqual(cl.query_set.count(), 1)
def test_distinct_for_non_unique_related_object_in_search_fields(self):
"""
Regression tests for #15819: If a field listed in search_fields
is a non-unique related object, distinct() must be called.
"""
parent = Parent.objects.create(name='Mary')
Child.objects.create(parent=parent, name='Danielle')
Child.objects.create(parent=parent, name='Daniel')
m = ParentAdmin(Parent, admin.site)
request = self.factory.get('/parent/', data={SEARCH_VAR: 'daniel'})
cl = ChangeList(request, Parent, m.list_display, m.list_display_links,
m.list_filter, m.date_hierarchy, m.search_fields,
m.list_select_related, m.list_per_page,
m.list_max_show_all, m.list_editable, m)
# Make sure distinct() was called
self.assertEqual(cl.query_set.count(), 1)
def test_pagination(self):
"""
Regression tests for #12893: Pagination in admins changelist doesn't
use queryset set by modeladmin.
"""
parent = Parent.objects.create(name='anything')
for i in range(30):
Child.objects.create(name='name %s' % i, parent=parent)
Child.objects.create(name='filtered %s' % i, parent=parent)
request = self.factory.get('/child/')
# Test default queryset
m = ChildAdmin(Child, admin.site)
cl = ChangeList(request, Child, m.list_display, m.list_display_links,
m.list_filter, m.date_hierarchy, m.search_fields,
m.list_select_related, m.list_per_page, m.list_max_show_all,
m.list_editable, m)
self.assertEqual(cl.query_set.count(), 60)
self.assertEqual(cl.paginator.count, 60)
self.assertEqual(cl.paginator.page_range, [1, 2, 3, 4, 5, 6])
# Test custom queryset
m = FilteredChildAdmin(Child, admin.site)
cl = ChangeList(request, Child, m.list_display, m.list_display_links,
m.list_filter, m.date_hierarchy, m.search_fields,
m.list_select_related, m.list_per_page, m.list_max_show_all,
m.list_editable, m)
self.assertEqual(cl.query_set.count(), 30)
self.assertEqual(cl.paginator.count, 30)
self.assertEqual(cl.paginator.page_range, [1, 2, 3])
def test_dynamic_list_display(self):
"""
Regression tests for #14206: dynamic list_display support.
"""
parent = Parent.objects.create(name='parent')
for i in range(10):
Child.objects.create(name='child %s' % i, parent=parent)
user_noparents = self._create_superuser('noparents')
user_parents = self._create_superuser('parents')
# Test with user 'noparents'
m = custom_site._registry[Child]
request = self._mocked_authenticated_request('/child/', user_noparents)
response = m.changelist_view(request)
self.assertNotContains(response, 'Parent object')
list_display = m.get_list_display(request)
list_display_links = m.get_list_display_links(request, list_display)
self.assertEqual(list_display, ['name', 'age'])
self.assertEqual(list_display_links, ['name'])
# Test with user 'parents'
m = DynamicListDisplayChildAdmin(Child, admin.site)
request = self._mocked_authenticated_request('/child/', user_parents)
response = m.changelist_view(request)
self.assertContains(response, 'Parent object')
custom_site.unregister(Child)
list_display = m.get_list_display(request)
list_display_links = m.get_list_display_links(request, list_display)
self.assertEqual(list_display, ('parent', 'name', 'age'))
self.assertEqual(list_display_links, ['parent'])
# Test default implementation
custom_site.register(Child, ChildAdmin)
m = custom_site._registry[Child]
request = self._mocked_authenticated_request('/child/', user_noparents)
response = m.changelist_view(request)
self.assertContains(response, 'Parent object')
def test_show_all(self):
parent = Parent.objects.create(name='anything')
for i in range(30):
Child.objects.create(name='name %s' % i, parent=parent)
Child.objects.create(name='filtered %s' % i, parent=parent)
# Add "show all" parameter to request
request = self.factory.get('/child/', data={ALL_VAR: ''})
# Test valid "show all" request (number of total objects is under max)
m = ChildAdmin(Child, admin.site)
# 200 is the max we'll pass to ChangeList
cl = ChangeList(request, Child, m.list_display, m.list_display_links,
m.list_filter, m.date_hierarchy, m.search_fields,
m.list_select_related, m.list_per_page, 200, m.list_editable, m)
cl.get_results(request)
self.assertEqual(len(cl.result_list), 60)
# Test invalid "show all" request (number of total objects over max)
# falls back to paginated pages
m = ChildAdmin(Child, admin.site)
# 30 is the max we'll pass to ChangeList for this test
cl = ChangeList(request, Child, m.list_display, m.list_display_links,
m.list_filter, m.date_hierarchy, m.search_fields,
m.list_select_related, m.list_per_page, 30, m.list_editable, m)
cl.get_results(request)
self.assertEqual(len(cl.result_list), 10)
def test_dynamic_list_display_links(self):
"""
Regression tests for #16257: dynamic list_display_links support.
"""
parent = Parent.objects.create(name='parent')
for i in range(1, 10):
Child.objects.create(id=i, name='child %s' % i, parent=parent, age=i)
m = DynamicListDisplayLinksChildAdmin(Child, admin.site)
superuser = self._create_superuser('superuser')
request = self._mocked_authenticated_request('/child/', superuser)
response = m.changelist_view(request)
for i in range(1, 10):
self.assertContains(response, '<a href="%s/">%s</a>' % (i, i))
list_display = m.get_list_display(request)
list_display_links = m.get_list_display_links(request, list_display)
self.assertEqual(list_display, ('parent', 'name', 'age'))
self.assertEqual(list_display_links, ['age'])
def test_tuple_list_display(self):
"""
Regression test for #17128
(ChangeList failing under Python 2.5 after r16319)
"""
swallow = Swallow.objects.create(
origin='Africa', load='12.34', speed='22.2')
model_admin = SwallowAdmin(Swallow, admin.site)
superuser = self._create_superuser('superuser')
request = self._mocked_authenticated_request('/swallow/', superuser)
response = model_admin.changelist_view(request)
# just want to ensure it doesn't blow up during rendering
self.assertContains(response, unicode(swallow.origin))
self.assertContains(response, unicode(swallow.load))
self.assertContains(response, unicode(swallow.speed))
def test_deterministic_order_for_unordered_model(self):
"""
Ensure that the primary key is systematically used in the ordering of
the changelist's results to guarantee a deterministic order, even
when the Model doesn't have any default ordering defined.
Refs #17198.
"""
superuser = self._create_superuser('superuser')
for counter in range(1, 51):
UnorderedObject.objects.create(id=counter, bool=True)
class UnorderedObjectAdmin(admin.ModelAdmin):
list_per_page = 10
def check_results_order(reverse=False):
admin.site.register(UnorderedObject, UnorderedObjectAdmin)
model_admin = UnorderedObjectAdmin(UnorderedObject, admin.site)
counter = 51 if reverse else 0
            for page in range(0, 5):
request = self._mocked_authenticated_request('/unorderedobject/?p=%s' % page, superuser)
response = model_admin.changelist_view(request)
for result in response.context_data['cl'].result_list:
counter += -1 if reverse else 1
self.assertEqual(result.id, counter)
admin.site.unregister(UnorderedObject)
# When no order is defined at all, everything is ordered by 'pk'.
check_results_order()
# When an order field is defined but multiple records have the same
# value for that field, make sure everything gets ordered by pk as well.
UnorderedObjectAdmin.ordering = ['bool']
check_results_order()
# When order fields are defined, including the pk itself, use them.
UnorderedObjectAdmin.ordering = ['bool', '-pk']
check_results_order(reverse=True)
UnorderedObjectAdmin.ordering = ['bool', 'pk']
check_results_order()
UnorderedObjectAdmin.ordering = ['-id', 'bool']
check_results_order(reverse=True)
UnorderedObjectAdmin.ordering = ['id', 'bool']
check_results_order()
def test_deterministic_order_for_model_ordered_by_its_manager(self):
"""
Ensure that the primary key is systematically used in the ordering of
the changelist's results to guarantee a deterministic order, even
when the Model has a manager that defines a default ordering.
Refs #17198.
"""
superuser = self._create_superuser('superuser')
for counter in range(1, 51):
OrderedObject.objects.create(id=counter, bool=True, number=counter)
class OrderedObjectAdmin(admin.ModelAdmin):
list_per_page = 10
def check_results_order(reverse=False):
admin.site.register(OrderedObject, OrderedObjectAdmin)
model_admin = OrderedObjectAdmin(OrderedObject, admin.site)
counter = 51 if reverse else 0
            for page in range(0, 5):
request = self._mocked_authenticated_request('/orderedobject/?p=%s' % page, superuser)
response = model_admin.changelist_view(request)
for result in response.context_data['cl'].result_list:
counter += -1 if reverse else 1
self.assertEqual(result.id, counter)
admin.site.unregister(OrderedObject)
# When no order is defined at all, use the model's default ordering (i.e. '-number')
check_results_order(reverse=True)
# When an order field is defined but multiple records have the same
# value for that field, make sure everything gets ordered by pk as well.
OrderedObjectAdmin.ordering = ['bool']
check_results_order()
# When order fields are defined, including the pk itself, use them.
OrderedObjectAdmin.ordering = ['bool', '-pk']
check_results_order(reverse=True)
OrderedObjectAdmin.ordering = ['bool', 'pk']
check_results_order()
OrderedObjectAdmin.ordering = ['-id', 'bool']
check_results_order(reverse=True)
OrderedObjectAdmin.ordering = ['id', 'bool']
check_results_order()
|
lisael/pg-django
|
tests/regressiontests/admin_changelist/tests.py
|
Python
|
bsd-3-clause
| 24,196 | 0.003017 |
# -*- coding:utf-8 -*-
import os
import sys
import sublime
import sublime_plugin
from .archiver import Archiver
from .settings import API_UPLOAD_URL
from .command import CommandWithStatus
sys.path.append(os.path.dirname(__file__))
import requests
class SublimeSyncUploadCommand(sublime_plugin.ApplicationCommand, CommandWithStatus):
def __init__(self, *args, **kwargs):
super(SublimeSyncUploadCommand, self).__init__(*args, **kwargs)
self.running = False
self.password = None
self.archive_filename = None
def post_send(self):
"""
Resets values
"""
self.unset_message()
self.running = False
self.password = None
self.archive_filename = None
def prompt_password(self):
"""
Shows an input panel for entering password
"""
sublime.active_window().show_input_panel(
"Enter archive password",
initial_text='',
on_done=self.pack_and_send_async,
on_cancel=self.pack_and_send_async,
on_change=None
)
def pack_and_send(self):
"""
Create archive and send it to the API
"""
self.set_message("Creating archive...")
archiver = Archiver()
self.archive_filename = archiver.pack_packages(password=self.password, exclude_from_package_control=self.exclude_from_package_control)
self.send_to_api()
def pack_and_send_async(self, password=None):
"""
        Starts the async command
"""
self.password = password or None
sublime.set_timeout_async(self.pack_and_send, 0)
def send_to_api(self):
"""
Send archive file to API
"""
self.set_message("Sending archive...")
f = open(self.archive_filename, 'rb')
files = {
'package': f.read(),
'version': sublime.version()[:1],
'username': self.username,
'api_key': self.api_key,
}
# Send data and delete temporary file
response = requests.post(url=API_UPLOAD_URL, files=files)
status_code = response.status_code
f.close()
os.unlink(self.archive_filename)
if status_code == 200:
            self.set_message("Successfully sent archive")
elif status_code == 403:
self.set_message("Error while sending archive: wrong credentials")
elif status_code == 413:
self.set_message("Error while sending archive: filesize too large (>10MB)")
else:
self.set_message("Unexpected error (HTTP STATUS: %s)" % response.status_code)
self.post_send()
def run(self, *args):
"""
Create an archive of all packages and settings
"""
if self.running:
self.set_quick_message("Already working on a backup...")
return
settings = sublime.load_settings('sublime-sync.sublime-settings')
self.running = True
self.username = settings.get('username', '')
self.api_key = settings.get('api_key', '')
self.exclude_from_package_control = settings.get('exclude_from_package_control', False)
self.encrypt = settings.get('encrypt', False)
if self.encrypt:
self.prompt_password()
else:
self.pack_and_send_async()
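# Illustrative sketch (not part of the original plugin): the settings read in
# run() above would live in a "sublime-sync.sublime-settings" file; the keys
# are the ones the command actually reads, the values below are made up.
#
# {
#     "username": "alice",
#     "api_key": "0123456789abcdef",
#     "exclude_from_package_control": false,
#     "encrypt": true
# }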
|
florianpaquet/sublime-sync
|
upload.py
|
Python
|
mit
| 3,373 | 0.001779 |
from flask_dance.consumer.storage import BaseStorage
import flask
class SessionStorage(BaseStorage):
"""
The default storage backend. Stores and retrieves OAuth tokens using
the :ref:`Flask session <flask:sessions>`.
"""
def __init__(self, key="{bp.name}_oauth_token"):
"""
Args:
key (str): The name to use as a key for storing the OAuth token in the
Flask session. This string will have ``.format(bp=self.blueprint)``
                called on it before it is used, so you can refer to information
on the blueprint as part of the key. For example, ``{bp.name}``
will be replaced with the name of the blueprint.
"""
self.key = key
def get(self, blueprint):
key = self.key.format(bp=blueprint)
return flask.session.get(key)
def set(self, blueprint, token):
key = self.key.format(bp=blueprint)
flask.session[key] = token
def delete(self, blueprint):
key = self.key.format(bp=blueprint)
del flask.session[key]
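# Illustrative usage sketch (the blueprint name "github" and the surrounding
# Flask app are assumptions, not part of this module). With the default key
# template "{bp.name}_oauth_token", tokens end up in the Flask session under
# "github_oauth_token":
#
#   storage = SessionStorage()
#   github_blueprint.storage = storage
#   # after the OAuth dance completes:
#   # storage.get(github_blueprint) == flask.session["github_oauth_token"]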
|
singingwolfboy/flask-dance
|
flask_dance/consumer/storage/session.py
|
Python
|
mit
| 1,085 | 0.001843 |
# Copyright 2012 Managed I.T.
#
# Author: Kiall Mac Innes <kiall@managedit.ie>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import json
import functools
import inspect
import os
import pkg_resources
import uuid
from jinja2 import Template
from oslo.config import cfg
from designate import exceptions
from designate.openstack.common import log as logging
from designate.openstack.common import processutils
from designate.openstack.common import timeutils
LOG = logging.getLogger(__name__)
cfg.CONF.register_opts([
cfg.StrOpt('root-helper',
default='sudo designate-rootwrap /etc/designate/rootwrap.conf')
])
def find_config(config_path):
"""
Find a configuration file using the given hint.
Code nabbed from cinder.
:param config_path: Full or relative path to the config.
:returns: List of config paths
"""
possible_locations = [
config_path,
os.path.join(cfg.CONF.pybasedir, "etc", "designate", config_path),
os.path.join(cfg.CONF.pybasedir, "etc", config_path),
os.path.join(cfg.CONF.pybasedir, config_path),
"/etc/designate/%s" % config_path,
]
found_locations = []
for path in possible_locations:
LOG.debug('Searching for configuration at path: %s' % path)
if os.path.exists(path):
LOG.debug('Found configuration at path: %s' % path)
found_locations.append(os.path.abspath(path))
return found_locations
def read_config(prog, argv):
config_files = find_config('%s.conf' % prog)
cfg.CONF(argv[1:], project='designate', prog=prog,
default_config_files=config_files)
def resource_string(*args):
if len(args) == 0:
raise ValueError()
resource_path = os.path.join('resources', *args)
if not pkg_resources.resource_exists('designate', resource_path):
raise exceptions.ResourceNotFound('Could not find the requested '
'resource: %s' % resource_path)
return pkg_resources.resource_string('designate', resource_path)
def load_schema(version, name):
schema_string = resource_string('schemas', version, '%s.json' % name)
return json.loads(schema_string)
def load_template(template_name):
template_string = resource_string('templates', template_name)
return Template(template_string)
def render_template(template, **template_context):
if not isinstance(template, Template):
template = load_template(template)
return template.render(**template_context)
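# Illustrative example (not part of the original module): render_template()
# accepts either a template name resolvable by load_template() or a Jinja2
# Template object directly, so a quick check looks like:
#
#   >>> from jinja2 import Template
#   >>> render_template(Template("Hello {{ name }}"), name="world")
#   u'Hello world'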
def render_template_to_file(template_name, output_path, makedirs=True,
**template_context):
output_folder = os.path.dirname(output_path)
# Create the output folder tree if necessary
if makedirs and not os.path.exists(output_folder):
os.makedirs(output_folder)
# Render the template
content = render_template(template_name, **template_context)
with open(output_path, 'w') as output_fh:
output_fh.write(content)
def execute(*cmd, **kw):
root_helper = kw.pop('root_helper', cfg.CONF.root_helper)
run_as_root = kw.pop('run_as_root', True)
return processutils.execute(*cmd, run_as_root=run_as_root,
root_helper=root_helper, **kw)
def get_item_properties(item, fields, mixed_case_fields=[], formatters={}):
"""Return a tuple containing the item properties.
:param item: a single item resource (e.g. Server, Tenant, etc)
:param fields: tuple of strings with the desired field names
:param mixed_case_fields: tuple of field names to preserve case
:param formatters: dictionary mapping field names to callables
to format the values
"""
row = []
for field in fields:
if field in formatters:
row.append(formatters[field](item))
else:
if field in mixed_case_fields:
field_name = field.replace(' ', '_')
else:
field_name = field.lower().replace(' ', '_')
if not hasattr(item, field_name) and \
(isinstance(item, dict) and field_name in item):
data = item[field_name]
else:
data = getattr(item, field_name, '')
if data is None:
data = ''
row.append(data)
return tuple(row)
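# Illustrative example (the record below is made up): get_item_properties()
# works on plain dicts as well as objects, lower-casing and underscoring the
# requested field names before the lookup:
#
#   >>> get_item_properties({'name': 'ns1.example.org.', 'ttl': 300},
#   ...                     ('Name', 'TTL'))
#   ('ns1.example.org.', 300)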
def get_columns(data):
"""
    Some rows might have a variable count of columns; ensure that we return the
    same set of columns for all of them.
    :param data: Results in [{}, {}]
"""
columns = set()
def _seen(col):
columns.add(str(col))
map(lambda item: map(_seen, item.keys()), data)
return list(columns)
def increment_serial(serial=0):
new_date = int(timeutils.strtime(fmt="%Y%m%d"))
old_date = serial / 100
new_serial = new_date * 100
if new_date == old_date:
new_serial = serial + 1
return new_serial
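# Illustrative example (assuming the current date is 2012-05-15, so strtime()
# returns 20120515; the serial values are made up):
#
#   >>> increment_serial(2012051503)   # same day: bump the two-digit counter
#   2012051504
#   >>> increment_serial(2012051400)   # older day: restart at today's date
#   2012051500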
def quote_string(string):
inparts = string.split(' ')
outparts = []
tmp = None
for part in inparts:
if part == '':
continue
elif part[0] == '"' and part[-1:] == '"' and part[-2:] != '\\"':
# Handle Quoted Words
outparts.append(part.strip('"'))
elif part[0] == '"':
            # Handle Start of Quoted Sentence
tmp = part[1:]
elif tmp is not None and part[-1:] == '"' and part[-2:] != '\\"':
            # Handle End of Quoted Sentence
tmp += " " + part.strip('"')
outparts.append(tmp)
tmp = None
elif tmp is not None:
            # Handle Middle of Quoted Sentence
tmp += " " + part
else:
# Handle Standalone words
outparts.append(part)
if tmp is not None:
# Handle unclosed quoted strings
outparts.append(tmp)
    # This looks odd, but both calls are necessary to ensure the end result
# is always consistent.
outparts = [o.replace('\\"', '"') for o in outparts]
outparts = [o.replace('"', '\\"') for o in outparts]
return '"' + '" "'.join(outparts) + '"'
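# Illustrative example (made-up input): quote_string() wraps each standalone
# word in double quotes while keeping already-quoted phrases together:
#
#   >>> quote_string('foo "bar baz" qux')
#   '"foo" "bar baz" "qux"'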
def deep_dict_merge(a, b):
if not isinstance(b, dict):
return b
result = copy.deepcopy(a)
for k, v in b.iteritems():
if k in result and isinstance(result[k], dict):
result[k] = deep_dict_merge(result[k], v)
else:
result[k] = copy.deepcopy(v)
return result
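# Illustrative example (made-up dicts; key order may vary): nested dicts are
# merged recursively, with values from the second argument winning on conflict:
#
#   >>> deep_dict_merge({'a': {'x': 1}, 'b': 2}, {'a': {'y': 3}, 'c': 4})
#   {'a': {'x': 1, 'y': 3}, 'b': 2, 'c': 4}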
def generate_uuid():
return str(uuid.uuid4())
def is_uuid_like(val):
"""Returns validation of a value as a UUID.
For our purposes, a UUID is a canonical form string:
aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa
"""
try:
return str(uuid.UUID(val)) == val
except (TypeError, ValueError, AttributeError):
return False
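# Illustrative example: only the canonical hyphenated form passes the check.
#
#   >>> is_uuid_like('aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa')
#   True
#   >>> is_uuid_like('aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa')   # valid hex, no dashes
#   False
#   >>> is_uuid_like(generate_uuid())
#   True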
def validate_uuid(*check):
"""
A wrapper to ensure that API controller methods arguments are valid UUID's.
Usage:
@validate_uuid('zone_id')
def get_all(self, zone_id):
return {}
"""
def inner(f):
def wrapper(*args, **kwargs):
arg_spec = inspect.getargspec(f).args
# Ensure that we have the exact number of parameters that the
# function expects. This handles URLs like
# /v2/zones/<UUID - valid or invalid>/invalid
# get, patch and delete return a 404, but Pecan returns a 405
# for a POST at the same URL
if (len(arg_spec) != len(args)):
raise exceptions.NotFound()
# Ensure that we have non-empty parameters in the cases where we
# have sub controllers - i.e. controllers at the 2nd level
# This is for URLs like /v2/zones/nameservers
# Ideally Pecan should be handling these cases, but until then
# we handle those cases here.
if (len(args) <= len(check)):
raise exceptions.NotFound()
for name in check:
pos = arg_spec.index(name)
if not is_uuid_like(args[pos]):
msg = 'Invalid UUID %s: %s' % (name, args[pos])
raise exceptions.InvalidUUID(msg)
return f(*args, **kwargs)
return functools.wraps(f)(wrapper)
return inner
|
NeCTAR-RC/designate
|
designate/utils.py
|
Python
|
apache-2.0
| 8,767 | 0.000114 |
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
import sys
## Uncomment to run this script from an in-tree build (or adjust to the
## build directory) without installing the bindings.
#sys.path.append ('.')
#sys.path.append ('.libs')
import Hamlib
def StartUp():
"""Simple script to test the Hamlib.py module with Python2."""
print "%s: Python %s; %s\n" \
% (sys.argv[0], sys.version.split()[0], Hamlib.cvar.hamlib_version)
Hamlib.rig_set_debug(Hamlib.RIG_DEBUG_NONE)
# Init RIG_MODEL_DUMMY
my_rig = Hamlib.Rig(Hamlib.RIG_MODEL_DUMMY)
my_rig.set_conf("rig_pathname", "/dev/Rig")
my_rig.set_conf("retry", "5")
my_rig.open()
# 1073741944 is token value for "itu_region"
# but using get_conf is much more convenient
region = my_rig.get_conf(1073741944)
rpath = my_rig.get_conf("rig_pathname")
retry = my_rig.get_conf("retry")
print "status(str):\t\t", Hamlib.rigerror(my_rig.error_status)
print "get_conf:\t\tpath = %s, retry = %s, ITU region = %s" \
% (rpath, retry, region)
my_rig.set_freq(Hamlib.RIG_VFO_B, 5700000000)
my_rig.set_vfo(Hamlib.RIG_VFO_B)
print "freq:\t\t\t", my_rig.get_freq()
my_rig.set_freq(Hamlib.RIG_VFO_A, 145550000)
(mode, width) = my_rig.get_mode()
print "mode:\t\t\t", Hamlib.rig_strrmode(mode), "\nbandwidth:\t\t", width
my_rig.set_mode(Hamlib.RIG_MODE_CW)
(mode, width) = my_rig.get_mode()
print "mode:\t\t\t", Hamlib.rig_strrmode(mode), "\nbandwidth:\t\t", width
print "ITU_region:\t\t", my_rig.state.itu_region
print "Backend copyright:\t", my_rig.caps.copyright
print "Model:\t\t\t", my_rig.caps.model_name
print "Manufacturer:\t\t", my_rig.caps.mfg_name
print "Backend version:\t", my_rig.caps.version
print "Backend status:\t\t", Hamlib.rig_strstatus(my_rig.caps.status)
print "Rig info:\t\t", my_rig.get_info()
my_rig.set_level("VOX", 1)
print "VOX level:\t\t", my_rig.get_level_i("VOX")
my_rig.set_level(Hamlib.RIG_LEVEL_VOX, 5)
print "VOX level:\t\t", my_rig.get_level_i(Hamlib.RIG_LEVEL_VOX)
af = 12.34
print "Setting AF to %0.2f...." % (af)
my_rig.set_level("AF", af)
print "status:\t\t\t%s - %s" % (my_rig.error_status,
Hamlib.rigerror(my_rig.error_status))
print "AF level:\t\t%0.2f" % my_rig.get_level_f(Hamlib.RIG_LEVEL_AF)
print "strength:\t\t", my_rig.get_level_i(Hamlib.RIG_LEVEL_STRENGTH)
print "status:\t\t\t", my_rig.error_status
print "status(str):\t\t", Hamlib.rigerror(my_rig.error_status)
chan = Hamlib.channel(Hamlib.RIG_VFO_B)
my_rig.get_channel(chan)
print "get_channel status:\t", my_rig.error_status
print "VFO:\t\t\t", Hamlib.rig_strvfo(chan.vfo), ", ", chan.freq
print "Attenuators:\t\t", my_rig.caps.attenuator
print "\nSending Morse, '73'"
my_rig.send_morse(Hamlib.RIG_VFO_A, "73")
my_rig.close ()
print "\nSome static functions:"
err, lon1, lat1 = Hamlib.locator2longlat("IN98XC")
err, lon2, lat2 = Hamlib.locator2longlat("DM33DX")
err, loc1 = Hamlib.longlat2locator(lon1, lat1, 3)
err, loc2 = Hamlib.longlat2locator(lon2, lat2, 3)
print "Loc1:\t\tIN98XC -> %9.4f, %9.4f -> %s" % (lon1, lat1, loc1)
print "Loc2:\t\tDM33DX -> %9.4f, %9.4f -> %s" % (lon2, lat2, loc2)
err, dist, az = Hamlib.qrb(lon1, lat1, lon2, lat2)
longpath = Hamlib.distance_long_path(dist)
print "Distance:\t%.3f km, azimuth %.2f, long path:\t%.3f km" \
% (dist, az, longpath)
# dec2dms expects values from 180 to -180
# sw is 1 when deg is negative (west or south) as 0 cannot be signed
err, deg1, mins1, sec1, sw1 = Hamlib.dec2dms(lon1)
err, deg2, mins2, sec2, sw2 = Hamlib.dec2dms(lat1)
lon3 = Hamlib.dms2dec(deg1, mins1, sec1, sw1)
lat3 = Hamlib.dms2dec(deg2, mins2, sec2, sw2)
print 'Longitude:\t%4.4f, %4d° %2d\' %2d" %1s\trecoded: %9.4f' \
% (lon1, deg1, mins1, sec1, ('W' if sw1 else 'E'), lon3)
print 'Latitude:\t%4.4f, %4d° %2d\' %2d" %1s\trecoded: %9.4f' \
% (lat1, deg2, mins2, sec2, ('S' if sw2 else 'N'), lat3)
if __name__ == '__main__':
StartUp()
|
airween/hamlib
|
bindings/pytest.py
|
Python
|
gpl-2.0
| 4,198 | 0.00143 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import copy
import logging
import os
import pathlib
import shlex
import subprocess
import sys
import warnings
from base64 import b64encode
from collections import OrderedDict
# Ignored Mypy on configparser because it thinks the configparser module has no _UNSET attribute
from configparser import _UNSET, ConfigParser, NoOptionError, NoSectionError # type: ignore
from typing import Dict, Optional, Tuple, Union
import yaml
from cryptography.fernet import Fernet
from airflow.exceptions import AirflowConfigException
log = logging.getLogger(__name__)
# show Airflow's deprecation warnings
warnings.filterwarnings(
action='default', category=DeprecationWarning, module='airflow')
warnings.filterwarnings(
action='default', category=PendingDeprecationWarning, module='airflow')
def expand_env_var(env_var):
"""
Expands (potentially nested) env vars by repeatedly applying
`expandvars` and `expanduser` until interpolation stops having
any effect.
"""
if not env_var:
return env_var
while True:
interpolated = os.path.expanduser(os.path.expandvars(str(env_var)))
if interpolated == env_var:
return interpolated
else:
env_var = interpolated
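# Illustrative example (assuming HOME=/home/airflow and an exported variable
# DAGS='$HOME/dags'; both values are made up): nested references are expanded
# until the string stops changing:
#
#   >>> expand_env_var('$DAGS')
#   '/home/airflow/dags'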
def run_command(command):
"""
Runs command and returns stdout
"""
process = subprocess.Popen(
shlex.split(command),
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
close_fds=True)
output, stderr = [stream.decode(sys.getdefaultencoding(), 'ignore')
for stream in process.communicate()]
if process.returncode != 0:
raise AirflowConfigException(
"Cannot execute {}. Error code is: {}. Output: {}, Stderr: {}"
.format(command, process.returncode, output, stderr)
)
return output
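# Illustrative example (assuming a POSIX shell environment): run_command()
# returns the decoded stdout, and raises AirflowConfigException if the command
# exits with a non-zero status:
#
#   >>> run_command('echo secret-value')
#   'secret-value\n'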
def _read_default_config_file(file_name: str) -> Tuple[str, str]:
templates_dir = os.path.join(os.path.dirname(__file__), 'config_templates')
file_path = os.path.join(templates_dir, file_name)
with open(file_path, encoding='utf-8') as config_file:
return config_file.read(), file_path
DEFAULT_CONFIG, DEFAULT_CONFIG_FILE_PATH = _read_default_config_file('default_airflow.cfg')
TEST_CONFIG, TEST_CONFIG_FILE_PATH = _read_default_config_file('default_test.cfg')
def default_config_yaml() -> dict:
"""
Read Airflow configs from YAML file
:return: Python dictionary containing configs & their info
"""
templates_dir = os.path.join(os.path.dirname(__file__), 'config_templates')
file_path = os.path.join(templates_dir, "config.yml")
with open(file_path) as config_file:
return yaml.safe_load(config_file)
class AirflowConfigParser(ConfigParser):
# These configuration elements can be fetched as the stdout of commands
# following the "{section}__{name}__cmd" pattern, the idea behind this
# is to not store password on boxes in text files.
as_command_stdout = {
('core', 'sql_alchemy_conn'),
('core', 'fernet_key'),
('celery', 'broker_url'),
('celery', 'flower_basic_auth'),
('celery', 'result_backend'),
('atlas', 'password'),
('smtp', 'smtp_password'),
('ldap', 'bind_password'),
('kubernetes', 'git_password'),
}
# A mapping of (new option -> old option). where option is a tuple of section name and key.
# When reading new option, the old option will be checked to see if it exists. If it does a
# DeprecationWarning will be issued and the old option will be used instead
deprecated_options = {
('elasticsearch', 'host'): ('elasticsearch', 'elasticsearch_host'),
('elasticsearch', 'log_id_template'): ('elasticsearch', 'elasticsearch_log_id_template'),
('elasticsearch', 'end_of_log_mark'): ('elasticsearch', 'elasticsearch_end_of_log_mark'),
('elasticsearch', 'frontend'): ('elasticsearch', 'elasticsearch_frontend'),
('elasticsearch', 'write_stdout'): ('elasticsearch', 'elasticsearch_write_stdout'),
('elasticsearch', 'json_format'): ('elasticsearch', 'elasticsearch_json_format'),
('elasticsearch', 'json_fields'): ('elasticsearch', 'elasticsearch_json_fields'),
('logging', 'base_log_folder'): ('core', 'base_log_folder'),
('logging', 'remote_logging'): ('core', 'remote_logging'),
('logging', 'remote_log_conn_id'): ('core', 'remote_log_conn_id'),
('logging', 'remote_base_log_folder'): ('core', 'remote_base_log_folder'),
('logging', 'encrypt_s3_logs'): ('core', 'encrypt_s3_logs'),
('logging', 'logging_level'): ('core', 'logging_level'),
('logging', 'fab_logging_level'): ('core', 'fab_logging_level'),
('logging', 'logging_config_class'): ('core', 'logging_config_class'),
('logging', 'colored_console_log'): ('core', 'colored_console_log'),
('logging', 'colored_log_format'): ('core', 'colored_log_format'),
('logging', 'colored_formatter_class'): ('core', 'colored_formatter_class'),
('logging', 'log_format'): ('core', 'log_format'),
('logging', 'simple_log_format'): ('core', 'simple_log_format'),
('logging', 'task_log_prefix_template'): ('core', 'task_log_prefix_template'),
('logging', 'log_filename_template'): ('core', 'log_filename_template'),
('logging', 'log_processor_filename_template'): ('core', 'log_processor_filename_template'),
('logging', 'dag_processor_manager_log_location'): ('core', 'dag_processor_manager_log_location'),
('logging', 'task_log_reader'): ('core', 'task_log_reader'),
}
# A mapping of old default values that we want to change and warn the user
# about. Mapping of section -> setting -> { old, replace, by_version }
deprecated_values = {
'core': {
'task_runner': ('BashTaskRunner', 'StandardTaskRunner', '2.0'),
},
}
# This method transforms option names on every read, get, or set operation.
    # This overrides the default ConfigParser behaviour of lowercasing option
    # names, preserving their case instead.
def optionxform(self, optionstr: str) -> str:
return optionstr
def __init__(self, default_config=None, *args, **kwargs):
super().__init__(*args, **kwargs)
self.airflow_defaults = ConfigParser(*args, **kwargs)
if default_config is not None:
self.airflow_defaults.read_string(default_config)
self.is_validated = False
def _validate(self):
if (
self.get("core", "executor") not in ('DebugExecutor', 'SequentialExecutor') and
"sqlite" in self.get('core', 'sql_alchemy_conn')):
raise AirflowConfigException(
"error: cannot use sqlite with the {}".format(
self.get('core', 'executor')))
for section, replacement in self.deprecated_values.items():
for name, info in replacement.items():
old, new, version = info
if self.get(section, name, fallback=None) == old:
# Make sure the env var option is removed, otherwise it
# would be read and used instead of the value we set
env_var = self._env_var_name(section, name)
os.environ.pop(env_var, None)
self.set(section, name, new)
warnings.warn(
'The {name} setting in [{section}] has the old default value '
'of {old!r}. This value has been changed to {new!r} in the '
'running config, but please update your config before Apache '
'Airflow {version}.'.format(
name=name, section=section, old=old, new=new, version=version
),
FutureWarning
)
self.is_validated = True
@staticmethod
def _env_var_name(section, key):
return 'AIRFLOW__{S}__{K}'.format(S=section.upper(), K=key.upper())
def _get_env_var_option(self, section, key):
# must have format AIRFLOW__{SECTION}__{KEY} (note double underscore)
env_var = self._env_var_name(section, key)
if env_var in os.environ:
return expand_env_var(os.environ[env_var])
# alternatively AIRFLOW__{SECTION}__{KEY}_CMD (for a command)
env_var_cmd = env_var + '_CMD'
if env_var_cmd in os.environ:
# if this is a valid command key...
if (section, key) in self.as_command_stdout:
return run_command(os.environ[env_var_cmd])
def _get_cmd_option(self, section, key):
fallback_key = key + '_cmd'
# if this is a valid command key...
if (section, key) in self.as_command_stdout:
if super().has_option(section, fallback_key):
command = super().get(section, fallback_key)
return run_command(command)
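    # Illustrative precedence sketch for get() below (the values are made up):
    # for a key listed in as_command_stdout, e.g. ('core', 'sql_alchemy_conn'),
    # the first of these that is set wins:
    #
    #   1. AIRFLOW__CORE__SQL_ALCHEMY_CONN=...        environment variable
    #   2. AIRFLOW__CORE__SQL_ALCHEMY_CONN_CMD=...    env var naming a command
    #   3. [core] sql_alchemy_conn = ...              value in airflow.cfg
    #   4. [core] sql_alchemy_conn_cmd = ...          command in airflow.cfg
    #   5. the value shipped in default_airflow.cfg   built-in default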
def get(self, section, key, **kwargs):
section = str(section).lower()
key = str(key).lower()
deprecated_section, deprecated_key = self.deprecated_options.get((section, key), (None, None))
# first check environment variables
option = self._get_env_var_option(section, key)
if option is not None:
return option
if deprecated_section:
option = self._get_env_var_option(deprecated_section, deprecated_key)
if option is not None:
self._warn_deprecate(section, key, deprecated_section, deprecated_key)
return option
# ...then the config file
if super().has_option(section, key):
# Use the parent's methods to get the actual config here to be able to
# separate the config from default config.
return expand_env_var(
super().get(section, key, **kwargs))
if deprecated_section:
if super().has_option(deprecated_section, deprecated_key):
self._warn_deprecate(section, key, deprecated_section, deprecated_key)
return expand_env_var(super().get(
deprecated_section,
deprecated_key,
**kwargs
))
# ...then commands
option = self._get_cmd_option(section, key)
if option:
return option
if deprecated_section:
option = self._get_cmd_option(deprecated_section, deprecated_key)
if option:
self._warn_deprecate(section, key, deprecated_section, deprecated_key)
return option
# ...then the default config
if self.airflow_defaults.has_option(section, key) or 'fallback' in kwargs:
return expand_env_var(
self.airflow_defaults.get(section, key, **kwargs))
else:
log.warning(
"section/key [%s/%s] not found in config", section, key
)
raise AirflowConfigException(
"section/key [{section}/{key}] not found "
"in config".format(section=section, key=key))
def getboolean(self, section, key, **kwargs):
val = str(self.get(section, key, **kwargs)).lower().strip()
if '#' in val:
val = val.split('#')[0].strip()
if val in ('t', 'true', '1'):
return True
elif val in ('f', 'false', '0'):
return False
else:
raise ValueError(
'The value for configuration option "{}:{}" is not a '
'boolean (received "{}").'.format(section, key, val))
def getint(self, section, key, **kwargs):
return int(self.get(section, key, **kwargs))
def getfloat(self, section, key, **kwargs):
return float(self.get(section, key, **kwargs))
def read(self, filenames, **kwargs):
super().read(filenames, **kwargs)
self._validate()
def read_dict(self, *args, **kwargs):
super().read_dict(*args, **kwargs)
self._validate()
def has_option(self, section, option):
try:
# Using self.get() to avoid reimplementing the priority order
# of config variables (env, config, cmd, defaults)
# UNSET to avoid logging a warning about missing values
self.get(section, option, fallback=_UNSET)
return True
except (NoOptionError, NoSectionError):
return False
def remove_option(self, section, option, remove_default=True):
"""
Remove an option if it exists in config from a file or
default config. If both of config have the same option, this removes
the option in both configs unless remove_default=False.
"""
if super().has_option(section, option):
super().remove_option(section, option)
if self.airflow_defaults.has_option(section, option) and remove_default:
self.airflow_defaults.remove_option(section, option)
def getsection(self, section: str) -> Optional[Dict[str, Union[str, int, float, bool]]]:
"""
Returns the section as a dict. Values are converted to int, float, bool
as required.
:param section: section from the config
:rtype: dict
"""
if (section not in self._sections and section not in self.airflow_defaults._sections): # type: ignore
return None
_section = copy.deepcopy(self.airflow_defaults._sections[section]) # type: ignore
if section in self._sections: # type: ignore
_section.update(copy.deepcopy(self._sections[section])) # type: ignore
section_prefix = 'AIRFLOW__{S}__'.format(S=section.upper())
for env_var in sorted(os.environ.keys()):
if env_var.startswith(section_prefix):
key = env_var.replace(section_prefix, '')
if key.endswith("_CMD"):
key = key[:-4]
key = key.lower()
_section[key] = self._get_env_var_option(section, key)
for key, val in _section.items(): # type: ignore
try:
val = int(val)
except ValueError:
try:
val = float(val)
except ValueError:
if val.lower() in ('t', 'true'):
val = True
elif val.lower() in ('f', 'false'):
val = False
_section[key] = val
return _section
def write(self, fp, space_around_delimiters=True):
# This is based on the configparser.RawConfigParser.write method code to add support for
# reading options from environment variables.
if space_around_delimiters:
d = " {} ".format(self._delimiters[0]) # type: ignore
else:
d = self._delimiters[0] # type: ignore
if self._defaults:
self._write_section(fp, self.default_section, self._defaults.items(), d) # type: ignore
for section in self._sections:
self._write_section(fp, section, self.getsection(section).items(), d) # type: ignore
def as_dict(
self, display_source=False, display_sensitive=False, raw=False,
include_env=True, include_cmds=True) -> Dict[str, Dict[str, str]]:
"""
Returns the current configuration as an OrderedDict of OrderedDicts.
:param display_source: If False, the option value is returned. If True,
a tuple of (option_value, source) is returned. Source is either
'airflow.cfg', 'default', 'env var', or 'cmd'.
:type display_source: bool
:param display_sensitive: If True, the values of options set by env
vars and bash commands will be displayed. If False, those options
are shown as '< hidden >'
:type display_sensitive: bool
:param raw: Should the values be output as interpolated values, or the
"raw" form that can be fed back in to ConfigParser
:type raw: bool
:param include_env: Should the value of configuration from AIRFLOW__
environment variables be included or not
:type include_env: bool
:param include_cmds: Should the result of calling any *_cmd config be
set (True, default), or should the _cmd options be left as the
command to run (False)
:type include_cmds: bool
:rtype: Dict[str, Dict[str, str]]
:return: Dictionary, where the key is the name of the section and the content is
the dictionary with the name of the parameter and its value.
"""
cfg: Dict[str, Dict[str, str]] = {}
configs = [
('default', self.airflow_defaults),
('airflow.cfg', self),
]
for (source_name, config) in configs:
for section in config.sections():
sect = cfg.setdefault(section, OrderedDict())
for (k, val) in config.items(section=section, raw=raw):
if display_source:
val = (val, source_name)
sect[k] = val
# add env vars and overwrite because they have priority
if include_env:
for ev in [ev for ev in os.environ if ev.startswith('AIRFLOW__')]:
try:
_, section, key = ev.split('__', 2)
opt = self._get_env_var_option(section, key)
except ValueError:
continue
if not display_sensitive and ev != 'AIRFLOW__CORE__UNIT_TEST_MODE':
opt = '< hidden >'
elif raw:
opt = opt.replace('%', '%%')
if display_source:
opt = (opt, 'env var')
section = section.lower()
# if we lower key for kubernetes_environment_variables section,
# then we won't be able to set any Airflow environment
# variables. Airflow only parse environment variables starts
# with AIRFLOW_. Therefore, we need to make it a special case.
if section != 'kubernetes_environment_variables':
key = key.lower()
cfg.setdefault(section, OrderedDict()).update({key: opt})
# add bash commands
if include_cmds:
for (section, key) in self.as_command_stdout:
opt = self._get_cmd_option(section, key)
if opt:
if not display_sensitive:
opt = '< hidden >'
if display_source:
opt = (opt, 'cmd')
elif raw:
opt = opt.replace('%', '%%')
cfg.setdefault(section, OrderedDict()).update({key: opt})
del cfg[section][key + '_cmd']
return cfg
def load_test_config(self):
"""
Load the unit test configuration.
Note: this is not reversible.
"""
# override any custom settings with defaults
log.info("Overriding settings with defaults from %s", DEFAULT_CONFIG_FILE_PATH)
self.read_string(parameterized_config(DEFAULT_CONFIG))
# then read test config
log.info("Reading default test configuration from %s", TEST_CONFIG_FILE_PATH)
self.read_string(parameterized_config(TEST_CONFIG))
# then read any "custom" test settings
log.info("Reading test configuration from %s", TEST_CONFIG_FILE)
self.read(TEST_CONFIG_FILE)
def _warn_deprecate(self, section, key, deprecated_section, deprecated_name):
if section == deprecated_section:
warnings.warn(
'The {old} option in [{section}] has been renamed to {new} - the old '
'setting has been used, but please update your config.'.format(
old=deprecated_name,
new=key,
section=section,
),
DeprecationWarning,
stacklevel=3,
)
else:
warnings.warn(
'The {old_key} option in [{old_section}] has been moved to the {new_key} option in '
'[{new_section}] - the old setting has been used, but please update your config.'.format(
old_section=deprecated_section,
old_key=deprecated_name,
new_key=key,
new_section=section,
),
DeprecationWarning,
stacklevel=3,
)
def get_airflow_home():
return expand_env_var(os.environ.get('AIRFLOW_HOME', '~/airflow'))
def get_airflow_config(airflow_home):
if 'AIRFLOW_CONFIG' not in os.environ:
return os.path.join(airflow_home, 'airflow.cfg')
return expand_env_var(os.environ['AIRFLOW_CONFIG'])
# Setting AIRFLOW_HOME and AIRFLOW_CONFIG from environment variables, using
# "~/airflow" and "$AIRFLOW_HOME/airflow.cfg" respectively as defaults.
AIRFLOW_HOME = get_airflow_home()
AIRFLOW_CONFIG = get_airflow_config(AIRFLOW_HOME)
pathlib.Path(AIRFLOW_HOME).mkdir(parents=True, exist_ok=True)
# Set up dags folder for unit tests
# this directory won't exist if users install via pip
_TEST_DAGS_FOLDER = os.path.join(
os.path.dirname(os.path.dirname(os.path.realpath(__file__))),
'tests',
'dags')
if os.path.exists(_TEST_DAGS_FOLDER):
TEST_DAGS_FOLDER = _TEST_DAGS_FOLDER
else:
TEST_DAGS_FOLDER = os.path.join(AIRFLOW_HOME, 'dags')
# Set up plugins folder for unit tests
_TEST_PLUGINS_FOLDER = os.path.join(
os.path.dirname(os.path.dirname(os.path.realpath(__file__))),
'tests',
'plugins')
if os.path.exists(_TEST_PLUGINS_FOLDER):
TEST_PLUGINS_FOLDER = _TEST_PLUGINS_FOLDER
else:
TEST_PLUGINS_FOLDER = os.path.join(AIRFLOW_HOME, 'plugins')
def parameterized_config(template):
"""
Generates a configuration from the provided template + variables defined in
current scope
:param template: a config content templated with {{variables}}
"""
all_vars = {k: v for d in [globals(), locals()] for k, v in d.items()}
return template.format(**all_vars)
def get_airflow_test_config(airflow_home):
if 'AIRFLOW_TEST_CONFIG' not in os.environ:
return os.path.join(airflow_home, 'unittests.cfg')
return expand_env_var(os.environ['AIRFLOW_TEST_CONFIG'])
TEST_CONFIG_FILE = get_airflow_test_config(AIRFLOW_HOME)
# only generate a Fernet key if we need to create a new config file
if not os.path.isfile(TEST_CONFIG_FILE) or not os.path.isfile(AIRFLOW_CONFIG):
FERNET_KEY = Fernet.generate_key().decode()
else:
FERNET_KEY = ''
SECRET_KEY = b64encode(os.urandom(16)).decode('utf-8')
TEMPLATE_START = (
'# ----------------------- TEMPLATE BEGINS HERE -----------------------')
if not os.path.isfile(TEST_CONFIG_FILE):
log.info(
'Creating new Airflow config file for unit tests in: %s', TEST_CONFIG_FILE
)
with open(TEST_CONFIG_FILE, 'w') as file:
cfg = parameterized_config(TEST_CONFIG)
file.write(cfg.split(TEMPLATE_START)[-1].strip())
if not os.path.isfile(AIRFLOW_CONFIG):
log.info(
'Creating new Airflow config file in: %s',
AIRFLOW_CONFIG
)
with open(AIRFLOW_CONFIG, 'w') as file:
cfg = parameterized_config(DEFAULT_CONFIG)
cfg = cfg.split(TEMPLATE_START)[-1].strip()
file.write(cfg)
log.info("Reading the config from %s", AIRFLOW_CONFIG)
conf = AirflowConfigParser(default_config=parameterized_config(DEFAULT_CONFIG))
conf.read(AIRFLOW_CONFIG)
if conf.has_option('core', 'AIRFLOW_HOME'):
msg = (
'Specifying both AIRFLOW_HOME environment variable and airflow_home '
'in the config file is deprecated. Please use only the AIRFLOW_HOME '
'environment variable and remove the config file entry.'
)
if 'AIRFLOW_HOME' in os.environ:
warnings.warn(msg, category=DeprecationWarning)
elif conf.get('core', 'airflow_home') == AIRFLOW_HOME:
warnings.warn(
'Specifying airflow_home in the config file is deprecated. As you '
'have left it at the default value you should remove the setting '
'from your airflow.cfg and suffer no change in behaviour.',
category=DeprecationWarning,
)
else:
AIRFLOW_HOME = conf.get('core', 'airflow_home')
warnings.warn(msg, category=DeprecationWarning)
WEBSERVER_CONFIG = AIRFLOW_HOME + '/webserver_config.py'
if not os.path.isfile(WEBSERVER_CONFIG):
log.info('Creating new FAB webserver config file in: %s', WEBSERVER_CONFIG)
DEFAULT_WEBSERVER_CONFIG, _ = _read_default_config_file('default_webserver_config.py')
with open(WEBSERVER_CONFIG, 'w') as file:
file.write(DEFAULT_WEBSERVER_CONFIG)
if conf.getboolean('core', 'unit_test_mode'):
conf.load_test_config()
# Historical convenience functions to access config entries
def load_test_config():
warnings.warn(
"Accessing configuration method 'load_test_config' directly from the configuration module is "
"deprecated. Please access the configuration from the 'configuration.conf' object via "
"'conf.load_test_config'",
DeprecationWarning,
stacklevel=2
)
conf.load_test_config()
def get(*args, **kwargs):
warnings.warn(
"Accessing configuration method 'get' directly from the configuration module is "
"deprecated. Please access the configuration from the 'configuration.conf' object via "
"'conf.get'",
DeprecationWarning,
stacklevel=2
)
return conf.get(*args, **kwargs)
def getboolean(*args, **kwargs):
warnings.warn(
"Accessing configuration method 'getboolean' directly from the configuration module is "
"deprecated. Please access the configuration from the 'configuration.conf' object via "
"'conf.getboolean'",
DeprecationWarning,
stacklevel=2
)
return conf.getboolean(*args, **kwargs)
def getfloat(*args, **kwargs):
warnings.warn(
"Accessing configuration method 'getfloat' directly from the configuration module is "
"deprecated. Please access the configuration from the 'configuration.conf' object via "
"'conf.getfloat'",
DeprecationWarning,
stacklevel=2
)
return conf.getfloat(*args, **kwargs)
def getint(*args, **kwargs):
warnings.warn(
"Accessing configuration method 'getint' directly from the configuration module is "
"deprecated. Please access the configuration from the 'configuration.conf' object via "
"'conf.getint'",
DeprecationWarning,
stacklevel=2
)
return conf.getint(*args, **kwargs)
def getsection(*args, **kwargs):
warnings.warn(
"Accessing configuration method 'getsection' directly from the configuration module is "
"deprecated. Please access the configuration from the 'configuration.conf' object via "
"'conf.getsection'",
DeprecationWarning,
stacklevel=2
)
    return conf.getsection(*args, **kwargs)
def has_option(*args, **kwargs):
warnings.warn(
"Accessing configuration method 'has_option' directly from the configuration module is "
"deprecated. Please access the configuration from the 'configuration.conf' object via "
"'conf.has_option'",
DeprecationWarning,
stacklevel=2
)
return conf.has_option(*args, **kwargs)
def remove_option(*args, **kwargs):
warnings.warn(
"Accessing configuration method 'remove_option' directly from the configuration module is "
"deprecated. Please access the configuration from the 'configuration.conf' object via "
"'conf.remove_option'",
DeprecationWarning,
stacklevel=2
)
return conf.remove_option(*args, **kwargs)
def as_dict(*args, **kwargs):
warnings.warn(
"Accessing configuration method 'as_dict' directly from the configuration module is "
"deprecated. Please access the configuration from the 'configuration.conf' object via "
"'conf.as_dict'",
DeprecationWarning,
stacklevel=2
)
return conf.as_dict(*args, **kwargs)
def set(*args, **kwargs):
warnings.warn(
"Accessing configuration method 'set' directly from the configuration module is "
"deprecated. Please access the configuration from the 'configuration.conf' object via "
"'conf.set'",
DeprecationWarning,
stacklevel=2
)
return conf.set(*args, **kwargs)
|
mtagle/airflow
|
airflow/configuration.py
|
Python
|
apache-2.0
| 29,594 | 0.002298 |
import numpy as np
import binvox_rw
import numba
import mcubes
@numba.jit(forceobj=True)
def get_voxel_resolution(pc, patch_size):
"""
This function takes in a pointcloud and returns the resolution
of a voxel given that there will be a fixed number of voxels.
For example if patch_size is 40, then we are determining the
    side length of a single voxel in meters, so voxel_resolution
    may end up being something like 0.01 for a 1cm^3 voxel size.
:type pc: numpy.ndarray
:param pc: nx3 numpy array representing a pointcloud
:type patch_size: int
:param patch_size: int, how many voxels are there going to be.
:rtype voxel_resolution: float
"""
if not pc.shape[1] == 3:
raise Exception("Invalid pointcloud size, should be nx3, but is {}".format(pc.shape))
min_x = pc[:, 0].min()
min_y = pc[:, 1].min()
min_z = pc[:, 2].min()
max_x = pc[:, 0].max()
max_y = pc[:, 1].max()
max_z = pc[:, 2].max()
max_dim = max((max_x - min_x),
(max_y - min_y),
(max_z - min_z))
voxel_resolution = (1.0 * max_dim) / patch_size
return voxel_resolution
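# Illustrative example (made-up cloud): a cloud spanning 0.32 m along its
# largest dimension, voxelized into 32 voxels per side, gives 1 cm voxels:
#
#   pc = np.array([[0.0, 0.0, 0.0], [0.32, 0.2, 0.1]])
#   get_voxel_resolution(pc, 32)   # -> 0.01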
@numba.jit(forceobj=True)
def get_bbox_center(pc):
"""
This function takes an nx3 pointcloud and returns a tuple
(x,y,z) which is the center of the bbox that contains
the pointcloud
:type pc: numpy.ndarray
:param pc: a nx3 numpy array representing a pointcloud
:rtype numpy.ndarray
"""
if not pc.shape[1] == 3:
raise Exception("Invalid pointcloud size, should be nx3, but is {}".format(pc.shape))
min_x = pc[:, 0].min()
min_y = pc[:, 1].min()
min_z = pc[:, 2].min()
max_x = pc[:, 0].max()
max_y = pc[:, 1].max()
max_z = pc[:, 2].max()
center = np.array([min_x + (max_x - min_x) / 2.0,
min_y + (max_y - min_y) / 2.0,
min_z + (max_z - min_z) / 2.0])
return center
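# Illustrative example (same made-up cloud): the result is the midpoint of the
# axis-aligned bounding box, not the centroid of the points:
#
#   get_bbox_center(np.array([[0.0, 0.0, 0.0], [0.32, 0.2, 0.1]]))
#   # -> array([0.16, 0.1, 0.05])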
@numba.jit(forceobj=True)
def voxelize_points(points, pc_bbox_center, voxel_resolution, num_voxels_per_dim, pc_center_in_voxel_grid):
"""
This function takes a pointcloud and produces a an occupancy map or voxel grid surrounding the points.
:type points: numpy.ndarray
:param points: an nx3 numpy array representing a pointcloud
:type pc_bbox_center: numpy.ndarray
:param pc_bbox_center: numpy.ndarray of shape (3,) representing the center of the bbox that contains points
:type voxel_resolution: float
    :param voxel_resolution: float describing in meters the length of an individual voxel edge, e.g. 0.01 would
mean each voxel is 1cm^3
:type num_voxels_per_dim: int
:param num_voxels_per_dim: how many voxels along a dimension. normally 40, for a 40x40x40 voxel grid
:type pc_center_in_voxel_grid: tuple
:param pc_center_in_voxel_grid: (x,y,z) in voxel coords of where to place the center of the points in the voxel grid
if using 40x40x40 voxel grid, then pc_center_in_voxel_grid = (20,20,20). We often using something more
like (20,20,18) when doing shape completion so there is more room in the back of the grid for the
object to be completed.
"""
# this is the voxel grid we are going to return
voxel_grid = np.zeros((num_voxels_per_dim,
num_voxels_per_dim,
num_voxels_per_dim), dtype=np.bool)
# take the points and convert them from meters to voxel space coords
centered_scaled_points = np.floor(
(points - np.array(pc_bbox_center) + np.array(
pc_center_in_voxel_grid) * voxel_resolution) / voxel_resolution)
# remove any points that are beyond the area that falls in our voxel grid
mask = centered_scaled_points.max(axis=1) < num_voxels_per_dim
centered_scaled_points = centered_scaled_points[mask]
# if we don't have any more points that fall within our voxel grid
# return an empty grid
if centered_scaled_points.shape[0] == 0:
return voxel_grid
# remove any points that are outside of the region we are voxelizing
# as they are to small.
mask = centered_scaled_points.min(axis=1) > 0
centered_scaled_points = centered_scaled_points[mask]
# if we don't have any more points that fall within our voxel grid,
# return an empty grid
if centered_scaled_points.shape[0] == 0:
return voxel_grid
# treat our remaining points as ints, since we are already in voxel coordinate space.
    # these points should be things like (5, 6, 7), which represent indices in the voxel grid.
csp_int = centered_scaled_points.astype(int)
# create a mask from our set of points.
mask = (csp_int[:, 0], csp_int[:, 1], csp_int[:, 2])
# apply the mask to our voxel grid setting voxel that had points in them to be occupied
voxel_grid[mask] = 1
return voxel_grid
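# Illustrative example (made-up values): a single point sitting exactly at the
# world position that maps to pc_center_in_voxel_grid occupies that one voxel:
#
#   grid = voxelize_points(points=np.array([[0.0, 0.0, 0.0]]),
#                          pc_bbox_center=np.array([0.0, 0.0, 0.0]),
#                          voxel_resolution=0.01,
#                          num_voxels_per_dim=40,
#                          pc_center_in_voxel_grid=(20, 20, 20))
#   grid[20, 20, 20]   # -> True; every other voxel stays False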
def pc_to_binvox(points, **kwargs):
"""
This function creates a binvox object from a pointcloud. The voxel grid is slightly off center from the
pointcloud bbox center so that the back of the grid has more room for the completion.
:type points: numpy.ndarray
:param points: nx3 numpy array representing a pointcloud
:rtype: binvox_rw.Voxels
:param kwargs:
See below
:Keyword Arguments:
* *patch_size* (``int``) --
how many voxels along a single dimension of the voxel grid.
Ex: patch_size=40 gives us a 40^3 voxel grid
Defaults to 40
* *percent_patch_size* (``float``) --
how much of the voxel grid do we want our pointcloud to fill.
make this < 1 so that there is some padding on the edges
Defaults to 0.8
* *percent_offset* (``tuple``) --
Where should the center of the points be placed inside the voxel grid.
normally make PERCENT_Z < 0.5 so that the points are placed towards the front of the grid
this leaves more room for the shape completion to fill in the occluded back half of the occupancy grid.
"""
patch_size = kwargs.get("patch_size", 40)
percent_offset = kwargs.get("percent_offset", (0.5, 0.5, 0.45))
percent_patch_size = kwargs.get("percent_patch_size", 0.8)
if points.shape[1] != 3:
raise Exception("Invalid pointcloud size, should be nx3, but is {}".format(points.shape))
if len(percent_offset) != 3:
raise Exception("Percent offset should be a tuple of size 3, instead got {}".format(percent_offset))
percent_x, percent_y, percent_z = percent_offset
# get the center of the pointcloud in meters. Ex: center = np.array([0.2, 0.1, 2.0])
voxel_center = get_bbox_center(points)
# get the size of an individual voxel. Ex: voxel_resolution=0.01 meaning 1cm^3 voxel
# PERCENT_PATCH_SIZE determines how much extra padding to leave on the sides
voxel_resolution = get_voxel_resolution(points, percent_patch_size * patch_size)
# this tuple is where we want to stick the center of the pointcloud in our voxel grid
# Ex: (20, 20, 18) leaving some extra room in the back half.
pc_center_in_voxel_grid = (patch_size*percent_x, patch_size*percent_y, patch_size*percent_z)
# create a voxel grid.
vox_np = voxelize_points(
points=points[:, 0:3],
pc_bbox_center=voxel_center,
voxel_resolution=voxel_resolution,
num_voxels_per_dim=patch_size,
pc_center_in_voxel_grid=pc_center_in_voxel_grid)
# location in meters of the bottom corner of the voxel grid in world space
offset = np.array(voxel_center) - np.array(pc_center_in_voxel_grid) * voxel_resolution
# create a voxel grid object to contain the grid, shape, offset in the world, and grid resolution
voxel_grid = binvox_rw.Voxels(vox_np, vox_np.shape, tuple(offset), voxel_resolution * patch_size, "xyz")
# Where am I putting my point cloud relative to the center of my voxel grid
# ex. (20, 20, 20) or (20, 20, 18)
center_point_in_voxel_grid = (patch_size * percent_x, patch_size * percent_y, patch_size * percent_z)
return voxel_grid, voxel_center, voxel_resolution, center_point_in_voxel_grid
@numba.jit(forceobj=True)
def get_ternary_voxel_grid(binary_voxel_grid):
"""
Takes a binary occupancy voxel grid for the surface of the object and
returns a ternary occupancy voxel grid.
:param binary_voxel_grid: a voxel grid that indicates whether a voxel is
occupied by the visible surface ("1") or not occupied by the visible
surface ("0"). If you're seeing a box, the "1"s would represent the location
of the part of the box's surface that you can see, while the "0" would
represent everything else.
    Note: only the 'simple' method (camera assumed infinitely far away, so every
    voxel behind the surface along z is occluded) is implemented; this function
    takes no ``method`` parameter.
:return: a voxel grid that indicates whether a voxel is visually occluded
("2"), occupied by the visible surface ("1"), or visibly known to be
unoccupied ("0").
"""
if isinstance(binary_voxel_grid, binvox_rw.Voxels):
binary_voxel_grid = binary_voxel_grid.data
if not isinstance(binary_voxel_grid, np.ndarray):
raise ValueError("binary_voxel_grid must be Voxels or ndarray")
voxel_grid_shape = binary_voxel_grid.shape
assert len(voxel_grid_shape) == 3
# Initialize all ternary grid values to 0.
ternary_voxel_grid = np.zeros(voxel_grid_shape)
# The 'simple' method assumes that the camera is an infinite distance
# away from the object and thus considers as occluded every z value
# behind the surface for a fixed x and y. Perspective isn't taken into
# account.
for i in range(voxel_grid_shape[0]):
for j in range(voxel_grid_shape[1]):
for k in range(voxel_grid_shape[2]):
if binary_voxel_grid[i, j, k] > 0:
# Surface found. set surface to 1 in the ternary_voxel
# grid, and everything behind it to 2.
ternary_voxel_grid[i, j, k] = 1
ternary_voxel_grid[i, j, k + 1:voxel_grid_shape[2]] = 2
break
return ternary_voxel_grid
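# Illustrative example (tiny made-up grid): for a single z-column with the
# surface at index 1, every voxel behind it (larger z) becomes occluded ("2"):
#
#   binary = np.zeros((1, 1, 4), dtype=np.bool)
#   binary[0, 0, 1] = True
#   get_ternary_voxel_grid(binary)[0, 0, :]   # -> array([0., 1., 2., 2.])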
@numba.jit(forceobj=True)
def rescale_mesh(vertices, patch_center, voxel_resolution, pc_center_in_voxel_grid):
return vertices * voxel_resolution - np.array(pc_center_in_voxel_grid) * voxel_resolution + np.array(patch_center)
@numba.jit(forceobj=True)
def create_voxel_grid_around_point_scaled(
points,
patch_center,
voxel_resolution,
num_voxels_per_dim,
pc_center_in_voxel_grid
):
voxel_grid = np.zeros((num_voxels_per_dim, num_voxels_per_dim, num_voxels_per_dim, 1), dtype=np.float32)
centered_scaled_points = np.floor(
(points - np.array(patch_center) + np.array(
pc_center_in_voxel_grid) * voxel_resolution) / voxel_resolution)
mask = centered_scaled_points.max(axis=1) < num_voxels_per_dim
centered_scaled_points = centered_scaled_points[mask]
if centered_scaled_points.shape[0] == 0:
return voxel_grid
mask = centered_scaled_points.min(axis=1) > 0
centered_scaled_points = centered_scaled_points[mask]
if centered_scaled_points.shape[0] == 0:
return voxel_grid
csp_int = centered_scaled_points.astype(int)
mask = (csp_int[:, 0], csp_int[:, 1], csp_int[:, 2],
np.zeros((csp_int.shape[0]), dtype=int))
voxel_grid[mask] = 1
return voxel_grid
def pc_to_binvox_for_shape_completion(points,
patch_size):
"""
This function creates a binvox object from a pointcloud. The voxel grid is slightly off center from the
pointcloud bbox center so that the back of the grid has more room for the completion.
:type points: numpy.ndarray
:param points: nx3 numpy array representing a pointcloud
:type patch_size: int
:param patch_size: how many voxels along a single dimension of the voxel grid.
Ex: patch_size=40 gives us a 40^3 voxel grid
:rtype: binvox_rw.Voxels
"""
if points.shape[1] != 3:
raise Exception("Invalid pointcloud size, should be nx3, but is {}".format(points.shape))
# how much of the voxel grid do we want our pointcloud to fill.
# make this < 1 so that there is some padding on the edges
PERCENT_PATCH_SIZE = (4.0/5.0)
# Where should the center of the points be placed inside the voxel grid.
# normally make PERCENT_Z < 0.5 so that the points are placed towards the front of the grid
# this leaves more room for the shape completion to fill in the occluded back half of the occupancy grid.
PERCENT_X = 0.5
PERCENT_Y = 0.5
PERCENT_Z = 0.45
# get the center of the pointcloud in meters. Ex: center = np.array([0.2, 0.1, 2.0])
center = get_bbox_center(points)
# get the size of an individual voxel. Ex: voxel_resolution=0.01 meaning 1cm^3 voxel
# PERCENT_PATCH_SIZE determines how much extra padding to leave on the sides
voxel_resolution = get_voxel_resolution(points, PERCENT_PATCH_SIZE * patch_size)
# this tuple is where we want to stick the center of the pointcloud in our voxel grid
# Ex: (20, 20, 18) leaving some extra room in the back half.
pc_center_in_voxel_grid = (patch_size*PERCENT_X, patch_size*PERCENT_Y, patch_size*PERCENT_Z)
# create a voxel grid.
vox_np = voxelize_points(
points=points[:, 0:3],
pc_bbox_center=center,
voxel_resolution=voxel_resolution,
num_voxels_per_dim=patch_size,
pc_center_in_voxel_grid=pc_center_in_voxel_grid)
# location in meters of the bottom corner of the voxel grid in world space
offset = np.array(center) - np.array(pc_center_in_voxel_grid) * voxel_resolution
# create a voxel grid object to contain the grid, shape, offset in the world, and grid resolution
vox = binvox_rw.Voxels(vox_np, vox_np.shape, tuple(offset), voxel_resolution * patch_size, "xyz")
return vox
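# Hedged usage sketch (not part of the original module): voxelize a small
# synthetic pointcloud into a 40^3 grid. The random cloud and the printed
# attributes below are illustrative assumptions, not project data.
if __name__ == "__main__":
    _rng = np.random.RandomState(0)
    _cloud = _rng.uniform(-0.05, 0.05, size=(500, 3)) + np.array([0.2, 0.1, 2.0])
    _vox = pc_to_binvox_for_shape_completion(_cloud, patch_size=40)
    # binvox_rw.Voxels exposes dims, translate and scale describing the grid
    print(_vox.dims, _vox.translate, _vox.scale)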
@numba.jit(forceobj=True)
def voxel_grid_jaccard_similarity(a, b):
'''
    Returns the number of voxels in the intersection of two voxel grids divided
    by the number of voxels in the union.
The inputs are expected to be numpy 5D ndarrays in BZCXY format.
'''
return np.mean(np.sum(a * b, axis=1) / np.sum((a + b) - a * b, axis=1))
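# Hedged check (illustrative only): two tiny 5D grids shaped
# (batch, z, channel, x, y) whose occupied z-slices overlap in one of the
# three occupied slices, so the expected Jaccard similarity is about 1/3.
if __name__ == "__main__":
    _a = np.zeros((1, 4, 1, 4, 4), dtype=np.float32)
    _b = np.zeros((1, 4, 1, 4, 4), dtype=np.float32)
    _a[0, :2] = 1.0
    _b[0, 1:3] = 1.0
    print(voxel_grid_jaccard_similarity(_a, _b))  # ~0.333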
|
CRLab/curvox
|
src/curvox/pc_vox_utils.py
|
Python
|
mit
| 14,287 | 0.00378 |
# -*- coding: utf-8 -*-
# Copyright 2012 splinter authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
class CookiesTest(object):
def test_create_and_access_a_cookie(self):
"should be able to create and access a cookie"
self.browser.cookies.add({'sha': 'zam'})
self.assertEqual(self.browser.cookies['sha'], 'zam')
def test_create_many_cookies_at_once_as_dict(self):
"should be able to create many cookies at once as dict"
cookies = {'sha': 'zam', 'foo': 'bar'}
self.browser.cookies.add(cookies)
self.assertEqual(self.browser.cookies['sha'], 'zam')
self.assertEqual(self.browser.cookies['foo'], 'bar')
def test_create_many_cookies_at_once_as_list(self):
"should be able to create many cookies at once as list"
cookies = [{'sha': 'zam'}, {'foo': 'bar'}]
self.browser.cookies.add(cookies)
self.assertEqual(self.browser.cookies['sha'], 'zam')
self.assertEqual(self.browser.cookies['foo'], 'bar')
def test_create_some_cookies_and_delete_them_all(self):
"should be able to delete all cookies"
self.browser.cookies.add({'whatever': 'and ever'})
self.browser.cookies.add({'anothercookie': 'im bored'})
self.browser.cookies.delete()
self.assertEqual(self.browser.cookies, {})
def test_create_and_delete_a_cookie(self):
"should be able to create and destroy a cookie"
self.browser.cookies.delete()
self.browser.cookies.add({'cookie': 'with milk'})
self.browser.cookies.delete('cookie')
self.assertEqual(self.browser.cookies, {})
def test_create_and_delete_many_cookies(self):
"should be able to create and destroy many cookies"
self.browser.cookies.delete()
self.browser.cookies.add({'acookie': 'cooked'})
self.browser.cookies.add({'anothercookie': 'uncooked'})
self.browser.cookies.add({'notacookie': 'halfcooked'})
self.browser.cookies.delete('acookie', 'notacookie')
self.assertEqual('uncooked', self.browser.cookies['anothercookie'])
def test_try_to_destroy_an_absent_cookie_and_nothing_happens(self):
self.browser.cookies.delete()
self.browser.cookies.add({'foo': 'bar'})
self.browser.cookies.delete('mwahahahaha')
self.assertEqual(self.browser.cookies, {'foo': 'bar'})
def test_create_and_get_all_cookies(self):
"should be able to create some cookies and retrieve them all"
self.browser.cookies.delete()
self.browser.cookies.add({'taco': 'shrimp'})
self.browser.cookies.add({'lavar': 'burton'})
self.assertEqual(len(self.browser.cookies.all()), 2)
self.browser.cookies.delete()
self.assertEqual(self.browser.cookies.all(), {})
def test_create_and_use_contains(self):
"should be able to create many cookies at once as dict"
cookies = {'sha': 'zam'}
self.browser.cookies.add(cookies)
self.assertIn('sha', self.browser.cookies)
self.assertNotIn('foo', self.browser.cookies)
|
nikolas/splinter
|
tests/cookies.py
|
Python
|
bsd-3-clause
| 3,162 | 0 |
"""
Search and get metadata for articles in Pubmed.
"""
import logging
import requests
from time import sleep
from typing import List
from functools import lru_cache
import xml.etree.ElementTree as ET
from indra.util import UnicodeXMLTreeBuilder as UTB
logger = logging.getLogger(__name__)
pubmed_search = 'https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi'
pubmed_fetch = 'https://eutils.ncbi.nlm.nih.gov/entrez/eutils/efetch.fcgi'
# Send request can't be cached by lru_cache because it takes a dict
# (a mutable/unhashable type) as an argument. We cache the callers instead.
def send_request(url, data):
try:
res = requests.get(url, params=data)
except requests.exceptions.Timeout as e:
logger.error('PubMed request timed out')
logger.error('url: %s, data: %s' % (url, data))
logger.error(e)
return None
except requests.exceptions.RequestException as e:
logger.error('PubMed request exception')
logger.error('url: %s, data: %s' % (url, data))
logger.error(e)
return None
if res.status_code == 429:
sleep(0.5)
res = requests.get(url, params=data)
if not res.status_code == 200:
logger.error('Got return code %d from pubmed client.'
% res.status_code)
return None
tree = ET.XML(res.content, parser=UTB())
return tree
@lru_cache(maxsize=100)
def get_ids(search_term, **kwargs):
"""Search Pubmed for paper IDs given a search term.
Search options can be passed as keyword arguments, some of which are
custom keywords identified by this function, while others are passed on
as parameters for the request to the PubMed web service
    For details on parameters that can be used in PubMed searches, see
    https://www.ncbi.nlm.nih.gov/books/NBK25499/#chapter4.ESearch. Some useful
    parameters to pass are: db='pmc' to search PMC instead of PubMed;
    reldate=2 to search for papers within the last 2 days; and
    mindate='2016/03/01', maxdate='2016/03/31' to search for papers in
    March 2016.
PubMed, by default, limits returned PMIDs to a small number, and this
number can be controlled by the "retmax" parameter. This function
uses a retmax value of 100,000 by default that can be changed via the
corresponding keyword argument.
Parameters
----------
search_term : str
A term for which the PubMed search should be performed.
use_text_word : Optional[bool]
If True, the "[tw]" string is appended to the search term to constrain
the search to "text words", that is words that appear as whole
in relevant parts of the PubMed entry (excl. for instance the journal
name or publication date) like the title and abstract. Using this
option can eliminate spurious search results such as all articles
published in June for a search for the "JUN" gene, or journal names
that contain Acad for a search for the "ACAD" gene.
See also: https://www.nlm.nih.gov/bsd/disted/pubmedtutorial/020_760.html
Default : True
kwargs : kwargs
Additional keyword arguments to pass to the PubMed search as
parameters.
"""
use_text_word = kwargs.pop('use_text_word', True)
if use_text_word:
search_term += '[tw]'
params = {'term': search_term,
'retmax': 100000,
'retstart': 0,
'db': 'pubmed',
'sort': 'pub+date'}
params.update(kwargs)
tree = send_request(pubmed_search, params)
if tree is None:
return []
if tree.find('ERROR') is not None:
logger.error(tree.find('ERROR').text)
return []
if tree.find('ErrorList') is not None:
for err in tree.find('ErrorList'):
logger.error('Error - %s: %s' % (err.tag, err.text))
return []
count = int(tree.find('Count').text)
id_terms = tree.findall('IdList/Id')
if id_terms is None:
return []
ids = [idt.text for idt in id_terms]
if count != len(ids):
logger.warning('Not all ids were retrieved for search %s;\n'
'limited at %d.' % (search_term, params['retmax']))
return ids
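# Hedged usage sketch (not part of the original client): a typical search
# restricted to a date range. The search term is an arbitrary example and the
# call requires network access to the NCBI E-utilities, so it is kept behind
# a __main__ guard.
if __name__ == '__main__':
    example_pmids = get_ids('JUN', mindate='2016/03/01', maxdate='2016/03/31')
    print('%d PMIDs found' % len(example_pmids))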
def get_id_count(search_term):
"""Get the number of citations in Pubmed for a search query.
Parameters
----------
search_term : str
A term for which the PubMed search should be performed.
Returns
-------
int or None
The number of citations for the query, or None if the query fails.
"""
params = {'term': search_term,
'rettype': 'count',
'db': 'pubmed'}
tree = send_request(pubmed_search, params)
if tree is None:
return None
else:
count = list(tree)[0].text
return int(count)
@lru_cache(maxsize=100)
def get_ids_for_gene(hgnc_name, **kwargs):
"""Get the curated set of articles for a gene in the Entrez database.
Search parameters for the Gene database query can be passed in as
keyword arguments.
Parameters
----------
hgnc_name : str
The HGNC name of the gene. This is used to obtain the HGNC ID
(using the hgnc_client module) and in turn used to obtain the Entrez
ID associated with the gene. Entrez is then queried for that ID.
"""
from indra.databases import hgnc_client
# Get the HGNC ID for the HGNC name
hgnc_id = hgnc_client.get_hgnc_id(hgnc_name)
if hgnc_id is None:
raise ValueError('Invalid HGNC name.')
# Get the Entrez ID
entrez_id = hgnc_client.get_entrez_id(hgnc_id)
if entrez_id is None:
raise ValueError('Entrez ID not found in HGNC table.')
# Query the Entrez Gene database
params = {'db': 'gene',
'retmode': 'xml',
'id': entrez_id}
params.update(kwargs)
tree = send_request(pubmed_fetch, params)
if tree is None:
return []
if tree.find('ERROR') is not None:
logger.error(tree.find('ERROR').text)
return []
# Get all PMIDs from the XML tree
id_terms = tree.findall('.//PubMedId')
if id_terms is None:
return []
# Use a set to remove duplicate IDs
ids = list(set([idt.text for idt in id_terms]))
return ids
def get_ids_for_mesh(mesh_id, major_topic=False, **kwargs):
"""Return PMIDs that are annotated with a given MeSH ID.
Parameters
----------
mesh_id : str
The MeSH ID of a term to search for, e.g., D009101.
major_topic : bool
If True, only papers for which the given MeSH ID is annotated as
a major topic are returned. Otherwise all annotations are considered.
Default: False
**kwargs
        Any further PubMed search arguments that are passed to
get_ids.
"""
from indra.databases import mesh_client
mesh_name = mesh_client.get_mesh_name(mesh_id)
if not mesh_name:
logger.error('Could not get MeSH name for ID %s' % mesh_id)
return []
suffix = 'majr' if major_topic else 'mh'
search_term = '%s [%s]' % (mesh_name, suffix)
ids = get_ids(search_term, use_text_word=False, **kwargs)
if mesh_id.startswith('C') and not major_topic:
# Get pmids for supplementary concepts as well
search_term = '%s [nm]' % mesh_name
ids2 = get_ids(search_term, use_text_word=False, **kwargs)
ids = list(set(ids) | set(ids2))
return ids
def get_article_xml(pubmed_id):
"""Get the Article subtree a single article from the Pubmed database.
Parameters
----------
pubmed_id : str
A PubMed ID.
Returns
-------
xml.etree.ElementTree.Element
The XML ElementTree Element that represents the Article portion of the
PubMed entry.
"""
full_xml_tree = get_full_xml(pubmed_id)
if full_xml_tree is None:
return None
article = full_xml_tree.find('PubmedArticle/MedlineCitation/Article')
    return article  # May be None
@lru_cache(maxsize=100)
def get_full_xml(pubmed_id):
"""Get the full XML tree of a single article from the Pubmed database.
Parameters
----------
pubmed_id : str
A PubMed ID.
Returns
-------
xml.etree.ElementTree.Element
The root element of the XML tree representing the PubMed entry.
The root is a PubmedArticleSet with a single PubmedArticle element
that contains the article metadata.
"""
if pubmed_id.upper().startswith('PMID'):
pubmed_id = pubmed_id[4:]
params = {'db': 'pubmed',
'retmode': 'xml',
'id': pubmed_id}
tree = send_request(pubmed_fetch, params)
return tree
def get_title(pubmed_id):
"""Get the title of an article in the Pubmed database."""
article = get_article_xml(pubmed_id)
if article is None:
return None
return _get_title_from_article_element(article)
def _get_title_from_article_element(article):
title_tag = article.find('ArticleTitle')
title = None
if title_tag is not None:
title = title_tag.text
if hasattr(title_tag, 'itertext'):
title = ''.join(list(title_tag.itertext()))
return title
def _abstract_from_article_element(article, prepend_title=False):
abstract = article.findall('Abstract/AbstractText')
if abstract is None:
return None
abstract_text = ' '.join(['' if not hasattr(abst, 'itertext')
else ' '.join(list(abst.itertext()))
for abst in abstract])
if prepend_title:
title = _get_title_from_article_element(article)
if title is not None:
if not title.endswith('.'):
title += '.'
abstract_text = title + ' ' + abstract_text
return abstract_text
def get_abstract(pubmed_id, prepend_title=True):
"""Get the abstract of an article in the Pubmed database."""
article = get_article_xml(pubmed_id)
if article is None:
return None
return _abstract_from_article_element(article, prepend_title)
# A function to get the text for the element, or None if not found
def _find_elem_text(root, xpath_string):
elem = root.find(xpath_string)
return None if elem is None else elem.text
def _get_journal_info(medline_citation, get_issns_from_nlm):
# Journal info
journal = medline_citation.find('Article/Journal')
journal_title = _find_elem_text(journal, 'Title')
journal_abbrev = _find_elem_text(journal, 'ISOAbbreviation')
# Add the ISSN from the article record
issn_list = []
issn = _find_elem_text(journal, 'ISSN')
if issn:
issn_list.append(issn)
# Add the Linking ISSN from the article record
issn_linking = _find_elem_text(medline_citation,
'MedlineJournalInfo/ISSNLinking')
if issn_linking:
issn_list.append(issn_linking)
# Now get the list of ISSNs from the NLM Catalog
nlm_id = _find_elem_text(medline_citation,
'MedlineJournalInfo/NlmUniqueID')
if nlm_id and get_issns_from_nlm:
nlm_issn_list = get_issns_for_journal(nlm_id)
if nlm_issn_list:
issn_list += nlm_issn_list
# Remove any duplicate issns
issn_list = list(set(issn_list))
return {'journal_title': journal_title, 'journal_abbrev': journal_abbrev,
'issn_list': issn_list, 'journal_nlm_id': nlm_id}
def _get_pubmed_publication_date(pubmed_data):
date_dict = dict.fromkeys(['year', 'month', 'day'])
# Order potential statuses in order of preferences
status_list = ['pubmed', 'accepted', 'revised', 'received', 'entrez']
# Look for various statuses, in order of preference as PubStatus in
# PubmedPubDate
for status in status_list:
pubmed_pub_date = \
pubmed_data.find('./History/PubMedPubDate[@PubStatus="%s"]'
% status)
if pubmed_pub_date is not None:
break
else:
logger.warning("Could not find pub date in: \n%s"
% ET.tostring(pubmed_data).decode('utf-8'))
return date_dict
def _find_date(element):
value = _find_elem_text(pubmed_pub_date, element)
return int(value) if value else None
# Get date elements from extracted pubmed_pub_date element
for date_elem in ['Year', 'Month', 'Day']:
date_dict[date_elem.lower()] = _find_date(date_elem)
return date_dict
def _get_article_info(medline_citation, pubmed_data):
article = medline_citation.find('Article')
pmid = _find_elem_text(medline_citation, './PMID')
pii = _find_elem_text(article,
'./ELocationID[@EIdType="pii"][@ValidYN="Y"]')
# Look for the DOI in the ELocationID field...
doi = _find_elem_text(article,
'./ELocationID[@EIdType="doi"][@ValidYN="Y"]')
# ...and if that doesn't work, look in the ArticleIdList
if doi is None:
doi = _find_elem_text(pubmed_data, './/ArticleId[@IdType="doi"]')
# Try to get the PMCID
pmcid = _find_elem_text(pubmed_data, './/ArticleId[@IdType="pmc"]')
# Title
title = _get_title_from_article_element(article)
# Author list
author_elems = article.findall('AuthorList/Author/LastName')
author_names = None if author_elems is None \
else [au.text for au in author_elems]
# Get the page number entry
page = _find_elem_text(article, 'Pagination/MedlinePgn')
return {'pmid': pmid, 'pii': pii, 'doi': doi, 'pmcid': pmcid,
'title': title, 'authors': author_names, 'page': page}
def get_metadata_from_xml_tree(tree, get_issns_from_nlm=False,
get_abstracts=False, prepend_title=False,
mesh_annotations=True):
"""Get metadata for an XML tree containing PubmedArticle elements.
Documentation on the XML structure can be found at:
- https://www.nlm.nih.gov/bsd/licensee/elements_descriptions.html
- https://www.nlm.nih.gov/bsd/licensee/elements_alphabetical.html
Parameters
----------
tree : xml.etree.ElementTree
ElementTree containing one or more PubmedArticle elements.
get_issns_from_nlm : Optional[bool]
        Look up the full list of ISSN numbers for the journal associated with
the article, which helps to match articles to CrossRef search results.
Defaults to False, since it slows down performance.
get_abstracts : Optional[bool]
Indicates whether to include the Pubmed abstract in the results.
Default: False
prepend_title : Optional[bool]
If get_abstracts is True, specifies whether the article title should
be prepended to the abstract text. Default: False
mesh_annotations : Optional[bool]
If True, extract mesh annotations from the pubmed entries and include
in the returned data. If false, don't. Default: True
Returns
-------
dict of dicts
Dictionary indexed by PMID. Each value is a dict containing the
following fields: 'doi', 'title', 'authors', 'journal_title',
'journal_abbrev', 'journal_nlm_id', 'issn_list', 'page'.
"""
# Iterate over the articles and build the results dict
results = {}
pm_articles = tree.findall('./PubmedArticle')
for art_ix, pm_article in enumerate(pm_articles):
medline_citation = pm_article.find('./MedlineCitation')
pubmed_data = pm_article.find('PubmedData')
# Build the result
result = {}
article_info = _get_article_info(medline_citation, pubmed_data)
result.update(article_info)
journal_info = _get_journal_info(medline_citation, get_issns_from_nlm)
result.update(journal_info)
if mesh_annotations:
context_info = _get_annotations(medline_citation)
result.update(context_info)
publication_date = _get_pubmed_publication_date(pubmed_data)
result['publication_date'] = publication_date
# Get the abstracts if requested
if get_abstracts:
abstract = _abstract_from_article_element(
medline_citation.find('Article'),
prepend_title=prepend_title
)
result['abstract'] = abstract
# Add to dict
results[article_info['pmid']] = result
return results
def get_mesh_annotations(pmid):
"""Return a list of MeSH annotations for a given PubMed ID.
Parameters
----------
pmid : str
A PubMed ID.
Returns
-------
list of dict
A list of dicts that represent MeSH annotations with the following keys:
"mesh" representing the MeSH ID, "text" the standrd name associated with
the MeSH ID, "major_topic" a boolean flag set depending on whether
the given MeSH ID is assigned as a major topic to the article, and
"qualifier" which is a MeSH qualifier ID associated with the annotation,
if available, otherwise None.
"""
full_xml_tree = get_full_xml(pmid)
if not full_xml_tree:
return None
medline_citation = full_xml_tree.find('PubmedArticle/MedlineCitation')
if not medline_citation:
return None
annotations = _get_annotations(medline_citation)
return annotations.get('mesh_annotations')
def _get_annotations(medline_citation):
def _major_topic(e):
if e is not None and e.get('MajorTopicYN').upper() == 'Y':
return True
return False
info = []
for elem in medline_citation.findall('.//MeshHeading'):
dname = elem.find('DescriptorName')
qualifier_elems = elem.findall('QualifierName')
mid = dname.attrib['UI']
major = _major_topic(dname) or any(_major_topic(qual) for qual
in qualifier_elems)
qualifiers = [{'text': qual.text, 'mesh': qual.attrib['UI']}
for qual in qualifier_elems]
qual = qualifiers[0] if qualifiers else None
info.append({'type': 'main', 'mesh': mid, 'text': dname.text,
'major_topic': major,
# This is only here for backwards compatibility with
# INDRA DB which expects a single qualifier or None and
# turns the single qualifier into an int internally, so
# we can't easily put a joined string of multiple
# qualifiers here.
'qualifier': qual,
# This is the proper full list of qualifiers
'qualifiers': qualifiers})
for elem in medline_citation.findall('.//SupplMeshList/SupplMeshName'):
info.append({'type': 'supplementary', 'mesh': elem.attrib['UI'], 'text': elem.text,
'qualifier': None, 'qualifiers': [],
'major_topic': False})
return {'mesh_annotations': info}
def get_metadata_for_ids(pmid_list, get_issns_from_nlm=False,
get_abstracts=False, prepend_title=False):
"""Get article metadata for up to 200 PMIDs from the Pubmed database.
Parameters
----------
pmid_list : list of str
Can contain 1-200 PMIDs.
get_issns_from_nlm : bool
        Look up the full list of ISSN numbers for the journal associated with
the article, which helps to match articles to CrossRef search results.
Defaults to False, since it slows down performance.
get_abstracts : bool
Indicates whether to include the Pubmed abstract in the results.
prepend_title : bool
If get_abstracts is True, specifies whether the article title should
be prepended to the abstract text.
Returns
-------
dict of dicts
Dictionary indexed by PMID. Each value is a dict containing the
following fields: 'doi', 'title', 'authors', 'journal_title',
'journal_abbrev', 'journal_nlm_id', 'issn_list', 'page'.
"""
if len(pmid_list) > 200:
raise ValueError("Metadata query is limited to 200 PMIDs at a time.")
params = {'db': 'pubmed',
'retmode': 'xml',
'id': pmid_list}
tree = send_request(pubmed_fetch, params)
if tree is None:
return None
return get_metadata_from_xml_tree(tree, get_issns_from_nlm, get_abstracts,
prepend_title)
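# Hedged usage sketch (illustrative only, requires network access): chain
# get_ids with get_metadata_for_ids to look at a few article titles. The
# search term is an arbitrary example.
if __name__ == '__main__':
    example_ids = get_ids('ACAD', retmax=5)
    example_metadata = get_metadata_for_ids(example_ids) if example_ids else {}
    for example_pmid, info in (example_metadata or {}).items():
        print('%s: %s' % (example_pmid, info.get('title')))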
@lru_cache(maxsize=1000)
def get_issns_for_journal(nlm_id):
"""Get a list of the ISSN numbers for a journal given its NLM ID.
Information on NLM XML DTDs is available at
https://www.nlm.nih.gov/databases/dtd/
"""
params = {'db': 'nlmcatalog',
'retmode': 'xml',
'id': nlm_id}
tree = send_request(pubmed_fetch, params)
if tree is None:
return None
issn_list = tree.findall('.//ISSN')
issn_linking = tree.findall('.//ISSNLinking')
issns = issn_list + issn_linking
# No ISSNs found!
if not issns:
return None
else:
return [issn.text for issn in issns]
def expand_pagination(pages):
"""Convert a page number to long form, e.g., from 456-7 to 456-457."""
# If there is no hyphen, it's a single page, and we're good to go
parts = pages.split('-')
if len(parts) == 1: # No hyphen, so no split
return pages
elif len(parts) == 2:
start = parts[0]
end = parts[1]
# If the end is the same number of digits as the start, then we
# don't change anything!
if len(start) == len(end):
return pages
# Otherwise, replace the last digits of start with the digits of end
num_end_digits = len(end)
new_end = start[:-num_end_digits] + end
return '%s-%s' % (start, new_end)
else: # More than one hyphen, something weird happened
logger.warning("Multiple hyphens in page number: %s" % pages)
return pages
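# Hedged examples (illustrative only): expand_pagination is a pure string
# helper, so it can be checked without any PubMed access.
if __name__ == '__main__':
    assert expand_pagination('456-7') == '456-457'
    assert expand_pagination('123') == '123'
    assert expand_pagination('1100-22') == '1100-1122'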
def get_substance_annotations(pubmed_id: str) -> List[str]:
"""Return substance MeSH ID for a given PubMedID.
Note that substance annotations often overlap with MeSH annotations,
however, there are cases where a substance annotation is not available
under MeSH annotations.
Parameters
----------
pubmed_id :
        PubMed ID whose substance MeSH IDs will be returned.
Returns
-------
:
        Substance MeSH IDs corresponding to the given PubMed paper. If none
        are present or the query fails, an empty list is returned.
"""
    root = get_full_xml(pubmed_id)
    if root is None:
        return []
    nodes = root.findall('.//MedlineCitation/ChemicalList')
if len(nodes) == 0:
logger.error('Could not retrieve substance MeSH IDs for %s' % pubmed_id)
return []
uid = [b.attrib.get('UI') for node in nodes
for c in list(node) for b in c.iter('*')
if 'UI' in b.attrib]
return uid
|
sorgerlab/belpy
|
indra/literature/pubmed_client.py
|
Python
|
mit
| 22,934 | 0.000392 |
__author__ = 'Victoria'
#Decorator Pattern in Characters
class BarDecorators(Barbarian):
pass
class ImageBarDecorator(BarDecorators):
def __init__(self, decorated, picFile):
self.decorated = decorated
self.picFile = picFile
super(ImageBarDecorator, self).__init__(self.decorated.canvas,
self.decorated.positionX, self.decorated.positionY, self.decorated.name, self.decorated.picFile)
class FastBarMoveDecorator(BarDecorators):
def __init__(self, decorated):
        self.decorated = decorated
super(FastBarMoveDecorator, self).__init__(self.decorated.canvas,
self.decorated.positionX, self.decorated.positionY, self.decorated.name, self.decorated.picFile)
#Decorator Pattern in Monsters
class DragDecorators(Dragon):
pass
class ImageDragDecorator(DragDecorators):
def __init__(self, decorated, picFile):
self.decorated = decorated
self.picFile = picFile
super(ImageDragDecorator, self).__init__(self.decorated.canvas,
self.decorated.positionX, self.decorated.positionY, self.decorated.name, self.decorated.picFile)
class FastDragMoveDecorator(DragDecorators):
def __init__(self, decorated):
self.decorated = decorated
super(FastDragMoveDecorator, self).__init__(self.decorated.canvas,
self.decorated.positionX, self.decorated.positionY, self.decorated.name, self.decorated.picFile)
|
victorianorton/SimpleRPGGame
|
src/game/Decorators.py
|
Python
|
mit
| 1,582 | 0.007585 |
def somefunc():
pass
|
level12/blazeweb
|
tests/apps/nlsupporting/components/news/__init__.py
|
Python
|
bsd-3-clause
| 26 | 0 |
import os
import sys
import subprocess
""" Use gpg to decrypt password.
"""
def mailpasswd(path):
cmd = "gpg --quiet --batch --use-agent --decrypt --output - " + os.path.expanduser(path)
try:
return subprocess.check_output(cmd, shell=True).strip()
except subprocess.CalledProcessError:
return ""
# get password either from gpg file (when run from shell) or from stdin (when run from imapfilter)
def get_passwd_check_ppid(path):
# get parent process cmdline
f = open("/proc/%s/cmdline" % os.getppid(), "r")
cmdline = f.read()
f.close()
# check if run from imapfilter
if "imapfilter" in cmdline:
return raw_input()
else:
return mailpasswd(path)
# mapping for nametrans
# a dict of strings of the form {<remote>: <local>, ...}, mapping each remote
# folder name to a local folder name (see the usage sketch at the end of this file)
mapping_fjfi = {
'INBOX' : 'INBOX',
'Drafts' : 'drafts',
'Sent Items' : 'sent',
'Deleted Items' : 'trash',
'Junk E-Mail' : 'spam',
}
mapping_gmail = {
'INBOX' : 'INBOX',
'[Gmail]/Drafts' : 'drafts',
'[Gmail]/Sent Mail' : 'sent',
'[Gmail]/Bin' : 'trash',
'[Gmail]/Spam' : 'spam',
}
mapping_gmx = {
'INBOX' : 'INBOX',
'Drafts' : 'drafts',
'Sent' : 'sent',
'Spam' : 'spam',
'Trash' : 'trash',
'arch' : 'arch',
'aur-general' : 'aur-general',
'arch-general' : 'arch-general',
'arch-wiki' : 'arch-wiki',
'mw' : 'mw',
}
# values from mapping_* dicts with high priority
prio_queue_fjfi = ['INBOX']
prio_queue_gmail = ['INBOX']
prio_queue_gmx = ['INBOX', 'arch', 'arch-wiki', 'arch-general', 'aur-general']
def nt_remote(mapping):
def inner(folder):
try:
return mapping[folder]
except:
return folder
return inner
def nt_local(mapping):
r_mapping = dict(zip(mapping.values(), mapping.keys()))
def inner(folder):
try:
return r_mapping[folder]
except:
return folder
return inner
# return False if folder not in mapping.keys()
def exclude(mapping):
def inner(folder):
if folder in mapping.keys():
return True
return False
return inner
# compare by position in queue (mapping_*.values())
def fd_priority(prio_queue):
def inner(x, y):
if x in prio_queue and y in prio_queue:
return cmp(prio_queue.index(x), prio_queue.index(y))
elif x in prio_queue:
return -1
elif y in prio_queue:
return 1
else:
return 0
return inner
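# Hedged usage sketch (illustrative only): the nametrans helpers are plain
# closures over the mapping dicts, so they can be checked without offlineimap.
if __name__ == '__main__':
    to_local = nt_remote(mapping_gmail)
    to_remote = nt_local(mapping_gmail)
    print(to_local('[Gmail]/Spam'))  # -> 'spam'
    print(to_remote('sent'))  # -> '[Gmail]/Sent Mail'
    print(exclude(mapping_gmail)('Chats'))  # -> False, 'Chats' is not in the mapping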
|
ajaybhatia/archlinux-dotfiles
|
home/.config/offlineimap/offlineimap-helpers.py
|
Python
|
mit
| 2,790 | 0.010394 |
# -*- coding: utf-8 -*-
"""
Created on Sun Aug 7 18:08:41 2011
@author: steven
"""
# word = 'banana'
# count = 0
# for letter in word:
# if letter == 'a':
# count = count + 1
# print count
# Rewrite this function so that instead of traversing the string, it uses the
# three-parameter version of find from the previous section.
# Current Status: Complete
def find(letter, word, index):
while index < len(word):
if word[index] == letter:
return index
index += 1
return -1
def count(letter, word):
counter = 0
index = 0
while index < len(word):
result = find(letter, word, index)
if result == -1:
return counter
else:
counter += 1
index = result + 1
return counter
print count("n", "Think Python")
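# Hedged check (not part of the exercise): the three-parameter find starts
# searching at the given index, so index 2 skips the first 'a' in "banana".
print find("a", "banana", 2) # -> 3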
|
epequeno/ThinkPy-Solutions
|
ch08/8.06.py
|
Python
|
gpl-3.0
| 828 | 0.002415 |
# Docker-specific local settings
import os
DEBUG = True
TEMPLATE_DEBUG = DEBUG
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'db',
}
}
# Make this unique, and don't share it with anybody.
SECRET_KEY = ''
TEMPLATE_DIRS = (
'/srv/webldap/templates',
)
EMAIL_FROM = 'root@localhost'
REQ_EXPIRE_HRS = 48
REQ_EXPIRE_STR = '48 heures'
LDAP_URI = 'ldap://{}:{}'.format(os.environ['LDAP_PORT_389_TCP_ADDR'],
os.environ['LDAP_PORT_389_TCP_PORT'])
LDAP_STARTTLS = False
LDAP_CACERT = ''
LDAP_BASE = 'dc=example,dc=net'
LDAP_WEBLDAP_USER = 'cn=webldap,ou=service-users,dc=example,dc=net'
LDAP_WEBLDAP_PASSWD = 'secret'
LDAP_DEFAULT_GROUPS = []
LDAP_DEFAULT_ROLES = ['member']
|
FedeRez/webldap
|
app/webldap/local_settings.docker.py
|
Python
|
mit
| 764 | 0 |
class DataLabels(object):
def __init__(self, enabled=True):
self.enabled = enabled
def show_labels(self, enable):
if not type(enable) is bool:
raise TypeError('enable should be a boolean (True or False).')
self.enabled = enable
def to_javascript(self):
jsc = "dataLabels: {"
jsc += "enabled: "
if self.enabled:
jsc += "true"
else:
jsc += "false"
jsc += "}"
return jsc
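# Hedged usage sketch (illustrative only): DataLabels renders a small
# Highcharts-style "dataLabels" fragment that other option objects can embed.
if __name__ == '__main__':
    print(DataLabels(enabled=False).to_javascript())  # dataLabels: {enabled: false}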
|
jpmfribeiro/PyCharts
|
pycharts/fields/plot_options/data_labels.py
|
Python
|
mit
| 493 | 0.004057 |
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Functional test case that utilizes httplib2 against the API server"""
import hashlib
import httplib2
from glance.openstack.common import jsonutils
from glance.openstack.common import units
from glance.tests import functional
from glance.tests.utils import minimal_headers
from glance.tests.utils import skip_if_disabled
FIVE_KB = 5 * units.Ki
FIVE_GB = 5 * units.Gi
class TestApi(functional.FunctionalTest):
"""Functional tests using httplib2 against the API server"""
@skip_if_disabled
def test_get_head_simple_post(self):
"""
We test the following sequential series of actions:
0. GET /images
- Verify no public images
1. GET /images/detail
- Verify no public images
2. POST /images with public image named Image1
and no custom properties
- Verify 201 returned
3. HEAD image
- Verify HTTP headers have correct information we just added
4. GET image
- Verify all information on image we just added is correct
5. GET /images
- Verify the image we just added is returned
6. GET /images/detail
- Verify the image we just added is returned
7. PUT image with custom properties of "distro" and "arch"
- Verify 200 returned
8. PUT image with too many custom properties
- Verify 413 returned
        9. GET /images/detail
- Verify updated information about image was stored
10. PUT image
- Remove a previously existing property.
11. PUT image
- Add a previously deleted property.
12. PUT image/members/member1
- Add member1 to image
13. PUT image/members/member2
- Add member2 to image
14. GET image/members
- List image members
15. DELETE image/members/member1
- Delete image member1
16. PUT image/members
- Attempt to replace members with an overlimit amount
17. PUT image/members/member11
- Attempt to add a member while at limit
18. POST /images with another public image named Image2
- attribute and three custom properties, "distro", "arch" & "foo"
- Verify a 200 OK is returned
19. HEAD image2
- Verify image2 found now
20. GET /images
- Verify 2 public images
21. GET /images with filter on user-defined property "distro".
- Verify both images are returned
22. GET /images with filter on user-defined property 'distro' but
- with non-existent value. Verify no images are returned
23. GET /images with filter on non-existent user-defined property
- "boo". Verify no images are returned
24. GET /images with filter 'arch=i386'
- Verify only image2 is returned
25. GET /images with filter 'arch=x86_64'
- Verify only image1 is returned
26. GET /images with filter 'foo=bar'
- Verify only image2 is returned
27. DELETE image1
- Delete image
28. GET image/members
- List deleted image members
29. PUT image/members/member2
- Update existing member2 of deleted image
30. PUT image/members/member3
- Add member3 to deleted image
31. DELETE image/members/member2
- Delete member2 from deleted image
32. DELETE image2
- Delete image
        33. GET /images
        - Verify no images are listed
        34. HEAD /images/detail
        - Verify 405 returned
        """
self.cleanup()
self.start_servers(**self.__dict__.copy())
# 0. GET /images
# Verify no public images
path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
self.assertEqual(content, '{"images": []}')
# 1. GET /images/detail
# Verify no public images
path = "http://%s:%d/v1/images/detail" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
self.assertEqual(content, '{"images": []}')
# 2. POST /images with public image named Image1
# attribute and no custom properties. Verify a 200 OK is returned
image_data = "*" * FIVE_KB
headers = minimal_headers('Image1')
path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'POST', headers=headers,
body=image_data)
self.assertEqual(response.status, 201)
data = jsonutils.loads(content)
image_id = data['image']['id']
self.assertEqual(data['image']['checksum'],
hashlib.md5(image_data).hexdigest())
self.assertEqual(data['image']['size'], FIVE_KB)
self.assertEqual(data['image']['name'], "Image1")
self.assertEqual(data['image']['is_public'], True)
# 3. HEAD image
# Verify image found now
path = "http://%s:%d/v1/images/%s" % ("127.0.0.1", self.api_port,
image_id)
http = httplib2.Http()
response, content = http.request(path, 'HEAD')
self.assertEqual(response.status, 200)
self.assertEqual(response['x-image-meta-name'], "Image1")
# 4. GET image
# Verify all information on image we just added is correct
path = "http://%s:%d/v1/images/%s" % ("127.0.0.1", self.api_port,
image_id)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
expected_image_headers = {
'x-image-meta-id': image_id,
'x-image-meta-name': 'Image1',
'x-image-meta-is_public': 'True',
'x-image-meta-status': 'active',
'x-image-meta-disk_format': 'raw',
'x-image-meta-container_format': 'ovf',
'x-image-meta-size': str(FIVE_KB)}
expected_std_headers = {
'content-length': str(FIVE_KB),
'content-type': 'application/octet-stream'}
for expected_key, expected_value in expected_image_headers.items():
self.assertEqual(response[expected_key], expected_value,
"For key '%s' expected header value '%s'. "
"Got '%s'" % (expected_key,
expected_value,
response[expected_key]))
for expected_key, expected_value in expected_std_headers.items():
self.assertEqual(response[expected_key], expected_value,
"For key '%s' expected header value '%s'. "
"Got '%s'" % (expected_key,
expected_value,
response[expected_key]))
self.assertEqual(content, "*" * FIVE_KB)
self.assertEqual(hashlib.md5(content).hexdigest(),
hashlib.md5("*" * FIVE_KB).hexdigest())
# 5. GET /images
# Verify one public image
path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
expected_result = {"images": [
{"container_format": "ovf",
"disk_format": "raw",
"id": image_id,
"name": "Image1",
"checksum": "c2e5db72bd7fd153f53ede5da5a06de3",
"size": 5120}]}
self.assertEqual(jsonutils.loads(content), expected_result)
# 6. GET /images/detail
# Verify image and all its metadata
path = "http://%s:%d/v1/images/detail" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
expected_image = {
"status": "active",
"name": "Image1",
"deleted": False,
"container_format": "ovf",
"disk_format": "raw",
"id": image_id,
"is_public": True,
"deleted_at": None,
"properties": {},
"size": 5120}
image = jsonutils.loads(content)
for expected_key, expected_value in expected_image.items():
self.assertEqual(expected_value, image['images'][0][expected_key],
"For key '%s' expected header value '%s'. "
"Got '%s'" % (expected_key,
expected_value,
image['images'][0][expected_key]))
# 7. PUT image with custom properties of "distro" and "arch"
# Verify 200 returned
headers = {'X-Image-Meta-Property-Distro': 'Ubuntu',
'X-Image-Meta-Property-Arch': 'x86_64'}
path = "http://%s:%d/v1/images/%s" % ("127.0.0.1", self.api_port,
image_id)
http = httplib2.Http()
response, content = http.request(path, 'PUT', headers=headers)
self.assertEqual(response.status, 200)
data = jsonutils.loads(content)
self.assertEqual(data['image']['properties']['arch'], "x86_64")
self.assertEqual(data['image']['properties']['distro'], "Ubuntu")
# 8. PUT image with too many custom properties
# Verify 413 returned
headers = {}
for i in range(11): # configured limit is 10
headers['X-Image-Meta-Property-foo%d' % i] = 'bar'
path = "http://%s:%d/v1/images/%s" % ("127.0.0.1", self.api_port,
image_id)
http = httplib2.Http()
response, content = http.request(path, 'PUT', headers=headers)
self.assertEqual(response.status, 413)
# 9. GET /images/detail
# Verify image and all its metadata
path = "http://%s:%d/v1/images/detail" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
expected_image = {
"status": "active",
"name": "Image1",
"deleted": False,
"container_format": "ovf",
"disk_format": "raw",
"id": image_id,
"is_public": True,
"deleted_at": None,
"properties": {'distro': 'Ubuntu', 'arch': 'x86_64'},
"size": 5120}
image = jsonutils.loads(content)
for expected_key, expected_value in expected_image.items():
self.assertEqual(expected_value, image['images'][0][expected_key],
"For key '%s' expected header value '%s'. "
"Got '%s'" % (expected_key,
expected_value,
image['images'][0][expected_key]))
# 10. PUT image and remove a previously existing property.
headers = {'X-Image-Meta-Property-Arch': 'x86_64'}
path = "http://%s:%d/v1/images/%s" % ("127.0.0.1", self.api_port,
image_id)
http = httplib2.Http()
response, content = http.request(path, 'PUT', headers=headers)
self.assertEqual(response.status, 200)
path = "http://%s:%d/v1/images/detail" % ("127.0.0.1", self.api_port)
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = jsonutils.loads(content)['images'][0]
self.assertEqual(len(data['properties']), 1)
self.assertEqual(data['properties']['arch'], "x86_64")
# 11. PUT image and add a previously deleted property.
headers = {'X-Image-Meta-Property-Distro': 'Ubuntu',
'X-Image-Meta-Property-Arch': 'x86_64'}
path = "http://%s:%d/v1/images/%s" % ("127.0.0.1", self.api_port,
image_id)
http = httplib2.Http()
response, content = http.request(path, 'PUT', headers=headers)
self.assertEqual(response.status, 200)
data = jsonutils.loads(content)
path = "http://%s:%d/v1/images/detail" % ("127.0.0.1", self.api_port)
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = jsonutils.loads(content)['images'][0]
self.assertEqual(len(data['properties']), 2)
self.assertEqual(data['properties']['arch'], "x86_64")
self.assertEqual(data['properties']['distro'], "Ubuntu")
self.assertNotEqual(data['created_at'], data['updated_at'])
# 12. Add member to image
path = ("http://%s:%d/v1/images/%s/members/pattieblack" %
("127.0.0.1", self.api_port, image_id))
http = httplib2.Http()
response, content = http.request(path, 'PUT')
self.assertEqual(response.status, 204)
# 13. Add member to image
path = ("http://%s:%d/v1/images/%s/members/pattiewhite" %
("127.0.0.1", self.api_port, image_id))
http = httplib2.Http()
response, content = http.request(path, 'PUT')
self.assertEqual(response.status, 204)
# 14. List image members
path = ("http://%s:%d/v1/images/%s/members" %
("127.0.0.1", self.api_port, image_id))
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
data = jsonutils.loads(content)
self.assertEqual(len(data['members']), 2)
self.assertEqual(data['members'][0]['member_id'], 'pattieblack')
self.assertEqual(data['members'][1]['member_id'], 'pattiewhite')
# 15. Delete image member
path = ("http://%s:%d/v1/images/%s/members/pattieblack" %
("127.0.0.1", self.api_port, image_id))
http = httplib2.Http()
response, content = http.request(path, 'DELETE')
self.assertEqual(response.status, 204)
# 16. Attempt to replace members with an overlimit amount
# Adding 11 image members should fail since configured limit is 10
path = ("http://%s:%d/v1/images/%s/members" %
("127.0.0.1", self.api_port, image_id))
memberships = []
for i in range(11):
member_id = "foo%d" % i
memberships.append(dict(member_id=member_id))
http = httplib2.Http()
body = jsonutils.dumps(dict(memberships=memberships))
response, content = http.request(path, 'PUT', body=body)
self.assertEqual(response.status, 413)
# 17. Attempt to add a member while at limit
# Adding an 11th member should fail since configured limit is 10
path = ("http://%s:%d/v1/images/%s/members" %
("127.0.0.1", self.api_port, image_id))
memberships = []
for i in range(10):
member_id = "foo%d" % i
memberships.append(dict(member_id=member_id))
http = httplib2.Http()
body = jsonutils.dumps(dict(memberships=memberships))
response, content = http.request(path, 'PUT', body=body)
self.assertEqual(response.status, 204)
path = ("http://%s:%d/v1/images/%s/members/fail_me" %
("127.0.0.1", self.api_port, image_id))
http = httplib2.Http()
response, content = http.request(path, 'PUT')
self.assertEqual(response.status, 413)
# 18. POST /images with another public image named Image2
# attribute and three custom properties, "distro", "arch" & "foo".
# Verify a 200 OK is returned
image_data = "*" * FIVE_KB
headers = minimal_headers('Image2')
headers['X-Image-Meta-Property-Distro'] = 'Ubuntu'
headers['X-Image-Meta-Property-Arch'] = 'i386'
headers['X-Image-Meta-Property-foo'] = 'bar'
path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'POST', headers=headers,
body=image_data)
self.assertEqual(response.status, 201)
data = jsonutils.loads(content)
image2_id = data['image']['id']
self.assertEqual(data['image']['checksum'],
hashlib.md5(image_data).hexdigest())
self.assertEqual(data['image']['size'], FIVE_KB)
self.assertEqual(data['image']['name'], "Image2")
self.assertEqual(data['image']['is_public'], True)
self.assertEqual(data['image']['properties']['distro'], 'Ubuntu')
self.assertEqual(data['image']['properties']['arch'], 'i386')
self.assertEqual(data['image']['properties']['foo'], 'bar')
# 19. HEAD image2
# Verify image2 found now
path = "http://%s:%d/v1/images/%s" % ("127.0.0.1", self.api_port,
image2_id)
http = httplib2.Http()
response, content = http.request(path, 'HEAD')
self.assertEqual(response.status, 200)
self.assertEqual(response['x-image-meta-name'], "Image2")
# 20. GET /images
# Verify 2 public images
path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
images = jsonutils.loads(content)['images']
self.assertEqual(len(images), 2)
self.assertEqual(images[0]['id'], image2_id)
self.assertEqual(images[1]['id'], image_id)
# 21. GET /images with filter on user-defined property 'distro'.
# Verify both images are returned
path = "http://%s:%d/v1/images?property-distro=Ubuntu" % \
("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
images = jsonutils.loads(content)['images']
self.assertEqual(len(images), 2)
self.assertEqual(images[0]['id'], image2_id)
self.assertEqual(images[1]['id'], image_id)
# 22. GET /images with filter on user-defined property 'distro' but
# with non-existent value. Verify no images are returned
path = "http://%s:%d/v1/images?property-distro=fedora" % \
("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
images = jsonutils.loads(content)['images']
self.assertEqual(len(images), 0)
# 23. GET /images with filter on non-existent user-defined property
# 'boo'. Verify no images are returned
path = "http://%s:%d/v1/images?property-boo=bar" % ("127.0.0.1",
self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
images = jsonutils.loads(content)['images']
self.assertEqual(len(images), 0)
# 24. GET /images with filter 'arch=i386'
# Verify only image2 is returned
path = "http://%s:%d/v1/images?property-arch=i386" % ("127.0.0.1",
self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
images = jsonutils.loads(content)['images']
self.assertEqual(len(images), 1)
self.assertEqual(images[0]['id'], image2_id)
# 25. GET /images with filter 'arch=x86_64'
# Verify only image1 is returned
path = "http://%s:%d/v1/images?property-arch=x86_64" % ("127.0.0.1",
self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
images = jsonutils.loads(content)['images']
self.assertEqual(len(images), 1)
self.assertEqual(images[0]['id'], image_id)
# 26. GET /images with filter 'foo=bar'
# Verify only image2 is returned
path = "http://%s:%d/v1/images?property-foo=bar" % ("127.0.0.1",
self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
images = jsonutils.loads(content)['images']
self.assertEqual(len(images), 1)
self.assertEqual(images[0]['id'], image2_id)
# 27. DELETE image1
path = "http://%s:%d/v1/images/%s" % ("127.0.0.1", self.api_port,
image_id)
http = httplib2.Http()
response, content = http.request(path, 'DELETE')
self.assertEqual(response.status, 200)
# 28. Try to list members of deleted image
path = ("http://%s:%d/v1/images/%s/members" %
("127.0.0.1", self.api_port, image_id))
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 404)
# 29. Try to update member of deleted image
path = ("http://%s:%d/v1/images/%s/members" %
("127.0.0.1", self.api_port, image_id))
http = httplib2.Http()
fixture = [{'member_id': 'pattieblack', 'can_share': 'false'}]
body = jsonutils.dumps(dict(memberships=fixture))
response, content = http.request(path, 'PUT', body=body)
self.assertEqual(response.status, 404)
# 30. Try to add member to deleted image
path = ("http://%s:%d/v1/images/%s/members/chickenpattie" %
("127.0.0.1", self.api_port, image_id))
http = httplib2.Http()
response, content = http.request(path, 'PUT')
self.assertEqual(response.status, 404)
# 31. Try to delete member of deleted image
path = ("http://%s:%d/v1/images/%s/members/pattieblack" %
("127.0.0.1", self.api_port, image_id))
http = httplib2.Http()
response, content = http.request(path, 'DELETE')
self.assertEqual(response.status, 404)
# 32. DELETE image2
path = "http://%s:%d/v1/images/%s" % ("127.0.0.1", self.api_port,
image2_id)
http = httplib2.Http()
response, content = http.request(path, 'DELETE')
self.assertEqual(response.status, 200)
# 33. GET /images
# Verify no images are listed
path = "http://%s:%d/v1/images" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(response.status, 200)
images = jsonutils.loads(content)['images']
self.assertEqual(len(images), 0)
# 34. HEAD /images/detail
path = "http://%s:%d/v1/images/detail" % ("127.0.0.1", self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'HEAD')
self.assertEqual(405, response.status)
self.assertEqual('GET', response.get('allow'))
self.stop_servers()
|
tanglei528/glance
|
glance/tests/functional/v1/test_api.py
|
Python
|
apache-2.0
| 24,300 | 0 |
# Copyright 2017 ForgeFlow S.L.
# Copyright 2021 Tecnativa - Víctor Martínez
# License AGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).
from odoo import _, fields
from odoo.exceptions import UserError, ValidationError
from odoo.fields import Date
from odoo.tests.common import Form, SavepointCase
class TestAccountPaymentPartner(SavepointCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
cls.res_users_model = cls.env["res.users"]
cls.move_model = cls.env["account.move"]
cls.journal_model = cls.env["account.journal"]
cls.payment_mode_model = cls.env["account.payment.mode"]
cls.partner_bank_model = cls.env["res.partner.bank"]
# Refs
cls.company = cls.env.ref("base.main_company")
cls.acct_type_payable = cls.env.ref("account.data_account_type_payable")
cls.acct_type_receivable = cls.env.ref("account.data_account_type_receivable")
cls.acct_type_expenses = cls.env.ref("account.data_account_type_expenses")
cls.company_2 = cls.env["res.company"].create({"name": "Company 2"})
charts = cls.env["account.chart.template"].search([])
if charts:
cls.chart = charts[0]
else:
raise ValidationError(_("No Chart of Account Template has been defined !"))
old_company = cls.env.user.company_id
cls.env.user.company_id = cls.company_2.id
cls.chart.try_loading()
cls.env.user.company_id = old_company.id
# refs
cls.manual_out = cls.env.ref("account.account_payment_method_manual_out")
cls.manual_out.bank_account_required = True
cls.manual_in = cls.env.ref("account.account_payment_method_manual_in")
cls.journal_sale = cls.env["account.journal"].create(
{
"name": "Test Sales Journal",
"code": "tSAL",
"type": "sale",
"company_id": cls.company.id,
}
)
cls.journal_purchase = cls.env["account.journal"].create(
{
"name": "Test Purchases Journal",
"code": "tPUR",
"type": "purchase",
"company_id": cls.company.id,
}
)
cls.journal_c1 = cls.journal_model.create(
{
"name": "J1",
"code": "J1",
"type": "bank",
"company_id": cls.company.id,
"bank_acc_number": "123456",
}
)
cls.journal_c2 = cls.journal_model.create(
{
"name": "J2",
"code": "J2",
"type": "bank",
"company_id": cls.company_2.id,
"bank_acc_number": "552344",
}
)
cls.supplier_payment_mode = cls.payment_mode_model.create(
{
"name": "Suppliers Bank 1",
"bank_account_link": "variable",
"payment_method_id": cls.manual_out.id,
"show_bank_account_from_journal": True,
"company_id": cls.company.id,
"fixed_journal_id": cls.journal_c1.id,
"variable_journal_ids": [(6, 0, [cls.journal_c1.id])],
}
)
cls.supplier_payment_mode_c2 = cls.payment_mode_model.create(
{
"name": "Suppliers Bank 2",
"bank_account_link": "variable",
"payment_method_id": cls.manual_out.id,
"company_id": cls.company_2.id,
"fixed_journal_id": cls.journal_c2.id,
"variable_journal_ids": [(6, 0, [cls.journal_c2.id])],
}
)
cls.customer_payment_mode = cls.payment_mode_model.create(
{
"name": "Customers to Bank 1",
"bank_account_link": "fixed",
"payment_method_id": cls.manual_in.id,
"company_id": cls.company.id,
"fixed_journal_id": cls.journal_c1.id,
"refund_payment_mode_id": cls.supplier_payment_mode.id,
"variable_journal_ids": [(6, 0, [cls.journal_c1.id])],
}
)
cls.supplier_payment_mode.write(
{"refund_payment_mode_id": cls.customer_payment_mode.id}
)
cls.customer = (
cls.env["res.partner"]
.with_company(cls.company.id)
.create(
{
"name": "Test customer",
"customer_payment_mode_id": cls.customer_payment_mode,
}
)
)
cls.supplier = (
cls.env["res.partner"]
.with_company(cls.company.id)
.create(
{
"name": "Test supplier",
"supplier_payment_mode_id": cls.supplier_payment_mode,
}
)
)
cls.supplier_bank = cls.env["res.partner.bank"].create(
{
"acc_number": "5345345",
"partner_id": cls.supplier.id,
"company_id": cls.company.id,
}
)
cls.supplier_bank_2 = cls.env["res.partner.bank"].create(
{
"acc_number": "3452342",
"partner_id": cls.supplier.id,
"company_id": cls.company_2.id,
}
)
cls.supplier.with_company(
cls.company_2.id
).supplier_payment_mode_id = cls.supplier_payment_mode_c2
cls.invoice_account = cls.env["account.account"].search(
[
("user_type_id", "=", cls.acct_type_payable.id),
("company_id", "=", cls.company.id),
],
limit=1,
)
cls.invoice_line_account = cls.env["account.account"].search(
[
("user_type_id", "=", cls.acct_type_expenses.id),
("company_id", "=", cls.company.id),
],
limit=1,
)
cls.journal_bank = cls.env["res.partner.bank"].create(
{
"acc_number": "GB95LOYD87430237296288",
"partner_id": cls.env.user.company_id.partner_id.id,
}
)
cls.journal = cls.env["account.journal"].create(
{
"name": "BANK TEST",
"code": "TEST",
"type": "bank",
"bank_account_id": cls.journal_bank.id,
}
)
cls.supplier_invoice = cls.move_model.create(
{
"partner_id": cls.supplier.id,
"invoice_date": fields.Date.today(),
"move_type": "in_invoice",
"journal_id": cls.journal_purchase.id,
}
)
def _create_invoice(self, default_move_type, partner):
move_form = Form(
self.env["account.move"].with_context(default_move_type=default_move_type)
)
move_form.partner_id = partner
move_form.invoice_date = Date.today()
with move_form.invoice_line_ids.new() as line_form:
line_form.product_id = self.env.ref("product.product_product_4")
line_form.name = "product that cost 100"
line_form.quantity = 1.0
line_form.price_unit = 100.0
line_form.account_id = self.invoice_line_account
return move_form.save()
def test_create_partner(self):
customer = (
self.env["res.partner"]
.with_company(self.company.id)
.create(
{
"name": "Test customer",
"customer_payment_mode_id": self.customer_payment_mode,
}
)
)
self.assertEqual(
customer.with_company(self.company.id).customer_payment_mode_id,
self.customer_payment_mode,
)
self.assertEqual(
customer.with_company(self.company_2.id).customer_payment_mode_id,
self.payment_mode_model,
)
def test_partner_id_changes_compute_partner_bank(self):
# Test _compute_partner_bank is executed when partner_id changes
move_form = Form(
self.env["account.move"].with_context(default_move_type="out_invoice")
)
self.assertFalse(move_form.partner_bank_id)
move_form.partner_id = self.customer
self.assertEqual(move_form.payment_mode_id, self.customer_payment_mode)
self.assertFalse(move_form.partner_bank_id)
def test_out_invoice_onchange(self):
# Test the onchange methods in invoice
invoice = self.move_model.new(
{
"partner_id": self.customer.id,
"move_type": "out_invoice",
"company_id": self.company.id,
}
)
self.assertEqual(invoice.payment_mode_id, self.customer_payment_mode)
invoice.company_id = self.company_2
self.assertEqual(invoice.payment_mode_id, self.payment_mode_model)
invoice.payment_mode_id = False
self.assertFalse(invoice.partner_bank_id)
def test_in_invoice_onchange(self):
# Test the onchange methods in invoice
self.manual_out.bank_account_required = True
invoice = self.move_model.new(
{
"partner_id": self.supplier.id,
"move_type": "in_invoice",
"invoice_date": fields.Date.today(),
"company_id": self.company.id,
}
)
self.assertEqual(invoice.payment_mode_id, self.supplier_payment_mode)
self.assertEqual(invoice.partner_bank_id, self.supplier_bank)
invoice.company_id = self.company_2
self.assertEqual(invoice.payment_mode_id, self.supplier_payment_mode_c2)
self.assertEqual(invoice.partner_bank_id, self.supplier_bank_2)
invoice.payment_mode_id = self.supplier_payment_mode
self.assertTrue(invoice.partner_bank_id)
self.manual_out.bank_account_required = False
invoice.payment_mode_id = self.supplier_payment_mode_c2
self.assertFalse(invoice.partner_bank_id)
invoice.partner_id = False
self.assertEqual(invoice.payment_mode_id, self.supplier_payment_mode_c2)
self.assertEqual(invoice.partner_bank_id, self.partner_bank_model)
def test_invoice_create_in_invoice(self):
invoice = self._create_invoice(
default_move_type="in_invoice", partner=self.supplier
)
invoice.action_post()
aml = invoice.line_ids.filtered(
lambda l: l.account_id.user_type_id == self.acct_type_payable
)
self.assertEqual(invoice.payment_mode_id, aml[0].payment_mode_id)
def test_invoice_create_out_invoice(self):
invoice = self._create_invoice(
default_move_type="out_invoice", partner=self.customer
)
invoice.action_post()
aml = invoice.line_ids.filtered(
lambda l: l.account_id.user_type_id == self.acct_type_receivable
)
self.assertEqual(invoice.payment_mode_id, aml[0].payment_mode_id)
def test_invoice_create_out_refund(self):
self.manual_out.bank_account_required = False
invoice = self._create_invoice(
default_move_type="out_refund", partner=self.customer
)
invoice.action_post()
self.assertEqual(
invoice.payment_mode_id,
self.customer.customer_payment_mode_id.refund_payment_mode_id,
)
def test_invoice_create_in_refund(self):
self.manual_in.bank_account_required = False
invoice = self._create_invoice(
default_move_type="in_refund", partner=self.supplier
)
invoice.action_post()
self.assertEqual(
invoice.payment_mode_id,
self.supplier.supplier_payment_mode_id.refund_payment_mode_id,
)
def test_invoice_constrains(self):
with self.assertRaises(UserError):
self.move_model.create(
{
"partner_id": self.supplier.id,
"move_type": "in_invoice",
"invoice_date": fields.Date.today(),
"company_id": self.company.id,
"payment_mode_id": self.supplier_payment_mode_c2.id,
}
)
def test_payment_mode_constrains_01(self):
self.move_model.create(
{
"partner_id": self.supplier.id,
"move_type": "in_invoice",
"invoice_date": fields.Date.today(),
"company_id": self.company.id,
}
)
with self.assertRaises(UserError):
self.supplier_payment_mode.company_id = self.company_2
def test_payment_mode_constrains_02(self):
self.move_model.create(
{
"date": fields.Date.today(),
"journal_id": self.journal_sale.id,
"name": "/",
"ref": "reference",
"state": "draft",
"invoice_line_ids": [
(
0,
0,
{
"account_id": self.invoice_account.id,
"credit": 1000,
"debit": 0,
"name": "Test",
"ref": "reference",
},
),
(
0,
0,
{
"account_id": self.invoice_line_account.id,
"credit": 0,
"debit": 1000,
"name": "Test",
"ref": "reference",
},
),
],
}
)
with self.assertRaises(UserError):
self.supplier_payment_mode.company_id = self.company_2
def test_invoice_in_refund(self):
invoice = self._create_invoice(
default_move_type="in_invoice", partner=self.supplier
)
invoice.partner_bank_id = False
invoice.action_post()
        # Let's create a refund invoice for invoice_1.
        # I refund the invoice using the Refund button.
refund_invoice_wizard = (
self.env["account.move.reversal"]
.with_context(
{
"active_ids": [invoice.id],
"active_id": invoice.id,
"active_model": "account.move",
}
)
.create({"refund_method": "refund", "reason": "reason test create"})
)
refund_invoice = self.move_model.browse(
refund_invoice_wizard.reverse_moves()["res_id"]
)
self.assertEqual(
refund_invoice.payment_mode_id,
invoice.payment_mode_id.refund_payment_mode_id,
)
self.assertEqual(refund_invoice.partner_bank_id, invoice.partner_bank_id)
def test_invoice_out_refund(self):
invoice = self._create_invoice(
default_move_type="out_invoice", partner=self.customer
)
invoice.partner_bank_id = False
invoice.action_post()
        # Let's create a refund invoice for invoice_1.
        # I refund the invoice using the Refund button.
refund_invoice_wizard = (
self.env["account.move.reversal"]
.with_context(
{
"active_ids": [invoice.id],
"active_id": invoice.id,
"active_model": "account.move",
}
)
.create({"refund_method": "refund", "reason": "reason test create"})
)
refund_invoice = self.move_model.browse(
refund_invoice_wizard.reverse_moves()["res_id"]
)
self.assertEqual(
refund_invoice.payment_mode_id,
invoice.payment_mode_id.refund_payment_mode_id,
)
self.assertEqual(refund_invoice.partner_bank_id, invoice.partner_bank_id)
def test_partner(self):
self.customer.write({"customer_payment_mode_id": self.customer_payment_mode.id})
self.assertEqual(
self.customer.customer_payment_mode_id, self.customer_payment_mode
)
def test_partner_onchange(self):
customer_invoice = self.move_model.create(
{"partner_id": self.customer.id, "move_type": "out_invoice"}
)
self.assertEqual(customer_invoice.payment_mode_id, self.customer_payment_mode)
self.assertEqual(self.supplier_invoice.partner_bank_id, self.supplier_bank)
vals = {"partner_id": self.customer.id, "move_type": "out_refund"}
invoice = self.move_model.new(vals)
self.assertEqual(invoice.payment_mode_id, self.supplier_payment_mode)
vals = {"partner_id": self.supplier.id, "move_type": "in_refund"}
invoice = self.move_model.new(vals)
self.assertEqual(invoice.payment_mode_id, self.customer_payment_mode)
vals = {"partner_id": False, "move_type": "out_invoice"}
invoice = self.move_model.new(vals)
self.assertFalse(invoice.payment_mode_id)
vals = {"partner_id": False, "move_type": "out_refund"}
invoice = self.move_model.new(vals)
self.assertFalse(invoice.partner_bank_id)
vals = {"partner_id": False, "move_type": "in_invoice"}
invoice = self.move_model.new(vals)
self.assertFalse(invoice.partner_bank_id)
vals = {"partner_id": False, "move_type": "in_refund"}
invoice = self.move_model.new(vals)
self.assertFalse(invoice.partner_bank_id)
def test_onchange_payment_mode_id(self):
mode = self.supplier_payment_mode
mode.payment_method_id.bank_account_required = True
self.supplier_invoice.partner_bank_id = self.supplier_bank.id
self.supplier_invoice.payment_mode_id = mode.id
self.assertEqual(self.supplier_invoice.partner_bank_id, self.supplier_bank)
mode.payment_method_id.bank_account_required = False
self.assertEqual(self.supplier_invoice.partner_bank_id, self.supplier_bank)
self.supplier_invoice.payment_mode_id = False
self.assertFalse(self.supplier_invoice.partner_bank_id)
def test_print_report(self):
self.supplier_invoice.partner_bank_id = self.supplier_bank.id
report = self.env.ref("account.account_invoices")
res = str(report._render_qweb_html(self.supplier_invoice.ids)[0])
self.assertIn(self.supplier_bank.acc_number, res)
payment_mode = self.supplier_payment_mode
payment_mode.show_bank_account_from_journal = True
self.supplier_invoice.payment_mode_id = payment_mode.id
self.supplier_invoice.partner_bank_id = False
res = str(report._render_qweb_html(self.supplier_invoice.ids)[0])
self.assertIn(self.journal_c1.bank_acc_number, res)
payment_mode.bank_account_link = "variable"
payment_mode.variable_journal_ids = [(6, 0, self.journal.ids)]
res = str(report._render_qweb_html(self.supplier_invoice.ids)[0])
self.assertIn(self.journal_bank.acc_number, res)
def test_filter_type_domain(self):
in_invoice = self.move_model.create(
{
"partner_id": self.supplier.id,
"move_type": "in_invoice",
"invoice_date": fields.Date.today(),
"journal_id": self.journal_purchase.id,
}
)
self.assertEqual(in_invoice.payment_mode_filter_type_domain, "outbound")
self.assertEqual(
in_invoice.partner_bank_filter_type_domain, in_invoice.commercial_partner_id
)
out_refund = self.move_model.create(
{
"partner_id": self.customer.id,
"move_type": "out_refund",
"journal_id": self.journal_sale.id,
}
)
self.assertEqual(out_refund.payment_mode_filter_type_domain, "outbound")
self.assertEqual(
out_refund.partner_bank_filter_type_domain, out_refund.commercial_partner_id
)
in_refund = self.move_model.create(
{
"partner_id": self.supplier.id,
"move_type": "in_refund",
"journal_id": self.journal_purchase.id,
}
)
self.assertEqual(in_refund.payment_mode_filter_type_domain, "inbound")
self.assertEqual(
in_refund.partner_bank_filter_type_domain, in_refund.bank_partner_id
)
out_invoice = self.move_model.create(
{
"partner_id": self.customer.id,
"move_type": "out_invoice",
"journal_id": self.journal_sale.id,
}
)
self.assertEqual(out_invoice.payment_mode_filter_type_domain, "inbound")
self.assertEqual(
out_invoice.partner_bank_filter_type_domain, out_invoice.bank_partner_id
)
def test_account_move_payment_mode_id_default(self):
payment_mode = self.env.ref("account_payment_mode.payment_mode_inbound_dd1")
field = self.env["ir.model.fields"].search(
[
("model_id.model", "=", self.move_model._name),
("name", "=", "payment_mode_id"),
]
)
move_form = Form(self.move_model.with_context(default_type="out_invoice"))
self.assertFalse(move_form.payment_mode_id)
self.env["ir.default"].create(
{"field_id": field.id, "json_value": payment_mode.id}
)
move_form = Form(self.move_model.with_context(default_type="out_invoice"))
self.assertEqual(move_form.payment_mode_id, payment_mode)
| OCA/bank-payment | account_payment_partner/tests/test_account_payment_partner.py | Python | agpl-3.0 | 21,944 | 0.001276 |
from __future__ import unicode_literals
import base64
import datetime
import hashlib
import json
import netrc
import os
import re
import socket
import sys
import time
import xml.etree.ElementTree
from ..compat import (
compat_cookiejar,
compat_http_client,
compat_urllib_error,
compat_urllib_parse_urlparse,
compat_urlparse,
compat_str,
)
from ..utils import (
clean_html,
compiled_regex_type,
ExtractorError,
float_or_none,
int_or_none,
RegexNotFoundError,
sanitize_filename,
unescapeHTML,
)
_NO_DEFAULT = object()
class InfoExtractor(object):
"""Information Extractor class.
Information extractors are the classes that, given a URL, extract
information about the video (or videos) the URL refers to. This
information includes the real video URL, the video title, author and
others. The information is stored in a dictionary which is then
passed to the YoutubeDL. The YoutubeDL processes this
information possibly downloading the video to the file system, among
other possible outcomes.
    The type field determines the type of the result.
By far the most common value (and the default if _type is missing) is
"video", which indicates a single video.
For a video, the dictionaries must include the following fields:
id: Video identifier.
title: Video title, unescaped.
Additionally, it must contain either a formats entry or a url one:
formats: A list of dictionaries for each format available, ordered
from worst to best quality.
Potential fields:
* url Mandatory. The URL of the video file
* ext Will be calculated from url if missing
* format A human-readable description of the format
("mp4 container with h264/opus").
                                Calculated from the format_id, width, height,
and format_note fields if missing.
* format_id A short description of the format
("mp4_h264_opus" or "19").
Technically optional, but strongly recommended.
* format_note Additional info about the format
("3D" or "DASH video")
* width Width of the video, if known
* height Height of the video, if known
* resolution Textual description of width and height
* tbr Average bitrate of audio and video in KBit/s
* abr Average audio bitrate in KBit/s
* acodec Name of the audio codec in use
* asr Audio sampling rate in Hertz
* vbr Average video bitrate in KBit/s
* fps Frame rate
* vcodec Name of the video codec in use
* container Name of the container format
* filesize The number of bytes, if known in advance
* filesize_approx An estimate for the number of bytes
* player_url SWF Player URL (used for rtmpdump).
* protocol The protocol that will be used for the actual
download, lower-case.
"http", "https", "rtsp", "rtmp", "m3u8" or so.
* preference Order number of this format. If this field is
present and not None, the formats get sorted
by this field, regardless of all other values.
-1 for default (order by other properties),
-2 or smaller for less than default.
* language_preference Is this in the correct requested
language?
10 if it's what the URL is about,
-1 for default (don't know),
-10 otherwise, other values reserved for now.
* quality Order number of the video quality of this
format, irrespective of the file format.
-1 for default (order by other properties),
-2 or smaller for less than default.
* source_preference Order number for this video source
(quality takes higher priority)
-1 for default (order by other properties),
-2 or smaller for less than default.
* http_referer HTTP Referer header value to set.
* http_method HTTP method to use for the download.
* http_headers A dictionary of additional HTTP headers
to add to the request.
* http_post_data Additional data to send with a POST
request.
url: Final video URL.
ext: Video filename extension.
format: The video format, defaults to ext (used for --get-format)
player_url: SWF Player URL (used for rtmpdump).
The following fields are optional:
alt_title: A secondary title of the video.
display_id An alternative identifier for the video, not necessarily
unique, but available before title. Typically, id is
something like "4234987", title "Dancing naked mole rats",
and display_id "dancing-naked-mole-rats"
thumbnails: A list of dictionaries, with the following entries:
* "url"
* "width" (optional, int)
* "height" (optional, int)
* "resolution" (optional, string "{width}x{height"},
deprecated)
thumbnail: Full URL to a video thumbnail image.
description: Full video description.
uploader: Full name of the video uploader.
timestamp: UNIX timestamp of the moment the video became available.
upload_date: Video upload date (YYYYMMDD).
If not explicitly set, calculated from timestamp.
uploader_id: Nickname or id of the video uploader.
location: Physical location where the video was filmed.
subtitles: The subtitle file contents as a dictionary in the format
{language: subtitles}.
duration: Length of the video in seconds, as an integer.
view_count: How many users have watched the video on the platform.
like_count: Number of positive ratings of the video
dislike_count: Number of negative ratings of the video
comment_count: Number of comments on the video
age_limit: Age restriction for the video, as an integer (years)
webpage_url: The url to the video webpage, if given to youtube-dl it
                    should allow getting the same result again. (It will be set
by YoutubeDL if it's missing)
categories: A list of categories that the video falls in, for example
["Sports", "Berlin"]
is_live: True, False, or None (=unknown). Whether this video is a
live stream that goes on instead of a fixed-length video.
Unless mentioned otherwise, the fields should be Unicode strings.
Unless mentioned otherwise, None is equivalent to absence of information.
_type "playlist" indicates multiple videos.
There must be a key "entries", which is a list, an iterable, or a PagedList
object, each element of which is a valid dictionary by this specification.
Additionally, playlists can have "title" and "id" attributes with the same
semantics as videos (see above).
_type "multi_video" indicates that there are multiple videos that
    form a single show, for example multiple acts of an opera or a TV episode.
It must have an entries key like a playlist and contain all the keys
required for a video at the same time.
_type "url" indicates that the video must be extracted from another
location, possibly by a different extractor. Its only required key is:
"url" - the next URL to extract.
The key "ie_key" can be set to the class name (minus the trailing "IE",
e.g. "Youtube") if the extractor class is known in advance.
Additionally, the dictionary may have any properties of the resolved entity
known in advance, for example "title" if the title of the referred video is
known ahead of time.
_type "url_transparent" entities have the same specification as "url", but
indicate that the given additional information is more precise than the one
associated with the resolved URL.
This is useful when a site employs a video service that hosts the video and
its technical metadata, but that video service does not embed a useful
title, description etc.
Subclasses of this one should re-define the _real_initialize() and
_real_extract() methods and define a _VALID_URL regexp.
Probably, they should also be added to the list of extractors.
Finally, the _WORKING attribute should be set to False for broken IEs
in order to warn the users and skip the tests.
"""
_ready = False
_downloader = None
_WORKING = True
def __init__(self, downloader=None):
"""Constructor. Receives an optional downloader."""
self._ready = False
self.set_downloader(downloader)
@classmethod
def suitable(cls, url):
"""Receives a URL and returns True if suitable for this IE."""
# This does not use has/getattr intentionally - we want to know whether
# we have cached the regexp for *this* class, whereas getattr would also
# match the superclass
if '_VALID_URL_RE' not in cls.__dict__:
cls._VALID_URL_RE = re.compile(cls._VALID_URL)
return cls._VALID_URL_RE.match(url) is not None
@classmethod
def _match_id(cls, url):
if '_VALID_URL_RE' not in cls.__dict__:
cls._VALID_URL_RE = re.compile(cls._VALID_URL)
m = cls._VALID_URL_RE.match(url)
assert m
return m.group('id')
@classmethod
def working(cls):
"""Getter method for _WORKING."""
return cls._WORKING
def initialize(self):
"""Initializes an instance (authentication, etc)."""
if not self._ready:
self._real_initialize()
self._ready = True
def extract(self, url):
"""Extracts URL information and returns it in list of dicts."""
self.initialize()
return self._real_extract(url)
def set_downloader(self, downloader):
"""Sets the downloader for this IE."""
self._downloader = downloader
def _real_initialize(self):
"""Real initialization process. Redefine in subclasses."""
pass
def _real_extract(self, url):
"""Real extraction process. Redefine in subclasses."""
pass
@classmethod
def ie_key(cls):
"""A string for getting the InfoExtractor with get_info_extractor"""
return cls.__name__[:-2]
@property
def IE_NAME(self):
return type(self).__name__[:-2]
def _request_webpage(self, url_or_request, video_id, note=None, errnote=None, fatal=True):
""" Returns the response handle """
if note is None:
self.report_download_webpage(video_id)
elif note is not False:
if video_id is None:
self.to_screen('%s' % (note,))
else:
self.to_screen('%s: %s' % (video_id, note))
try:
return self._downloader.urlopen(url_or_request)
except (compat_urllib_error.URLError, compat_http_client.HTTPException, socket.error) as err:
if errnote is False:
return False
if errnote is None:
errnote = 'Unable to download webpage'
errmsg = '%s: %s' % (errnote, compat_str(err))
if fatal:
raise ExtractorError(errmsg, sys.exc_info()[2], cause=err)
else:
self._downloader.report_warning(errmsg)
return False
def _download_webpage_handle(self, url_or_request, video_id, note=None, errnote=None, fatal=True):
""" Returns a tuple (page content as string, URL handle) """
# Strip hashes from the URL (#1038)
if isinstance(url_or_request, (compat_str, str)):
url_or_request = url_or_request.partition('#')[0]
urlh = self._request_webpage(url_or_request, video_id, note, errnote, fatal)
if urlh is False:
assert not fatal
return False
content = self._webpage_read_content(urlh, url_or_request, video_id, note, errnote, fatal)
return (content, urlh)
def _webpage_read_content(self, urlh, url_or_request, video_id, note=None, errnote=None, fatal=True, prefix=None):
content_type = urlh.headers.get('Content-Type', '')
webpage_bytes = urlh.read()
if prefix is not None:
webpage_bytes = prefix + webpage_bytes
m = re.match(r'[a-zA-Z0-9_.-]+/[a-zA-Z0-9_.-]+\s*;\s*charset=(.+)', content_type)
if m:
encoding = m.group(1)
else:
m = re.search(br'<meta[^>]+charset=[\'"]?([^\'")]+)[ /\'">]',
webpage_bytes[:1024])
if m:
encoding = m.group(1).decode('ascii')
elif webpage_bytes.startswith(b'\xff\xfe'):
encoding = 'utf-16'
else:
encoding = 'utf-8'
if self._downloader.params.get('dump_intermediate_pages', False):
try:
url = url_or_request.get_full_url()
except AttributeError:
url = url_or_request
self.to_screen('Dumping request to ' + url)
dump = base64.b64encode(webpage_bytes).decode('ascii')
self._downloader.to_screen(dump)
if self._downloader.params.get('write_pages', False):
try:
url = url_or_request.get_full_url()
except AttributeError:
url = url_or_request
basen = '%s_%s' % (video_id, url)
if len(basen) > 240:
h = '___' + hashlib.md5(basen.encode('utf-8')).hexdigest()
basen = basen[:240 - len(h)] + h
raw_filename = basen + '.dump'
filename = sanitize_filename(raw_filename, restricted=True)
self.to_screen('Saving request to ' + filename)
# Working around MAX_PATH limitation on Windows (see
# http://msdn.microsoft.com/en-us/library/windows/desktop/aa365247(v=vs.85).aspx)
if os.name == 'nt':
absfilepath = os.path.abspath(filename)
if len(absfilepath) > 259:
filename = '\\\\?\\' + absfilepath
with open(filename, 'wb') as outf:
outf.write(webpage_bytes)
try:
content = webpage_bytes.decode(encoding, 'replace')
except LookupError:
content = webpage_bytes.decode('utf-8', 'replace')
if ('<title>Access to this site is blocked</title>' in content and
'Websense' in content[:512]):
msg = 'Access to this webpage has been blocked by Websense filtering software in your network.'
blocked_iframe = self._html_search_regex(
r'<iframe src="([^"]+)"', content,
'Websense information URL', default=None)
if blocked_iframe:
msg += ' Visit %s for more details' % blocked_iframe
raise ExtractorError(msg, expected=True)
return content
def _download_webpage(self, url_or_request, video_id, note=None, errnote=None, fatal=True):
""" Returns the data of the page as a string """
res = self._download_webpage_handle(url_or_request, video_id, note, errnote, fatal)
if res is False:
return res
else:
content, _ = res
return content
def _download_xml(self, url_or_request, video_id,
note='Downloading XML', errnote='Unable to download XML',
transform_source=None, fatal=True):
"""Return the xml as an xml.etree.ElementTree.Element"""
xml_string = self._download_webpage(
url_or_request, video_id, note, errnote, fatal=fatal)
if xml_string is False:
return xml_string
if transform_source:
xml_string = transform_source(xml_string)
return xml.etree.ElementTree.fromstring(xml_string.encode('utf-8'))
def _download_json(self, url_or_request, video_id,
note='Downloading JSON metadata',
errnote='Unable to download JSON metadata',
transform_source=None,
fatal=True):
json_string = self._download_webpage(
url_or_request, video_id, note, errnote, fatal=fatal)
if (not fatal) and json_string is False:
return None
return self._parse_json(
json_string, video_id, transform_source=transform_source, fatal=fatal)
def _parse_json(self, json_string, video_id, transform_source=None, fatal=True):
if transform_source:
json_string = transform_source(json_string)
try:
return json.loads(json_string)
except ValueError as ve:
errmsg = '%s: Failed to parse JSON ' % video_id
if fatal:
raise ExtractorError(errmsg, cause=ve)
else:
self.report_warning(errmsg + str(ve))
def report_warning(self, msg, video_id=None):
idstr = '' if video_id is None else '%s: ' % video_id
self._downloader.report_warning(
'[%s] %s%s' % (self.IE_NAME, idstr, msg))
def to_screen(self, msg):
"""Print msg to screen, prefixing it with '[ie_name]'"""
self._downloader.to_screen('[%s] %s' % (self.IE_NAME, msg))
def report_extraction(self, id_or_name):
"""Report information extraction."""
self.to_screen('%s: Extracting information' % id_or_name)
def report_download_webpage(self, video_id):
"""Report webpage download."""
self.to_screen('%s: Downloading webpage' % video_id)
def report_age_confirmation(self):
"""Report attempt to confirm age."""
self.to_screen('Confirming age')
def report_login(self):
"""Report attempt to log in."""
self.to_screen('Logging in')
# Methods for following #608
@staticmethod
def url_result(url, ie=None, video_id=None):
"""Returns a url that points to a page that should be processed"""
# TODO: ie should be the class used for getting the info
video_info = {'_type': 'url',
'url': url,
'ie_key': ie}
if video_id is not None:
video_info['id'] = video_id
return video_info
@staticmethod
def playlist_result(entries, playlist_id=None, playlist_title=None, playlist_description=None):
"""Returns a playlist"""
video_info = {'_type': 'playlist',
'entries': entries}
if playlist_id:
video_info['id'] = playlist_id
if playlist_title:
video_info['title'] = playlist_title
if playlist_description:
video_info['description'] = playlist_description
return video_info
def _search_regex(self, pattern, string, name, default=_NO_DEFAULT, fatal=True, flags=0, group=None):
"""
Perform a regex search on the given string, using a single or a list of
patterns returning the first matching group.
In case of failure return a default value or raise a WARNING or a
RegexNotFoundError, depending on fatal, specifying the field name.
"""
if isinstance(pattern, (str, compat_str, compiled_regex_type)):
mobj = re.search(pattern, string, flags)
else:
for p in pattern:
mobj = re.search(p, string, flags)
if mobj:
break
if os.name != 'nt' and sys.stderr.isatty():
_name = '\033[0;34m%s\033[0m' % name
else:
_name = name
if mobj:
if group is None:
# return the first matching group
return next(g for g in mobj.groups() if g is not None)
else:
return mobj.group(group)
elif default is not _NO_DEFAULT:
return default
elif fatal:
raise RegexNotFoundError('Unable to extract %s' % _name)
else:
self._downloader.report_warning('unable to extract %s; '
'please report this issue on http://yt-dl.org/bug' % _name)
return None
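    # Illustrative (hypothetical) use of _search_regex, assuming `webpage` holds
    # downloaded HTML; the pattern and field name are invented for this sketch:
    #     title = self._search_regex(
    #         r'<h1[^>]*>(.+?)</h1>', webpage, 'title', default=None)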
def _html_search_regex(self, pattern, string, name, default=_NO_DEFAULT, fatal=True, flags=0, group=None):
"""
Like _search_regex, but strips HTML tags and unescapes entities.
"""
res = self._search_regex(pattern, string, name, default, fatal, flags, group)
if res:
return clean_html(res).strip()
else:
return res
def _get_login_info(self):
"""
        Get the login info as (username, password)
It will look in the netrc file using the _NETRC_MACHINE value
If there's no info available, return (None, None)
"""
if self._downloader is None:
return (None, None)
username = None
password = None
downloader_params = self._downloader.params
# Attempt to use provided username and password or .netrc data
if downloader_params.get('username', None) is not None:
username = downloader_params['username']
password = downloader_params['password']
elif downloader_params.get('usenetrc', False):
try:
info = netrc.netrc().authenticators(self._NETRC_MACHINE)
if info is not None:
username = info[0]
password = info[2]
else:
raise netrc.NetrcParseError('No authenticators for %s' % self._NETRC_MACHINE)
except (IOError, netrc.NetrcParseError) as err:
self._downloader.report_warning('parsing .netrc: %s' % compat_str(err))
return (username, password)
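    # Example .netrc entry that netrc.netrc().authenticators(self._NETRC_MACHINE)
    # would pick up, assuming a subclass sets _NETRC_MACHINE = 'example' (all
    # values here are placeholders):
    #     machine example login myuser password mypassword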
def _get_tfa_info(self):
"""
Get the two-factor authentication info
TODO - asking the user will be required for sms/phone verify
currently just uses the command line option
If there's no info available, return None
"""
if self._downloader is None:
return None
downloader_params = self._downloader.params
if downloader_params.get('twofactor', None) is not None:
return downloader_params['twofactor']
return None
# Helper functions for extracting OpenGraph info
@staticmethod
def _og_regexes(prop):
content_re = r'content=(?:"([^>]+?)"|\'([^>]+?)\')'
property_re = r'(?:name|property)=[\'"]og:%s[\'"]' % re.escape(prop)
template = r'<meta[^>]+?%s[^>]+?%s'
return [
template % (property_re, content_re),
template % (content_re, property_re),
]
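    # Example of a tag the patterns from _og_regexes('title') are meant to match
    # (illustrative HTML only):
    #     <meta property="og:title" content="Some video title" />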
def _og_search_property(self, prop, html, name=None, **kargs):
if name is None:
name = 'OpenGraph %s' % prop
escaped = self._search_regex(self._og_regexes(prop), html, name, flags=re.DOTALL, **kargs)
if escaped is None:
return None
return unescapeHTML(escaped)
def _og_search_thumbnail(self, html, **kargs):
return self._og_search_property('image', html, 'thumbnail url', fatal=False, **kargs)
def _og_search_description(self, html, **kargs):
return self._og_search_property('description', html, fatal=False, **kargs)
def _og_search_title(self, html, **kargs):
return self._og_search_property('title', html, **kargs)
def _og_search_video_url(self, html, name='video url', secure=True, **kargs):
regexes = self._og_regexes('video') + self._og_regexes('video:url')
if secure:
regexes = self._og_regexes('video:secure_url') + regexes
return self._html_search_regex(regexes, html, name, **kargs)
def _og_search_url(self, html, **kargs):
return self._og_search_property('url', html, **kargs)
def _html_search_meta(self, name, html, display_name=None, fatal=False, **kwargs):
if display_name is None:
display_name = name
return self._html_search_regex(
r'''(?isx)<meta
(?=[^>]+(?:itemprop|name|property)=(["\']?)%s\1)
                    [^>]+content=(["\'])(?P<content>.*?)\2''' % re.escape(name),
html, display_name, fatal=fatal, group='content', **kwargs)
def _dc_search_uploader(self, html):
return self._html_search_meta('dc.creator', html, 'uploader')
def _rta_search(self, html):
# See http://www.rtalabel.org/index.php?content=howtofaq#single
if re.search(r'(?ix)<meta\s+name="rating"\s+'
r' content="RTA-5042-1996-1400-1577-RTA"',
html):
return 18
return 0
def _media_rating_search(self, html):
# See http://www.tjg-designs.com/WP/metadata-code-examples-adding-metadata-to-your-web-pages/
rating = self._html_search_meta('rating', html)
if not rating:
return None
RATING_TABLE = {
'safe for kids': 0,
'general': 8,
'14 years': 14,
'mature': 17,
'restricted': 19,
}
return RATING_TABLE.get(rating.lower(), None)
def _twitter_search_player(self, html):
return self._html_search_meta('twitter:player', html,
'twitter card player')
def _sort_formats(self, formats):
if not formats:
raise ExtractorError('No video formats found')
def _formats_key(f):
# TODO remove the following workaround
from ..utils import determine_ext
if not f.get('ext') and 'url' in f:
f['ext'] = determine_ext(f['url'])
preference = f.get('preference')
if preference is None:
proto = f.get('protocol')
if proto is None:
proto = compat_urllib_parse_urlparse(f.get('url', '')).scheme
preference = 0 if proto in ['http', 'https'] else -0.1
if f.get('ext') in ['f4f', 'f4m']: # Not yet supported
preference -= 0.5
if f.get('vcodec') == 'none': # audio only
if self._downloader.params.get('prefer_free_formats'):
ORDER = ['aac', 'mp3', 'm4a', 'webm', 'ogg', 'opus']
else:
ORDER = ['webm', 'opus', 'ogg', 'mp3', 'aac', 'm4a']
ext_preference = 0
try:
audio_ext_preference = ORDER.index(f['ext'])
except ValueError:
audio_ext_preference = -1
else:
if self._downloader.params.get('prefer_free_formats'):
ORDER = ['flv', 'mp4', 'webm']
else:
ORDER = ['webm', 'flv', 'mp4']
try:
ext_preference = ORDER.index(f['ext'])
except ValueError:
ext_preference = -1
audio_ext_preference = 0
return (
preference,
f.get('language_preference') if f.get('language_preference') is not None else -1,
f.get('quality') if f.get('quality') is not None else -1,
f.get('height') if f.get('height') is not None else -1,
f.get('width') if f.get('width') is not None else -1,
ext_preference,
f.get('tbr') if f.get('tbr') is not None else -1,
f.get('vbr') if f.get('vbr') is not None else -1,
f.get('abr') if f.get('abr') is not None else -1,
audio_ext_preference,
f.get('fps') if f.get('fps') is not None else -1,
f.get('filesize') if f.get('filesize') is not None else -1,
f.get('filesize_approx') if f.get('filesize_approx') is not None else -1,
f.get('source_preference') if f.get('source_preference') is not None else -1,
f.get('format_id'),
)
formats.sort(key=_formats_key)
def http_scheme(self):
""" Either "http:" or "https:", depending on the user's preferences """
return (
'http:'
if self._downloader.params.get('prefer_insecure', False)
else 'https:')
def _proto_relative_url(self, url, scheme=None):
if url is None:
return url
if url.startswith('//'):
if scheme is None:
scheme = self.http_scheme()
return scheme + url
else:
return url
def _sleep(self, timeout, video_id, msg_template=None):
if msg_template is None:
msg_template = '%(video_id)s: Waiting for %(timeout)s seconds'
msg = msg_template % {'video_id': video_id, 'timeout': timeout}
self.to_screen(msg)
time.sleep(timeout)
def _extract_f4m_formats(self, manifest_url, video_id):
manifest = self._download_xml(
manifest_url, video_id, 'Downloading f4m manifest',
'Unable to download f4m manifest')
formats = []
media_nodes = manifest.findall('{http://ns.adobe.com/f4m/1.0}media')
for i, media_el in enumerate(media_nodes):
tbr = int_or_none(media_el.attrib.get('bitrate'))
format_id = 'f4m-%d' % (i if tbr is None else tbr)
formats.append({
'format_id': format_id,
'url': manifest_url,
'ext': 'flv',
'tbr': tbr,
'width': int_or_none(media_el.attrib.get('width')),
'height': int_or_none(media_el.attrib.get('height')),
})
self._sort_formats(formats)
return formats
def _extract_m3u8_formats(self, m3u8_url, video_id, ext=None,
entry_protocol='m3u8', preference=None):
formats = [{
'format_id': 'm3u8-meta',
'url': m3u8_url,
'ext': ext,
'protocol': 'm3u8',
'preference': -1,
'resolution': 'multiple',
'format_note': 'Quality selection URL',
}]
format_url = lambda u: (
u
if re.match(r'^https?://', u)
else compat_urlparse.urljoin(m3u8_url, u))
m3u8_doc = self._download_webpage(
m3u8_url, video_id,
note='Downloading m3u8 information',
errnote='Failed to download m3u8 information')
last_info = None
kv_rex = re.compile(
r'(?P<key>[a-zA-Z_-]+)=(?P<val>"[^"]+"|[^",]+)(?:,|$)')
for line in m3u8_doc.splitlines():
if line.startswith('#EXT-X-STREAM-INF:'):
last_info = {}
for m in kv_rex.finditer(line):
v = m.group('val')
if v.startswith('"'):
v = v[1:-1]
last_info[m.group('key')] = v
elif line.startswith('#') or not line.strip():
continue
else:
if last_info is None:
formats.append({'url': format_url(line)})
continue
tbr = int_or_none(last_info.get('BANDWIDTH'), scale=1000)
f = {
'format_id': 'm3u8-%d' % (tbr if tbr else len(formats)),
'url': format_url(line.strip()),
'tbr': tbr,
'ext': ext,
'protocol': entry_protocol,
'preference': preference,
}
codecs = last_info.get('CODECS')
if codecs:
                    # TODO: it looks like the video codec does not always go first
va_codecs = codecs.split(',')
if va_codecs[0]:
f['vcodec'] = va_codecs[0].partition('.')[0]
if len(va_codecs) > 1 and va_codecs[1]:
f['acodec'] = va_codecs[1].partition('.')[0]
resolution = last_info.get('RESOLUTION')
if resolution:
width_str, height_str = resolution.split('x')
f['width'] = int(width_str)
f['height'] = int(height_str)
formats.append(f)
last_info = {}
self._sort_formats(formats)
return formats
# TODO: improve extraction
def _extract_smil_formats(self, smil_url, video_id):
smil = self._download_xml(
smil_url, video_id, 'Downloading SMIL file',
'Unable to download SMIL file')
base = smil.find('./head/meta').get('base')
formats = []
rtmp_count = 0
for video in smil.findall('./body/switch/video'):
src = video.get('src')
if not src:
continue
bitrate = int_or_none(video.get('system-bitrate') or video.get('systemBitrate'), 1000)
width = int_or_none(video.get('width'))
height = int_or_none(video.get('height'))
proto = video.get('proto')
if not proto:
if base:
if base.startswith('rtmp'):
proto = 'rtmp'
elif base.startswith('http'):
proto = 'http'
ext = video.get('ext')
if proto == 'm3u8':
formats.extend(self._extract_m3u8_formats(src, video_id, ext))
elif proto == 'rtmp':
rtmp_count += 1
streamer = video.get('streamer') or base
formats.append({
'url': streamer,
'play_path': src,
'ext': 'flv',
'format_id': 'rtmp-%d' % (rtmp_count if bitrate is None else bitrate),
'tbr': bitrate,
'width': width,
'height': height,
})
self._sort_formats(formats)
return formats
def _live_title(self, name):
""" Generate the title for a live video """
now = datetime.datetime.now()
now_str = now.strftime("%Y-%m-%d %H:%M")
return name + ' ' + now_str
def _int(self, v, name, fatal=False, **kwargs):
res = int_or_none(v, **kwargs)
if 'get_attr' in kwargs:
print(getattr(v, kwargs['get_attr']))
if res is None:
msg = 'Failed to extract %s: Could not parse value %r' % (name, v)
if fatal:
raise ExtractorError(msg)
else:
self._downloader.report_warning(msg)
return res
def _float(self, v, name, fatal=False, **kwargs):
res = float_or_none(v, **kwargs)
if res is None:
msg = 'Failed to extract %s: Could not parse value %r' % (name, v)
if fatal:
raise ExtractorError(msg)
else:
self._downloader.report_warning(msg)
return res
def _set_cookie(self, domain, name, value, expire_time=None):
cookie = compat_cookiejar.Cookie(
0, name, value, None, None, domain, None,
None, '/', True, False, expire_time, '', None, None, None)
self._downloader.cookiejar.set_cookie(cookie)
class SearchInfoExtractor(InfoExtractor):
"""
    Base class for paged search query extractors.
They accept urls in the format _SEARCH_KEY(|all|[0-9]):{query}
Instances should define _SEARCH_KEY and _MAX_RESULTS.
"""
@classmethod
def _make_valid_url(cls):
return r'%s(?P<prefix>|[1-9][0-9]*|all):(?P<query>[\s\S]+)' % cls._SEARCH_KEY
@classmethod
def suitable(cls, url):
return re.match(cls._make_valid_url(), url) is not None
def _real_extract(self, query):
mobj = re.match(self._make_valid_url(), query)
if mobj is None:
raise ExtractorError('Invalid search query "%s"' % query)
prefix = mobj.group('prefix')
query = mobj.group('query')
if prefix == '':
return self._get_n_results(query, 1)
elif prefix == 'all':
return self._get_n_results(query, self._MAX_RESULTS)
else:
n = int(prefix)
if n <= 0:
raise ExtractorError('invalid download number %s for query "%s"' % (n, query))
elif n > self._MAX_RESULTS:
self._downloader.report_warning('%s returns max %i results (you requested %i)' % (self._SEARCH_KEY, self._MAX_RESULTS, n))
n = self._MAX_RESULTS
return self._get_n_results(query, n)
def _get_n_results(self, query, n):
"""Get a specified number of results for a query"""
raise NotImplementedError("This method must be implemented by subclasses")
@property
def SEARCH_KEY(self):
return self._SEARCH_KEY
| fluks/youtube-dl | youtube_dl/extractor/common.py | Python | unlicense | 38,144 | 0.001022 |
# Originally contributed by Stefan Schukat as part of this arbitrary-sized
# arrays patch.
from win32com.client import gencache
from win32com.test import util
import unittest
ZeroD = 0
OneDEmpty = []
OneD = [1,2,3]
TwoD = [
[1,2,3],
[1,2,3],
[1,2,3]
]
TwoD1 = [
[
[1,2,3,5],
[1,2,3],
[1,2,3]
],
[
[1,2,3],
[1,2,3],
[1,2,3]
]
]
OneD1 = [
[
[1,2,3],
[1,2,3],
[1,2,3]
],
[
[1,2,3],
[1,2,3]
]
]
OneD2 = [
[1,2,3],
[1,2,3,4,5],
[
[1,2,3,4,5],
[1,2,3,4,5],
[1,2,3,4,5]
]
]
ThreeD = [
[
[1,2,3],
[1,2,3],
[1,2,3]
],
[
[1,2,3],
[1,2,3],
[1,2,3]
]
]
FourD = [
[
[[1,2,3],[1,2,3],[1,2,3]],
[[1,2,3],[1,2,3],[1,2,3]],
[[1,2,3],[1,2,3],[1,2,3]]
],
[
[[1,2,3],[1,2,3],[1,2,3]],
[[1,2,3],[1,2,3],[1,2,3]],
[[1,2,3],[1,2,3],[1,2,3]]
]
]
LargeD = [
[ [list(range(10))] * 10],
] * 512
def _normalize_array(a):
if type(a) != type(()):
return a
ret = []
for i in a:
ret.append(_normalize_array(i))
return ret
class ArrayTest(util.TestCase):
def setUp(self):
self.arr = gencache.EnsureDispatch("PyCOMTest.ArrayTest")
def tearDown(self):
self.arr = None
def _doTest(self, array):
self.arr.Array = array
        self.assertEqual(_normalize_array(self.arr.Array), array)
def testZeroD(self):
self._doTest(ZeroD)
def testOneDEmpty(self):
self._doTest(OneDEmpty)
def testOneD(self):
self._doTest(OneD)
def testTwoD(self):
self._doTest(TwoD)
def testThreeD(self):
self._doTest(ThreeD)
def testFourD(self):
self._doTest(FourD)
def testTwoD1(self):
self._doTest(TwoD1)
def testOneD1(self):
self._doTest(OneD1)
def testOneD2(self):
self._doTest(OneD2)
def testLargeD(self):
self._doTest(LargeD)
if __name__ == "__main__":
try:
util.testmain()
except SystemExit as rc:
if not rc:
raise
| sserrot/champion_relationships | venv/Lib/site-packages/win32com/test/testArrays.py | Python | mit | 2,505 | 0.050699 |
# -*- coding: utf-8 -*-
import json
from mock import patch
from collections import defaultdict
from requests import Session
from connect.api import ConnectApi
from connect.event import Event
from connect import responses
PROJECT_ID = "MY_PROJECT_ID"
API_PUSH_KEY = "MY_PUSH_API_KEY"
BASE_URL = "https://api.getconnect.io"
COLLECTION_NAME = 'my_collection'
MULTI_EVENT_DATA = [{
'type': 'cycling',
'distance': 21255,
'caloriesBurned': 455,
'duration': 67,
'user': {
'id': '638396',
'name': 'Bruce'
}
},
{
'type': 'swimming',
'distance': 21255,
'caloriesBurned': 455,
'duration': 67,
'user': {
'id': '638396',
'name': 'Bruce',
}
}
]
SINGLE_EVENT_DATA = {
'type': 'cycling',
'distance': 21255,
'caloriesBurned': 455,
'duration': 67,
'user': {
'id': '638396',
'first': 'Bruce'
}
}
@patch("connect.api.Session.post")
class TestConnectAPI():
def setup_method(self, method):
batched = defaultdict(list)
for e in MULTI_EVENT_DATA:
e = Event(COLLECTION_NAME, e)
batched[COLLECTION_NAME].append(e.body)
self.multi_events = batched
self.single_event = Event(COLLECTION_NAME,SINGLE_EVENT_DATA)
self.connect = ConnectApi(project_id=PROJECT_ID,
api_key=API_PUSH_KEY
)
def test_init(self, post):
assert PROJECT_ID == self.connect._project_id
assert API_PUSH_KEY == self.connect._api_key
assert "https://api.getconnect.io" == self.connect._base_url
assert 60 == self.connect._get_timeout
assert 60 == self.connect._post_timeout
assert isinstance(self.connect._session, Session)
connect = ConnectApi(project_id=PROJECT_ID,
api_key=API_PUSH_KEY,
base_url="myurl",
post_timeout=10,
get_timeout=5
)
assert connect._base_url == "myurl"
assert connect._post_timeout == 10
assert connect._get_timeout == 5
def test_post_event(self, post ):
#200 - empty response
mocked_response = mocked_connect_response(200,None)
post.return_value = mocked_response
result = self.connect.post_event(self.single_event)
assert isinstance(result,responses.PushResponse)
post.reset_mock()
with patch("connect.api.ConnectApi._build_response") as build_response:
self.connect.post_event(self.single_event)
url = "{0}/events/{1}".format(BASE_URL,COLLECTION_NAME)
data = json.dumps(self.single_event.body)
post.assert_called_once_with(url=url, data=data, timeout=60)
build_response.assert_called_once_with(response_body=None,
raw_event=self.single_event.body,
status_code=200)
build_response.reset_mock()
# Non-empty response (!= 200)
body = {"errorMessage": "Maximum event size of 64kb exceeded."}
mocked_response = mocked_connect_response(413,body)
post.return_value = mocked_response
self.connect.post_event(self.single_event)
build_response.assert_called_once_with(response_body=body,
raw_event=self.single_event.body,
status_code=413)
def test_post_events(self,post):
events = []
expected_events = defaultdict(list)
for e in MULTI_EVENT_DATA:
events.append(Event(COLLECTION_NAME, e))
expected_events[COLLECTION_NAME].append(e)
body = {
COLLECTION_NAME: [{
"event": events[0].body,
"success": True
}, {
"event": events[1].body,
"success": False,
"message": "An error occured inserting the event please try again."
}]
}
mocked_response = mocked_connect_response(200,body)
post.return_value = mocked_response
result = self.connect.post_events(self.multi_events)
url = "{0}/events".format(BASE_URL)
data = json.dumps(self.multi_events)
post.assert_called_with(url=url, data=data, timeout=60)
assert isinstance(result,responses.PushBatchResponse)
with patch("connect.api.ConnectApi._build_batch_response") as build_batch_response:
self.connect.post_events(self.multi_events)
build_batch_response.assert_called_once_with(response_body=body,
events_by_collection=self.multi_events,
status_code=200)
def test__build_response(self, post):
single_event = Event(COLLECTION_NAME,SINGLE_EVENT_DATA)
r = self.connect._build_response(None, single_event.body,200)
assert isinstance(r, responses.PushResponse)
assert r.error_message is None
assert r.http_status_code == 200
assert r.event == single_event.body
r = self.connect._build_response(None, single_event.body,401)
assert isinstance(r, responses.PushResponse)
assert r.error_message == "Unauthorised. Please check your Project Id and API Key"
assert r.event == single_event.body
assert r.http_status_code == 401
response_body = {
"errors": [{
"field": "fieldName",
"description": "There was an error with this field."
}]
}
r = self.connect._build_response(response_body, single_event.body, 422)
assert isinstance(r, responses.PushResponse)
assert r.error_message == [{
"field": "fieldName",
"description": "There was an error with this field."
}]
assert r.event == single_event.body
assert r.http_status_code == 422
response_body = {
"errorMessage": "Maximum event size of 64kb exceeded."
}
r = self.connect._build_response(response_body, single_event.body, 413)
assert isinstance(r, responses.PushResponse)
assert r.error_message == "Maximum event size of 64kb exceeded."
assert r.event == single_event.body
assert r.http_status_code == 413
def test__build_batch_response(self, post):
response_body = {
COLLECTION_NAME: [
{
"success": True
},
{
"success": False,
"message": "An error occurred inserting the event please try again."
}
],
"my_collection2": [
{
"success": True
}
]
}
events = defaultdict(list)
events["my_collection2"].append(SINGLE_EVENT_DATA)
events[COLLECTION_NAME] = [e for e in MULTI_EVENT_DATA]
r = self.connect._build_batch_response(response_body,
events,
200)
assert isinstance(r, responses.PushBatchResponse)
assert r.http_status_code == 200
assert r.error_message is None
#to do: assert event body
for collection in r.results:
collection_results = r.results[collection]
for i in range(0, len(collection_results)):
assert isinstance(collection_results[i], responses.PushResponse)
assert collection_results[i].http_status_code is None
if i == 1:
assert collection_results[i].success == False
assert collection_results[i].error_message == "An error occurred inserting the event please try again."
assert collection_results[i].event == MULTI_EVENT_DATA[1]
else:
assert collection_results[i].success == True
assert collection_results[i].error_message is None
response_body = { "errorMessage": "An error occurred while processing your request"}
r = self.connect._build_batch_response(response_body,
events,
500)
assert r.http_status_code == 500
assert r.error_message == "An error occurred while processing your request"
assert r.results == {}
def test_create_session(self, post):
s = self.connect._create_session()
assert isinstance(s,Session)
assert s.headers["Content-Type"] == "application/json"
assert s.headers["X-Api-Key"] == API_PUSH_KEY
assert s.headers["X-Project-Id"] == PROJECT_ID
class mocked_connect_response(object):
"""
    An object used to mock a response from the Connect API. This is used in
return values for mocked objects so that they can be tested
"""
def __init__(self,status_code, body):
self.status_code = status_code
self._body = body
def json(self):
if self._body is not None:
return self._body
else:
raise ValueError
| getconnect/connect-python | tests/test_api.py | Python | mit | 9,933 | 0.010772 |
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ncbi.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| erickpeirson/django-ncbi | ncbi/manage.py | Python | gpl-3.0 | 247 | 0 |
#!/usr/bin/env python3
"""
s2_pi.py
Copyright (c) 2016-2018 Alan Yorinks All right reserved.
Python Banyan is free software; you can redistribute it and/or
modify it under the terms of the GNU AFFERO GENERAL PUBLIC LICENSE
Version 3 as published by the Free Software Foundation; either that version of
the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU AFFERO GENERAL PUBLIC LICENSE
along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""
import json
import os
import sys
import time
from subprocess import call
import pigpio
import psutil
from SimpleWebSocketServer import SimpleWebSocketServer, WebSocket
# This class inherits from WebSocket.
# It receives messages from Scratch and reports back any digital input
# changes.
class S2Pi(WebSocket):
def handleMessage(self):
# get command from Scratch2
payload = json.loads(self.data)
print(payload)
client_cmd = payload['command']
# When the user wishes to set a pin as a digital Input
if client_cmd == 'input':
pin = int(payload['pin'])
self.pi.set_glitch_filter(pin, 20000)
self.pi.set_mode(pin, pigpio.INPUT)
self.pi.callback(pin, pigpio.EITHER_EDGE, self.input_callback)
# when a user wishes to set the state of a digital output pin
elif client_cmd == 'digital_write':
pin = int(payload['pin'])
self.pi.set_mode(pin, pigpio.OUTPUT)
state = payload['state']
if state == '0':
self.pi.write(pin, 0)
else:
self.pi.write(pin, 1)
# when a user wishes to set a pwm level for a digital input pin
elif client_cmd == 'analog_write':
pin = int(payload['pin'])
self.pi.set_mode(pin, pigpio.OUTPUT)
value = int(payload['value'])
self.pi.set_PWM_dutycycle(pin, value)
elif client_cmd == 'servo':
# HackEduca ---> When a user wishes to set a servo:
# Using SG90 servo:
# 180° = 2500 Pulses; 0° = 690 Pulses
# Want Servo 0°-->180° instead of 180°-->0°:
# Invert pulse_max to pulse_min
# pulse_width = int((((pulse_max - pulse_min)/(degree_max - degree_min)) * value) + pulse_min)
# Where:
# Test the following python code to know your Pulse Range: Replace it in the formula
# >>>>----------------------->
# import RPi.GPIO as GPIO
# import pigpio
# Pulse = 690 # 0°
# Pulse = 2500 # 180°
# pi = pigpio.pi()
# pi.set_mode(23, pigpio.OUTPUT)
# pi.set_servo_pulse_width(23, Pulse)
# pi.stop()
# <------------------------<<<<<
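            # Worked example (illustrative only): with value = 90 degrees and the
            # SG90 range above, Pulsewidth = int(((690 - 2500) / (180 - 0)) * 90
            # + 2500) = 1595, i.e. roughly the servo's mid position.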
pin = int(payload['pin'])
self.pi.set_mode(pin, pigpio.OUTPUT)
value = int(payload['value'])
DegreeMin = 0
DegreeMax = 180
PulseMin = 2500
PulseMax = 690
Pulsewidth = int((((PulseMax - PulseMin) / (DegreeMax - DegreeMin)) * value) + PulseMin)
self.pi.set_servo_pulsewidth(pin, Pulsewidth)
time.sleep(0.01)
# when a user wishes to output a tone
elif client_cmd == 'tone':
pin = int(payload['pin'])
self.pi.set_mode(pin, pigpio.OUTPUT)
frequency = int(payload['frequency'])
frequency = int((1000 / frequency) * 1000)
tone = [pigpio.pulse(1 << pin, 0, frequency),
pigpio.pulse(0, 1 << pin, frequency)]
self.pi.wave_add_generic(tone)
wid = self.pi.wave_create()
if wid >= 0:
self.pi.wave_send_repeat(wid)
time.sleep(1)
self.pi.wave_tx_stop()
self.pi.wave_delete(wid)
elif client_cmd == 'ready':
pass
else:
print("Unknown command received", client_cmd)
# call back from pigpio when a digital input value changed
# send info back up to scratch
def input_callback(self, pin, level, tick):
payload = {'report': 'digital_input_change', 'pin': str(pin), 'level': str(level)}
print('callback', payload)
msg = json.dumps(payload)
self.sendMessage(msg)
def handleConnected(self):
self.pi = pigpio.pi()
print(self.address, 'connected')
def handleClose(self):
print(self.address, 'closed')
def run_server():
# checking running processes.
    # if pigpiod is already running, just note that and move on.
found_pigpio = False
for pid in psutil.pids():
p = psutil.Process(pid)
if p.name() == "pigpiod":
found_pigpio = True
print("pigpiod is running")
else:
continue
if not found_pigpio:
call(['sudo', 'pigpiod'])
print('pigpiod has been started')
os.system('scratch2&')
server = SimpleWebSocketServer('', 9000, S2Pi)
server.serveforever()
if __name__ == "__main__":
try:
run_server()
except KeyboardInterrupt:
sys.exit(0)
| MrYsLab/s2-pi | s2_pi/s2_pi.py | Python | agpl-3.0 | 5,512 | 0.000727 |
#!/usr/bin/env python
import matplotlib.pyplot as plt
from mpl_toolkits.basemap import cm
import numpy as np  # reshape
from cstoolkit import drange
from matplotlib.colors import LinearSegmentedColormap
"""
cmap_cs_precp = [ (242, 242, 242), (191, 239, 255), (178, 223, 238),
(154, 192, 205), ( 0, 235, 235), ( 0, 163, 247),
(153, 255, 51),( 0, 255, 0), ( 0, 199, 0), ( 0, 143, 0),
( 0, 63, 0), (255, 255, 0),(255, 204, 0) , (255, 143, 0),
(255, 0, 0), (215, 0, 0),
(255, 0, 255) ] #, (155, 87, 203)]
"""
cmap_cs_precp = [ (242, 242, 242), (178, 223, 238), (154, 192, 205), (68, 176, 213),
( 0, 163, 247), ( 0, 235, 235), (153, 255, 51 ), ( 0, 255, 0),
( 0, 199, 0), ( 0, 143, 0), ( 0, 63, 0), (255, 255, 0),
( 255, 204, 0), (255, 143, 0), (255, 0, 0), (215, 0, 0),
(255, 0, 255) ] #, (155, 87, 203)]
WBGYR=[#(255,255,255),
#(252,254,255),
#(250,253,255),
#(247,252,254),
#(244,251,254),
#(242,250,254),
#(239,249,254),
#(236,248,253),
#(234,247,253),
#(231,246,253),
#(229,245,253),
#(226,244,253),
#(223,243,252),
#(221,242,252),
#(218,241,252),
#(215,240,252),
#(213,239,252),
#(210,238,251),
#(207,237,251),
#(205,236,251),
#(202,235,251),
#(199,234,250),
#(197,233,250),
#(194,232,250),
#(191,231,250),
#(189,230,250),
#(186,229,249),
(183,228,249),
(181,227,249),
(178,226,249),
(176,225,249),
(173,224,248),
(170,223,248),
(168,222,248),
(165,221,248),
(162,220,247),
(157,218,247),
(155,216,246),
(152,214,245),
(150,212,243),
(148,210,242),
(146,208,241),
(143,206,240),
(141,204,238),
(139,202,237),
(136,200,236),
(134,197,235),
(132,195,234),
(129,193,232),
(127,191,231),
(125,189,230),
(123,187,229),
(120,185,228),
(118,183,226),
(116,181,225),
(113,179,224),
(111,177,223),
(109,175,221),
(106,173,220),
(104,171,219),
(102,169,218),
(100,167,217),
(97,165,215),
(95,163,214),
(93,160,213),
(90,158,212),
(88,156,211),
(86,154,209),
(83,152,208),
(81,150,207),
(79,148,206),
(77,146,204),
(72,142,202),
(72,143,198),
(72,144,195),
(72,145,191),
(72,146,188),
(72,147,184),
(72,148,181),
(72,149,177),
(72,150,173),
(72,151,170),
(72,153,166),
(72,154,163),
(72,155,159),
(72,156,156),
(72,157,152),
(72,158,148),
(72,159,145),
(72,160,141),
(72,161,138),
(73,162,134),
(73,163,131),
(73,164,127),
(73,165,124),
(73,166,120),
(73,167,116),
(73,168,113),
(73,169,109),
(73,170,106),
(73,172,102),
(73,173,99),
(73,174,95),
(73,175,91),
(73,176,88),
(73,177,84),
(73,178,81),
(73,179,77),
(73,181,70),
(78,182,71),
(83,184,71),
(87,185,72),
(92,187,72),
(97,188,73),
(102,189,74),
(106,191,74),
(111,192,75),
(116,193,75),
(121,195,76),
(126,196,77),
(130,198,77),
(135,199,78),
(140,200,78),
(145,202,79),
(150,203,80),
(154,204,80),
(159,206,81),
(164,207,81),
(169,209,82),
(173,210,82),
(178,211,83),
(183,213,84),
(188,214,84),
(193,215,85),
(197,217,85),
(202,218,86),
(207,220,87),
(212,221,87),
(217,222,88),
(221,224,88),
(226,225,89),
(231,226,90),
(236,228,90),
(240,229,91),
(245,231,91),
(250,232,92),
(250,229,91),
(250,225,89),
(250,222,88),
(249,218,86),
(249,215,85),
(249,212,84),
(249,208,82),
(249,205,81),
(249,201,80),
(249,198,78),
(249,195,77),
(248,191,75),
(248,188,74),
(248,184,73),
(248,181,71),
(248,178,70),
(248,174,69),
(248,171,67),
(247,167,66),
(247,164,64),
(247,160,63),
(247,157,62),
(247,154,60),
(247,150,59),
(247,147,58),
(246,143,56),
(246,140,55),
(246,137,53),
(246,133,52),
(246,130,51),
(246,126,49),
(246,123,48),
(246,120,47),
(245,116,45),
(245,113,44),
(245,106,41),
(244,104,41),
(243,102,41),
(242,100,41),
(241,98,41),
(240,96,41),
(239,94,41),
(239,92,41),
(238,90,41),
(237,88,41),
(236,86,41),
(235,84,41),
(234,82,41),
(233,80,41),
(232,78,41),
(231,76,41),
(230,74,41),
(229,72,41),
(228,70,41),
(228,67,40),
(227,65,40),
(226,63,40),
(225,61,40),
(224,59,40),
(223,57,40),
(222,55,40),
(221,53,40),
(220,51,40),
(219,49,40),
(218,47,40),
(217,45,40),
(217,43,40),
(216,41,40),
(215,39,40),
(214,37,40),
(213,35,40),
(211,31,40),
(209,31,40),
(207,30,39),
(206,30,39),
(204,30,38),
(202,30,38),
(200,29,38),
(199,29,37),
(197,29,37),
(195,29,36),
(193,28,36),
(192,28,36),
(190,28,35),
(188,27,35),
(186,27,34),
(185,27,34),
(183,27,34),
(181,26,33),
(179,26,33),
(178,26,32),
(176,26,32),
(174,25,31),
(172,25,31),
(171,25,31),
(169,25,30),
(167,24,30),
(165,24,29),
(164,24,29),
(162,23,29),
(160,23,28),
(158,23,28),
(157,23,27),
(155,22,27),
(153,22,27),
(151,22,26),
(150,22,26),
(146,21,25)]
hotcold18= [( 24 , 24 ,112),
( 16 , 78 ,139),
( 23 ,116 ,205),
( 72 ,118 ,255),
( 91 ,172 ,237),
( 173 ,215 ,230),
( 209 ,237 ,237),
( 229 ,239 ,249),
#( 242 ,255 ,255),
( 255 ,255 ,255),
#( 253 ,245 ,230),
( 255 ,228 ,180),
( 243 ,164 , 96),
( 237 ,118 , 0),
( 205 ,102 , 29),
( 224 , 49 , 15),
#( 255, 0 , 0),
( 255, 0 , 255),
(183,75,243),
(183,75,243)]
#(255,0,255)] #,
#(81,9,121)]
"""
( 237 , 0 , 0),
( 205 , 0 , 0),
( 139 , 0 , 0)]
"""
haxby= [ (37,57,175) ,
(37,68,187) ,
(38,79,199) ,
(38,90,211) ,
(39,101,223) ,
(39,113,235) ,
(40,124,247) ,
(41,134,251) ,
(43,144,252) ,
(44,154,253) ,
(46,164,253) ,
(47,174,254) ,
(49,184,255) ,
(54,193,255) ,
(62,200,255) ,
(71,207,255) ,
(80,214,255) ,
(89,221,255) ,
(98,229,255) ,
(107,235,254) ,
(112,235,241) ,
(117,235,228) ,
(122,235,215) ,
(127,236,202) ,
(132,236,189) ,
(137,236,177) ,
(147,238,172) ,
(157,241,171) ,
(168,244,169) ,
(178,247,167) ,
(189,250,165) ,
(200,253,163) ,
(208,253,159) ,
(213,250,152) ,
(219,247,146) ,
(224,244,139) ,
(230,241,133) ,
(236,238,126) ,
(240,235,120) ,
(243,227,115) ,
(245,220,109) ,
(248,212,104) ,
(250,205,98) ,
(252,197,93) ,
(255,190,88) ,
(255,185,84) ,
(255,181,81) ,
(255,176,78) ,
(255,172,75) ,
(255,167,72) ,
(255,163,69) ,
(255,163,74) ,
(255,167,85) ,
(255,171,95) ,
(255,175,105) ,
(255,179,115) ,
(255,183,126) ,
(255,189,139) ,
(255,200,158) ,
(255,211,178) ,
(255,222,197) ,
(255,233,216) ,
(255,244,236) ,
(255,255,255) ]
BWR=[ ( 36 , 0 , 216),
( 24 , 28 , 247),
( 40 , 87 , 255),
( 61 , 135 , 255),
( 86 , 176 , 255),
( 117 , 211 , 255),
( 153 , 234 , 255),
( 188 , 249 , 255),
( 234 , 255 , 255),
( 255 , 255 , 255),
( 255 , 241 , 188),
( 255 , 214 , 153),
( 255 , 172 , 117),
( 255 , 120 , 86),
( 255 , 61 , 61),
#( 247 , 39 , 53),
( 165 , 0 , 33)]
"""
( 216 , 21 , 47),
( 165 , 0 , 33)]
"""
BWR=[ #( 0 , 0 , 0),
( 16 , 78 , 139),
#( 23 , 116 , 205),
#( 61 , 135 , 255),
( 86 , 176 , 255),
( 117 , 211 , 255),
( 153 , 234 , 255),
( 188 , 249 , 255),
( 234 , 255 , 255),
( 255 , 255 , 255),
( 255 , 241 , 188),
( 255 , 214 , 153),
( 255 , 172 , 117),
( 255 , 120 , 86),
( 255 , 61 , 61),
( 165 , 0 , 33)]
#( 247 , 39 , 53)]
tableau20 = [ (127, 127, 127),(174, 199, 232), (31, 119, 180), (255, 187, 120),
(214, 39, 40),(152, 223, 138), (44, 160, 44), (255, 152, 150),
(148, 103, 189), (197, 176, 213), (140, 86, 75), (196, 156, 148),
(227, 119, 194), (247, 182, 210), (255, 127, 14),(199, 199, 199),
(188, 189, 34), (219, 219, 141), (23, 190, 207), (158, 218, 229),(65,68,81),(0,0,0)]
def buildcmp(cmaplist):
for i in range(len(cmaplist)):
r, g, b = cmaplist[i]
cmaplist[i] = (r / 255., g / 255., b / 255.)
return LinearSegmentedColormap.from_list( "precip", cmaplist,N=len(cmaplist)),cmaplist
cmap_cs_precp,cs_precp_list=buildcmp(cmap_cs_precp)
cmap_haxby,haxby_list=buildcmp(haxby[::5])
cmap_BWR,BWR_list=buildcmp(BWR)
cmap_BWR.set_over('purple')
cmap_BWR.set_under('blue')
cmap_cs_precp.set_over('purple')
cmap_tableau20,tableau20=buildcmp(tableau20)
cmap_hotcold18,hotcold18=buildcmp(hotcold18)
cmap_hotcold18.set_over('blueviolet')
cmap_hotcold18.set_under('black')
cmap_WBGYR,WBGYR=buildcmp(WBGYR)
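# Illustrative usage of the colormaps built above (not part of the original
# script); the data here is synthetic:
#     data = np.random.rand(50, 50) * 2 - 1            # values in [-1, 1)
#     plt.pcolormesh(data, cmap=cmap_BWR, vmin=-1, vmax=1)
#     plt.colorbar(extend='both')  # 'both' shows the set_over/set_under colors
#     plt.show()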
sim_nicename={"ERI":"ERI",
"ERI_CAR":"CWRF-CAR",
"cor":"Inter-annual cor of",
"Xcor":"Cor between ori of",
"Xcorbias":"Cor between bias of",
"RegCM":"RegCM4.6",
"ConRatio":"Conv to Total Pr in PCT95 day",
"PCT":"Extreme Precipitation",
"RAINYDAYS":"Days with Precipitation",
"NX":"North Xinjiang",
"SX":"South Xinjiang",
"WT":"West Tibet",
"ET":"East Tibet",
"ST":"South Tibet",
"IM":"Inner Mongolia",
"SW":"Southwest",
"NE":"Northeast",
"NC":"North China",
"CC":"Central China",
"SC":"South China",
"T2MAX":"T2X",
"AT2M" :"T2M",
"T2MIN":"T2N",
"PRAVG":"PR",
"AT2M97":"A97",
"SDII":"DI",
"CN_OBS":"OBS",
#"RAINYDAYS":"RD",
"run_RegCM4.6":"RegCM\n4.6",
"run_RegCM4.5":"RegCM\n4.5",
"ERI_run_0":"old CWRF",
"new_ERI_run_0":"CWRF\nMor",
"new_ERI_gsfc":"CWRF",
"new_ERI_albedo":"CWRF",
# "new_ERI_gsfc":"CWRF\nGSFC",
"new_ERI_morr":"Mor",
"run_00":"CTL",
"xoml":"new_xoml",
"run_01":"BMJ",
"run_02":"NKF",
"run_03":"NSAS",
"run_04":"TDK",
"run_06":"MB",
"run_06":"THO",
"run_07":"MOR",
"run_08":"WD6",
"run_09":"AER",
"run_10": "XR", # "Radall",
"run_11":"CCCMA",
"run_12":"FLG",
"run_13":"RRTMG",
"run_14":"MYNN",
"run_15":"ACM",
"run_16":"UW",
"run_17":"NOAH",
"run_18":"XOML",
"run_19":"F-M",
"run_20":"FMBMJ",
"run_21":"FMNKF",
"run_22":"FMNSAS",
"run_23":"FMTDK",
"run_24":"FMMB",#"scheme_cst_2",
"run_25":"FMTHO",#"scheme_cst_3",
"run_26":"FMMOR",#"scheme_cst_3",
"run_27":"boulac",#"scheme_ccb_1",
"run_28":"gfs",#"scheme_ccb_4",
"run_29":"mynn2",#"scheme_ccb_5",
"run_30":"new cloud",#"scheme_ccs_3",
"run_31":"boulac", #"NewTHO",
"run_32":"gfs2", #"NewMOR",
"run_33":"", #"NewMOR",
"run_34":"New Melt", #"NewMOR",
"run_35":"old CAM", #"NewMOR",
"run_36":"NewSW", #"NewMOR",
"run_37":"ACM", #"NewMOR",
"run_38":"bedrock", #"NewMOR",
"run_39":"CF", #"NewMOR",
"run_40":"NewDrain V0", #"NewMOR",
"run_41":"Warm start V1", #"NewMOR",
"run_42":"Cold start V1", #"NewMOR",
"run_43":"inflx ", #"NewMOR",
"run_44":"om ", #"NewMOR",
"run_45":"New Soil Water", #"NewMOR",
"run_46":"New Reff", #"NewMOR",
"run_47":"OISST", #"NewMOR",
"run_48":"NOoml", #"NewMOR",
"run_49":"NOocean", #"NewMOR",
"run_50":"MSA_newSW", #"ERIsst"
"run_51":"NoMSA ipf0", #"NewMOR",
"run_52":"new UWCAM", #"NewMOR",
"run_53":"NoMSA ipf2", #"NewMOR",
"run_54":"AERO_MSAon", #"NewMOR",
"run_55":"AERO_MSAold", #"NewMOR",
"run_56":"noAERO", #"NewMOR",
"run_57":"OBC_V0", #"SVNcode", #"NewMOR",
"run_58":"OBClg100", #"NewMOR",
"run_59":"OBClg111", #"NewMOR",
"run_60":"WRF", #"NewMOR",
"run_61":"ALBfix", #"NewMOR",
"run_62":"PSFC4_NSW", #"NewMOR",
"run_63":"PSFC4_OSW", #"NewMOR",
"run_64":"psfc4_osw_CAMUW", #"NewMOR",
"run_65":"git558faed", #"NewMOR",
"run_66":"psfc4morr", #"NewMOR",
"run_67":"newsw_morr", #"NewMOR",
"run_68":"psfc4_osw_v2", #"NewMOR",
"run_69":"WRFRUN", #
"run_70":"PSFC4_NSW", #oldini0
"run_71":"PSFC4_V0", #"PSFC4_SVNCODE"
"run_72":"OBC_OSW" , #"oldBC_osw"
"run_73":"PSFC4_br_OSW" , #"oldBC_osw"
"run_74":"OLDini_br_NSW" , #"oldBC_osw"
"run_75":"OLDini_br_V0" , #"oldBC_osw"
"run_76":"OLDini_br_558faed" , #"oldBC_osw"
"run_77":"OVEG_NSW" , #"oldBC_osw"
"run_78":"OVEG_OSW" , #"oldBC_osw"
"run_79":"OVEG_V0" , #"oldBC_osw"
"run_80":"HydRED" , #"oldBC_osw"
"run_81":"CTL" , #"oldBC_osw"
"run_82":"newcam" , #"oldBC_osw"
"run_oldSW_flw8_new":"CWRF",
"ERI_run_1":"CWRF/CLM4.5",
"CESM_run_0":"CWRF/CSSP",
"CESM_run_1":"CWRF/CLM4.5",
"PCR85-CESM_run_0":"CWRF/CSSP",
"PCR85-CESM_run_1":"CWRF/CLM4.5",
"run_CTL":"CTL ",
"CESM":"CESM",
"run_CLM4.5":"CLM4.5Hyd ",
"run_Red":"HydRed ",
"run_noxoml":"NO xoml ",
"run_nolake":"NO lake ",
"run_oldrad" :"Old Alb ",
"run_oldveg":"Old LAI ",
"run_noforzen":"Old frozen ",
"Mean":"Mean",
"Mean_Sub":"Mean_Sub",
"Med":"Med",
"P85":"P85",
"P80":"P80",
"P70":"P70",
"P10":"P10",
"P20":"P20",
"Obs":"OBS",
"OBS":"OBS",
"Max":"Max",
"run_1":"MY/MO/W1.5/MC0.5/TD0",
"run_2":"CAM/GSFC/W1.5/MC0.75/TD0",
"run_3":"MY/MO/W1.5/MC0.75/TD0",
"run_4":"MY/MO/W1/MC0.75/TD0",
"run_5":"MY/MO/W1/MC0.75/TD0.5",
"run_6":"MY/MO/W1/MC1/TD0",
"run_7":"MY/MO/W1/MC1/TD1"}
#plotres={'PRAVG':{},'PCT':{},'CDD':{},'RAINYDAYS':{},'AT2M':{},'ASWUPT':{}}
from collections import defaultdict
plotres= defaultdict(dict)
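# Note (added): defaultdict(dict) lets the per-variable option tables below be filled
# key by key, e.g. plotres['XRSUR']['unit']="kg/m2/day", without first creating
# plotres['XRSUR']; the inner dict is created automatically on first access.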
##########################set the plot related parameters#####################
#plotres['XRSUR']['cleve1']=[x*1e-6 for x in range(31)]
plotres['XRSUR']['cmp1']=plt.get_cmap('jet')
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('b')
plotres['XRSUR']['cmp2']=cmp
#plotres['XRSUR']['convertcoef']=0.001
plotres['XRSUR']['unit']="kg/m2/day"
plotres['XRSUR']['mask']=True
plotres['XRSUR']['violion']=False
#plotres['XRBAS']['cleve1']=[x*1e-6 for x in range(31)]
plotres['XRBAS']['cmp1']=plt.get_cmap('jet')
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('b')
plotres['XRBAS']['cmp2']=cmp
plotres['XRBAS']['unit']="kg/m2/day"
plotres['XRBAS']['mask']=True
plotres['XRBAS']['violion']=False
#plotres['SFROFF']['cleve1']=[x*10000 for x in range(31)]
plotres['SFROFF']['cmp1']=plt.get_cmap('jet')
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('b')
plotres['SFROFF']['cmp2']=cmp
#plotres['SFROFF']['convertcoef']=0.001
plotres['SFROFF']['unit']="kg/m2"
plotres['SFROFF']['mask']=True
plotres['SFROFF']['violion']=False
#plotres['XSMTg']['cleve1']=[x*20 for x in range(1,20)] #range(0, 1,0.05)
plotres['XSMTg']['cmp1']=plt.get_cmap('jet')
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('b')
plotres['XSMTg']['cmp2']=cmp
plotres['XSMTg']['unit']="kg/m2"
plotres['XSMTg']['mask']=True
plotres['XSMTg']['violion']=False
plotres['XSMTg']['vlevel']=4
#plotres['AODNIR']['cleve0']=[x*0.05 for x in range(0,11)] #range(0, 1,0.05)
#plotres['AODNIR']['cleve1']=[x*0.05 for x in range(0,11)] #range(0, 1,0.05)
plotres['AODNIR']['cmp1']=plt.get_cmap('jet')
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('b')
plotres['AODNIR']['cmp2']=cmp
#plotres['AODNIR']['convertcoef']=0.01
plotres['AODNIR']['unit']=""
plotres['AODNIR']['mask']=True
#plotres['AODVIS']['cleve0']=[x*0.05 for x in range(0,11)] #range(0, 1,0.05)
#plotres['AODVIS']['cleve1']=[x*0.05 for x in range(0,11)] #range(0, 1,0.05)
plotres['AODVIS']['cmp1']=plt.get_cmap('jet')
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('b')
plotres['AODVIS']['cmp2']=cmp
#plotres['AODVIS']['convertcoef']=0.01
plotres['AODVIS']['unit']=""
plotres['AODVIS']['mask']=True
#plotres['CLDFRAh']['cleve1']=[x*0.05 for x in range(0,21)] #range(0, 1,0.05)
plotres['CLDFRAh']['cmp1']=plt.get_cmap('jet')
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('b')
plotres['CLDFRAh']['cmp2']=cmp
#plotres['CLDFRAh']['convertcoef']=0.01
plotres['CLDFRAh']['unit']=""
plotres['CLDFRAh']['mask']=True
plotres['CLDFRAh']['violion']=False
plotres['CLDFRAh']['vlevel']=3
#plotres['CLDFRAm']['cleve1']=[x*0.05 for x in range(0,21)] #range(0, 1,0.05)
plotres['CLDFRAm']['cmp1']=plt.get_cmap('jet')
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('b')
plotres['CLDFRAm']['cmp2']=cmp
#plotres['CLDFRAm']['convertcoef']=0.01
plotres['CLDFRAm']['unit']=""
plotres['CLDFRAm']['mask']=True
plotres['CLDFRAm']['violion']=False
plotres['CLDFRAm']['vlevel']=2
#plotres['CLDFRAl']['cleve1']=[x*0.05 for x in range(0,21)] #range(0, 1,0.05)
plotres['CLDFRAl']['cmp1']=plt.get_cmap('jet')
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('b')
plotres['CLDFRAl']['cmp2']=cmp
#plotres['CLDFRAl']['convertcoef']=0.01
plotres['CLDFRAl']['unit']=""
plotres['CLDFRAl']['mask']=True
plotres['CLDFRAl']['violion']=False
plotres['CLDFRAl']['vlevel']=1
#plotres['CLDFRA']['cleve1']=[x*0.05 for x in range(0,21)] #range(0, 1,0.05)
plotres['CLDFRA']['cmp1']=plt.get_cmap('jet')
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('b')
plotres['CLDFRA']['cmp2']=cmp
#plotres['CLDFRA']['convertcoef']=0.01
plotres['CLDFRA']['unit']=""
plotres['CLDFRA']['mask']=True
plotres['CLDFRA']['violion']=False
plotres['CLDFRA']['vlevel']=0
#plotres['QVAPOR']['cleve1']=range(0, 20,1)
plotres['QVAPOR']['cmp1']=plt.get_cmap('jet')
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('b')
plotres['QVAPOR']['cmp2']=cmp
plotres['QVAPOR']['convertcoef']=1000
plotres['QVAPOR']['unit']="$g/kg$"
plotres['QVAPOR']['mask']=False
plotres['QVAPOR']['violion']=False
plotres['QVAPOR']['vlevel']=21
#plotres['TCWPC']['cleve1']=range(0, 200,10)
plotres['TCWPC']['cmp1']=plt.get_cmap('jet')
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('b')
plotres['TCWPC']['cmp2']=cmp
plotres['TCWPC']['unit']="$g/m^{2}$"
plotres['TCWPC']['mask']=True
plotres['TCWPC']['violion']=False
#plotres['V']['cleve1']=range(-10, 10,1)
plotres['V']['cmp1']=plt.get_cmap('jet')
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('b')
plotres['V']['cmp2']=cmp
plotres['V']['unit']="$m/s$"
plotres['V']['mask']=False
plotres['V']['violion']=False
plotres['V']['vlevel']=21
#plotres['U']['cleve1']=range(-10, 10,1)
plotres['U']['cmp1']=plt.get_cmap('jet')
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('b')
plotres['U']['cmp2']=cmp
plotres['U']['unit']="$m/s$"
plotres['U']['mask']=False
plotres['U']['violion']=False
plotres['U']['vlevel']=21
#plotres['PSL']['cleve1']=range(1000, 1024,1)
plotres['PSL']['cmp1']=plt.get_cmap('jet')
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('b')
plotres['PSL']['cmp2']=cmp
plotres['PSL']['unit']="$\%$"
plotres['PSL']['convertcoef']=0.01
plotres['PSL']['mask']=False
plotres['PSL']['violion']=False
#plotres['PS']['cleve1']=range(700, 1030,5)
plotres['PS']['cmp1']=plt.get_cmap('jet')
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('b')
plotres['PS']['cmp2']=cmp
plotres['PS']['unit']="$\%$"
plotres['PS']['convertcoef']=0.01
plotres['PS']['mask']=False
plotres['PS']['violion']=False
#plotres['ALBEDO']['cleve1']=range(0, 60,5)
plotres['ALBEDO']['cmp1']=plt.get_cmap('jet')
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('b')
plotres['ALBEDO']['cmp2']=cmp
plotres['ALBEDO']['unit']="$\%$"
plotres['ALBEDO']['convertcoef']=100
plotres['ALBEDO']['mask']=False
plotres['ALBEDO']['violion']=False
#plotres['ASWUPT']['cleve1']=range(80,160,10)
plotres['ASWUPT']['cmp1']=plt.get_cmap('jet')
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('b')
plotres['ASWUPT']['cmp2']=cmp
plotres['ASWUPT']['unit']="$W m^{-2}$"
plotres['ASWUPT']['mask']=True
plotres['ASWUPT']['violion']=False
#plotres['ASWUPS']['cleve1']=range(0,210,10)
plotres['ASWUPS']['cmp1']=plt.get_cmap('jet')
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('b')
plotres['ASWUPS']['cmp2']=cmp
plotres['ASWUPS']['unit']="$W m^{-2}$"
plotres['ASWUPS']['mask']=True
plotres['ASWUPS']['violion']=False
#plotres['ALWDNS']['cleve1']=range(20,410,50)
#plotres['ALWDNS']['cleve0']=range(20,410,10)
plotres['ALWDNS']['cmp1']=plt.get_cmap('jet')
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('b')
plotres['ALWDNS']['cmp2']=cmp
plotres['ALWDNS']['unit']="$W m^{-2}$"
plotres['ALWDNS']['mask']=True
plotres['ALWDNS']['violion']=False
#plotres['ASWDNS']['cleve1']=range(20,410,50)
#plotres['ASWDNS']['cleve0']=range(20,410,10)
plotres['ASWDNS']['cmp1']=plt.get_cmap('jet')
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('b')
plotres['ASWDNS']['cmp2']=cmp
plotres['ASWDNS']['unit']="$W m^{-2}$"
plotres['ASWDNS']['mask']=True
plotres['ASWDNS']['violion']=False
#plotres['ALWUPS']['cleve1']=range(200,510,10)
plotres['ALWUPS']['cmp1']=plt.get_cmap('jet')
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('b')
plotres['ALWUPS']['cmp2']=cmp
plotres['ALWUPS']['unit']="$W m^{-2}$"
plotres['ALWUPS']['mask']=True
plotres['ALWUPS']['violion']=False
#plotres['ALWDNS']['cleve1']=range(150,450,10)
plotres['ALWDNS']['cmp1']=plt.get_cmap('jet')
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('b')
plotres['ALWDNS']['cmp2']=cmp
plotres['ALWDNS']['unit']="$W m^{-2}$"
plotres['ALWDNS']['mask']=True
plotres['ALWDNS']['violion']=False
#plotres['ALWUPT']['cleve1']=range(150,360,10)
plotres['ALWUPT']['cmp1']=plt.get_cmap('jet')
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('b')
plotres['ALWUPT']['cmp2']=cmp
plotres['ALWUPT']['unit']="$W m^{-2}$"
plotres['ALWUPT']['mask']=True
plotres['ALWUPT']['violion']=False
#plotres['PrMAX']['cleve0']=range(1,35)
#plotres['PrMAX']['cleve1']=range(0,51,5)
# import colormaps as cmaps
# cmp=cmap=cmaps.viridis
plotres['PrMAX']['cmp1']=plt.get_cmap('jet')
#plotres['PrMAX']['cmp1']=cm.s3pcpn
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('b')
plotres['PrMAX']['cmp2']=cmp
plotres['PrMAX']['unit']="mm/day"
plotres['PrMAX']['convertcoef']=60*60*24
plotres['PrMAX']['mask']=True
plotres['PrMAX']['violion']=True
#plotres['PRAVG']['cleve1']=[0.5,1.0,1.5,2.0,2.5,3.0,3.5,4.0,4.5,5,6,7,8,9,10,11,12,13,14]
#plotres['PRAVG']['cleve3']=range(10)
plotres['PRAVG']['cmp1']=cmap_cs_precp
cmp =plt.get_cmap('Spectral_r');cmp.set_over('maroon');cmp.set_under('w')
plotres['PRAVG']['cmp3']=plt.get_cmap('RdYlBu_r') #cmap_WBGYR #plt.get_cmap('jet')
cmp =cmap_BWR
plotres['PRAVG']['cmp2']=cmp
plotres['PRAVG']['unit']="mm/day"
plotres['PRAVG']['violion']=True
#plotres['R95T']['cleve1']=[x*0.04 for x in range(0,21)] #range(0, 1,0.05)
#plotres['R95T']['cleve0']=[x*0.04 for x in range(0,21)] #range(0, 1,0.05)
plotres['R95T']['cmp1']=plt.get_cmap('jet')
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('b')
plotres['R95T']['cmp2']=cmp
plotres['R95T']['unit']=""
plotres['R95T']['convertcoef']=1
#plotres['PCT']['cleve0']=[0,2,4,6,8,10,15,20,25,30,40,50,60]
#plotres['PCT']['cleve1']=[2,4,6,8,10,12,14,16,18,20,22,24,26,28,30,35,40,45,50]
plotres['PCT']['cmp1']=cmap_cs_precp
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('w')
plotres['PCT']['cmp2']=cmp
plotres['PCT']['unit']="mm/day"
plotres['PCT']['convertcoef']=1
plotres['ConRatio']['cmp1']=cmap_cs_precp
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('w')
plotres['ConRatio']['cmp2']=cmp
plotres['ConRatio']['unit']=""
#plotres['PCT99']['cleve0']=[0,2,4,6,8,10,15,20,25,30,40,50,60]
#plotres['PCT99']['cleve1']=[2,4,6,8,10,12,14,16,18,20,22,24,26,28,30,35,40,45,50]
plotres['PCT99']['cmp1']=cmap_cs_precp
cmp =plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('w')
plotres['PCT99']['cmp2']=cmp
plotres['PCT99']['unit']="mm/day"
plotres['PCT99']['convertcoef']=1
#plotres['CDD']['cleve0']=[-20,-18,-16,-14,-10,-8,-6,-4,-2,2,4,6,8,10,12,14,16,18,20,22]
#plotres['CDD']['cleve1']=[4,6,8,10,12,14,16,18,20,22,24,26,28,30,35,40,45,50]
plotres['CDD']['cmp1']=cmap_cs_precp
plotres['CDD']['cmp2']=None
plotres['CDD']['unit']="day"
plotres['CDD']['convertcoef']=1
plotres['CDD']['mask']=True
#plotres['SDII']['cleve0']=range(1,15)
#plotres['SDII']['cleve1']=range(1,20)
plotres['SDII']['cmp1']=cmap_cs_precp
plotres['SDII']['cmp2']=None
plotres['SDII']['unit']="mm/day"
plotres['SDII']['convertcoef']=1
plotres['SDII']['mask']=True
#plotres['R5D']['cleve0']=[2,4,6,8,10,12,14,16,18,20,22,24,26,28,30,35,40,45,50]
#plotres['R5D']['cleve1']=[2,4,6,8,10,12,14,16,18,20,22,24,26,28,30,35,40,45,50]
plotres['R5D']['cmp1']=cmap_cs_precp
plotres['R5D']['cmp2']=None
plotres['R5D']['unit']="mm/day"
plotres['R5D']['convertcoef']=1 # divided by 5 days
plotres['R5D']['mask']=True
#plotres['R10']['cleve0']=[2,4,6,8,10,12,14,16,18,20,22,24,26,28,30,35,40,45,50]
#plotres['R10']['cleve1']=[2,4,6,8,10,12,14,16,18,20,22,24,26,28,30,35,40,45,50]
plotres['R10']['cmp1']=cmap_cs_precp
plotres['R10']['cmp2']=None
plotres['R10']['unit']="day"
plotres['R10']['convertcoef']=1
plotres['R10']['mask']=True
#plotres['RAINYDAYS']['cleve0']=range(5,95,5)
#plotres['RAINYDAYS']['cleve1']=range(5,95,5)
plotres['RAINYDAYS']['cmp1']=cmap_cs_precp
plotres['RAINYDAYS']['cmp2']=None
plotres['RAINYDAYS']['unit']="day"
plotres['RAINYDAYS']['convertcoef']=1
plotres['RAINYDAYS']['mask']=True
#plotres['T2MAX']['cleve1']=range(-10,41)
#plotres['T2MAX']['cleve0']=[-9,-8,-7,-6,-5,-4,-3,-2,-1,1,2,3,4,5,6,7]
#plotres['T2MAX']['cmp1']=plt.get_cmap('jet')
plotres['T2MAX']['cmp1']=cmap_cs_precp
plotres['T2MAX']['cmp1']=plt.get_cmap('jet')
cmp =cmap_BWR
plotres['T2MAX']['cmp2']=cmp
plotres['T2MAX']['unit']="$^\circ$C"
plotres['T2MAX']['convertcoef']=1
plotres['T2MAX']['mask']=True
plotres['T2MAX']['valuemask']=True
plotres['T2MAX']['shift']=-273.15
#plotres['T2MIN']['cleve1']=range(-10,41)
#plotres['T2MIN']['cleve0']=[-9,-8,-7,-6,-5,-4,-3,-2,-1,1,2,3,4,5,6,7]
#plotres['T2MIN']['cmp1']=plt.get_cmap('jet')
#plotres['T2MIN']['cmp1']=cmap_cs_precp
plotres['T2MIN']['cmp1']=plt.get_cmap('jet')
cmp =cmap_BWR
plotres['T2MIN']['cmp2']=cmp
plotres['T2MIN']['unit']="$^\circ$C"
plotres['T2MIN']['convertcoef']=1
plotres['T2MIN']['mask']=True
plotres['T2MIN']['valuemask']=True
plotres['T2MIN']['shift']=-273.15
#plotres['AT2M']['cleve0']=[-9,-8,-7,-6,-5,-4,-3,-2,-1,1,2,3,4,5,6,7]
#plotres['AT2M']['cleve1']=range(-10,31,2)
#plotres['AT2M']['cleve3']=range(10)
plotres['AT2M']['cmp1']=plt.get_cmap('jet')
cmp =cmap_BWR
plotres['AT2M']['cmp2']=cmp
plotres['AT2M']['unit']="$^\circ$C"
plotres['AT2M']['convertcoef']=1
plotres['AT2M']['valuemask']=True
plotres['AT2M']['shift']=-273.15
#plotres['AT2M97']['cleve0']=[-10, -9, -8, -7, -6, -5, -4, -3, -2, -1, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
#plotres['AT2M97']['cleve1']=range(-15,35,2)
#plotres['AT2M97']['cleve3']=range(10)
plotres['AT2M97']['cmp1']=plt.get_cmap('gist_rainbow_r')
cmp = plt.get_cmap('PuOr_r') #plt.get_cmap('seismic');cmp.set_over('maroon');cmp.set_under('b')
plotres['AT2M97']['cmp2']=cmp
plotres['AT2M97']['unit']="$^\circ$C"
plotres['AT2M97']['convertcoef']=1
plotres['AT2M97']['valuemask']=True
plotres['AT2M97']['shift']=-273.15
#plotres['DTR']['cmp1']=cmap_cs_precp
plotres['DTR']['cmp1']=plt.get_cmap('jet')
cmp =cmap_BWR
plotres['DTR']['cmp2']=cmp
plotres['DTR']['unit']="$^\circ$C"
plotres['DTR']['convertcoef']=1
plotres['DTR']['valuemask']=True
plotres['RH']['cmp1']=plt.get_cmap('viridis_r')
cmp =cmap_BWR
plotres['RH']['cmp2']=cmp
plotres['RH']['unit']="$\%$"
plotres['RH']['convertcoef']=1
plotres['RH']['valuemask']=True
plotres['WIN']['cmp1']=cmap_haxby
cmp =cmap_BWR
plotres['WIN']['cmp2']=cmp
plotres['WIN']['unit']="$m/s$"
plotres['WIN']['convertcoef']=1
plotres['WIN']['valuemask']=True
plotres['GUST']['cmp1']=cmap_haxby
cmp =cmap_BWR
plotres['GUST']['cmp2']=cmp
plotres['GUST']['unit']="$m/s$"
plotres['GUST']['convertcoef']=1
plotres['GUST']['valuemask']=True
| sunchaoatmo/cplot | plotset.py | Python | gpl-3.0 | 28,891 | 0.077291 |
import numpy as np
import scipy
import math
import argparse
import lightcurve
import powerspectrum
#### MULTIPLY LIGHT CURVES TOGETHER ##################
#
# Little function that multiplies light curves of different
# processes together.
# base_lc can be any LightCurve object, even one of the three options given below.
# base_lc should be normalized to the desired mean; rednoise and qpo should be
# normalized to 1, and the envelope to 1/mean(flux).
#
# base_lc [LightCurve] = base light curve to use --> should be the longest one
# envelope [list] = burst envelope, deterministic function
# rednoise [list] = red noise profile
# qpo [list] = quasi-periodic oscillation
#
# !!! IMPORTANT!!! MAKE SURE envelope, rednoise AND qpo ARE LISTS, NOT NUMPY ARRAYS!!!
#
#
#
#
def multiply_lightcurves(base_lc, envelope=None, rednoise=None, qpo=None):
if envelope:
base_lc.counts[:len(envelope)] = base_lc.counts[:len(envelope)]*envelope
if rednoise:
base_lc.counts = base_lc.counts*rednoise
if qpo:
base_lc.counts = base_lc.counts*qpo
return
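# Usage sketch (added; not part of the original module). Assumes `times`, `mean_flux`
# and the envelope/rednoise/qpo lists are defined elsewhere and have compatible lengths:
#
#     base_lc = lightcurve.Lightcurve(times, counts=np.ones(len(times))*mean_flux)
#     multiply_lightcurves(base_lc, envelope=env, rednoise=rn, qpo=qpo)
#     # base_lc.counts is modified in place; the function itself returns None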
#############################################
#
# python implementation of Timmer+Koenig 1995
# simulations of red noise
#
#
#
class TimmerPS(powerspectrum.PowerSpectrum):
### fnyquist = number of points in light curve / 2.0
### dnu = frequency resolution, dnu = 1.0/length of time interval
### rms = fractional rms amplitude of the light curve
### nu0 = centroid frequency of Lorentzian (for QPO)
### gamma = width of Lorentzian (for QPO)
### pind = power law index
### pnorm = normalization of power law
### nphot = number of photons in light curve
### psshape = shape of power spectrum
### psmanual = put in array with chosen shape
    def __init__(self, fnyquist=4096.0, dnu=1.0, rms=1.0, nu0=None, gamma=None,
                 pind=None, pnorm=None, nphot=None, psshape='plaw', psmanual=None):
### make an empty PowerSpectrum object
powerspectrum.PowerSpectrum.__init__(self,lc=None, counts=None)
#### CREATE ARTIFICIAL POWER SPECTRUM
### number of elements in lc/ps
N = np.ceil(2.0*fnyquist/dnu)
#print "N: " + str(N)
### frequency array
self.n = N
self.freq = np.arange(math.floor(N/2.0))*dnu + dnu
self.fnyquist = fnyquist
self.dnu = dnu
self.nphot = nphot
### turn rms into a variance of the log-normal light curve
lnvar = np.log(rms**2.0 + 1.0)
#print("Variance of log-normal light curve: " + str(lnvar))
### make a shape for the power spectrum, depending on
### psshape specified
if psshape.lower() in ['flat', 'constant', 'white', 'white noise']:
## assume white noise power spectrum, <P> = N*sigma_ln
s = np.array([self.n*lnvar for x in self.freq])
elif psshape.lower() in ['powerlaw', 'plaw']:
s = self.n*lnvar*(1.0/self.freq)**pind
### Don't do other shapes for now, until I need them
### CAREFUL: normalization of these is not right yet!
elif psshape.lower() in ['qpo', 'lorentzian', 'periodic']:
#print('I am here!')
alpha = (gamma/math.pi)*dnu*N/2.0
sold = alpha/((self.freq-nu0)**2.0 + gamma**2.0)
snew = sold/sum(sold)
#print('sum snew: ' + str(sum(snew)))
s = (sold/sum(sold))*lnvar*fnyquist*self.n/self.dnu
# elif psshape.lower() in ['w+p', 'combined plaw']:
# s = np.array([rms**2.0+pnorm*(1/x)**2.0 for x in self.freq])
# elif psshape.lower() in ['w+q', 'combined qpo']:
# alpha = (sigma**2.0)*(gamma/math.pi)*dnu*N/2.0
# s = 2.0 + nphot*alpha/((self.freq-nu0)**2.0 + gamma**2.0)
elif psshape.lower() in ['manual', 'psmanual']:
if not psmanual is None:
#print(sum(psmanual/sum(psmanual)))
### for now, assume variance normalization
#s = (psmanual/sum(psmanual))*lnvar*fnyquist*self.n**2.0/2.0
s = (psmanual/sum(psmanual))*lnvar*fnyquist*self.n**2.0/(self.dnu)
#s = (psmanual/sum(psmanual))*self.n*(self.n/2.0)*lnvar
else:
raise Exception("No shape given!")
#sfinal = np.insert(s, 0, 0)
#print "len(s) : " + str(len(s))
#print "type(s): " + str(type(s))
### first element is zero, that will be the number of photons
#sfinal = np.insert(s, 0, 0.0)
### s is the power spectrum
self.s = s #sfinal
def makeFourierCoeffs(self):
nphot = self.nphot
N = self.n
a = np.zeros(len(self.s))
x1 = np.random.normal(size=len(self.s))*(self.s/2.0)**0.5
y1 = np.random.normal(size=len(self.s))*(self.s/2.0)**0.5
### S(fnyquist) is real
y1[-1] = 0.0
self.x1 = x1
self.y1 = y1
### now make complex Fourier pair
Fpos = [complex(re,im) for re, im in zip(x1,y1)]
Fneg = [complex(re,-im) for re, im in zip(x1,y1)]
#print "Fpos: " + str(Fpos[:5])
#print "Fpos: " + str(Fpos[-5:])
#print "Fneg: " + str(Fneg[:5])
#print "Fneg: " + str(Fneg[-5:])
Fpos.insert(0, (0+0j))
Fneg.reverse()
#print "Fneg: " + str(Fneg[:5])
#print "Fneg: " + str(len(Fneg))
#print "Fneg: " + str(Fneg[-5:])
### remove duplicate nyquist frequency and the nu=0 term
#Fneg = Fneg[1:1+int(np.round((N-1)/2))]
#Fneg = Fneg[:-1]
#print "Fneg: " + str(len(Fneg))
#Fpos.extend(Fneg)
Fpos.extend(Fneg)
#print "Fpos: " + str(len(Fpos))
#print "Fpos: " + str(Fpos[:5])
#print "Fpos: " + str(Fpos[1168:1172])
#print "Fpos: " + str(Fpos[-5:])
return Fpos
def simulateLightcurve(self, fourier, expon=True, lcmean = None):
### length of time interval
tmax = 1.0/self.dnu
### sampling rate
dt = tmax/self.n
#print(self.n)
### make a time array
time = np.arange(len(fourier))*tmax/self.n
f = fourier
phi = np.fft.ifft(f)#/np.sqrt(self.n)#/(self.nphot**0.5)
phi = np.array([x.real for x in phi])
### if expon == True, transform into lognormally distributed
###light curve such that there are no values < 0:
if expon == False:
flux = phi
elif expon == True and not lcmean is None:
lncounts = np.exp(phi)
flux = lncounts*lcmean/np.mean(lncounts)
else:
raise Exception("You must either specify a mean flux or set expon=False !")
lc = lightcurve.Lightcurve(time, counts=flux)
return lc
def makePeriodogram(self, fourier, norm='variance'):
f = fourier
f2 = np.array(f).conjugate()
ff2 = np.real(f*f2)
        s = ff2[0:int(self.n/2)]  #*2.0/(self.fnyquist*2*self.fnyquist)  (int cast: self.n comes from np.ceil and is a float)
if norm.lower() in ['variance', 'var']:
s = s*2.0/(self.fnyquist*2*self.fnyquist)
if norm.lower() in ['leahy']:
s = 2.0*s/self.nphot
        if norm.lower() in ['rms']:
            # the original divided by an undefined name `df`; the frequency
            # resolution self.dnu is assumed to be what was intended here
            s = 2.0*s/(self.dnu*self.nphot**2.0)
ps = powerspectrum.PowerSpectrum()
ps.freq = self.freq
ps.ps = s
return ps
##########################################################
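# Minimal usage sketch (added; not part of the original module). It exercises the
# typical workflow -- build a model spectrum, draw Fourier coefficients, transform
# them into a light curve, and periodogram the result -- and assumes the local
# lightcurve/powerspectrum modules imported above.
if __name__ == "__main__":
    ps_model = TimmerPS(fnyquist=256.0, dnu=0.1, rms=0.3, pind=2.0,
                        nphot=100000, psshape='plaw')
    coeffs = ps_model.makeFourierCoeffs()
    lc_sim = ps_model.simulateLightcurve(coeffs, expon=True, lcmean=1000.0)
    pgram = ps_model.makePeriodogram(coeffs, norm='variance')
    print("simulated %i bins, mean counts %.2f" % (len(lc_sim.counts),
                                                   np.mean(lc_sim.counts)))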
| dhuppenkothen/UTools | simpower.py | Python | bsd-2-clause | 7,317 | 0.02132 |
import ipaddress
import docker.types
def init():
pass
def get_next_cidr(client):
networks = client.networks.list()
last_cidr = ipaddress.ip_network("10.0.0.0/24")
for network in networks:
if (network.attrs["IPAM"] and network.attrs["IPAM"]["Config"]
and len(network.attrs["IPAM"]["Config"]) > 0
and network.attrs["IPAM"]["Config"][0]["Subnet"]):
cidr = ipaddress.ip_network(network.attrs["IPAM"]["Config"][0]["Subnet"])
if cidr.network_address.packed[0] == 10:
if cidr.prefixlen != 24:
raise Exception(
"Invalid network prefix length {0} for network {1}"
.format(cidr.prefixlen, network.name))
if cidr > last_cidr:
last_cidr = cidr
next_cidr = ipaddress.ip_network((last_cidr.network_address + 256).exploded + "/24")
if next_cidr.network_address.packed[0] > 10:
raise Exception("No more networks available")
last_cidr = next_cidr
return next_cidr
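# Example (added): with 10.0.0.0/24 and 10.0.1.0/24 already allocated, get_next_cidr
# returns 10.0.2.0/24 -- adding 256 to a /24 network address moves to the next /24 block.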
def create_network(client, name):
cidr = get_next_cidr(client)
print("Creating network {0} with subnet {1}".format(name, cidr.exploded))
networks = client.networks.list(names=[name])
if len(networks) > 0:
for network in networks:
network.remove()
ipam_pool = docker.types.IPAMPool(subnet=cidr.exploded,
gateway=(cidr.network_address + 1).exploded)
ipam_config = docker.types.IPAMConfig(pool_configs=[ipam_pool])
client.networks.create(name, ipam=ipam_config)
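# Illustrative usage (added; not part of the original module). Assumes a reachable
# Docker daemon and the docker-py client:
#
#     import docker
#     client = docker.from_env()
#     create_network(client, "puffin_example")  # removes any same-named network, then recreates it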
| puffinrocks/puffin | puffin/core/network.py | Python | agpl-3.0 | 1,606 | 0.003113 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Execute to run double Cole-Cole term characterization
"""
import sys
sys.path.append('..')
import test_helper as th
import os
import itertools
import numpy as np
import subprocess
testdir = 'data'
def _generate_cc_sets():
"""Generate multiple complex resistivity spectra by sampling a certain
CC-parameter space
"""
rho0_list = np.log(np.array((10, )))
# values the first (low-frequency) term is constructed from
m1_list = (0.05, 0.1)
tau1_list = np.log(np.array((0.4, 1.0)))
c1_list = (0.6, 0.8)
# values for the second term
m2_list = (0.1, )
tau2_list = np.log(np.array((0.0004, 0.00001)))
c2_list = (0.6, )
for cc_set in itertools.product(rho0_list,
m1_list,
tau1_list,
c1_list,
m2_list,
tau2_list,
c2_list
):
yield cc_set
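# Example (added): each cc_set yielded above is a 7-tuple
# (ln(rho0), m1, ln(tau1), c1, m2, ln(tau2), c2); the first one produced here is
# (ln(10), 0.05, ln(0.4), 0.6, 0.1, ln(0.0004), 0.6).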
def _get_frequencies():
return np.logspace(-3, 4, 20)
def _fit_spectra():
pwd = os.getcwd()
for directory in th._get_cc_dirs(testdir):
os.chdir(directory)
cmd = 'cc_fit.py -p -c 2 -m 2'
subprocess.call(cmd, shell=True)
os.chdir(pwd)
if __name__ == '__main__':
frequencies = _get_frequencies()
for x in _generate_cc_sets():
print x
th._generate_spectra(frequencies, testdir, _generate_cc_sets)
_fit_spectra()
th._evaluate_fits(testdir)
| m-weigand/Cole-Cole-fit | tests/2-term/test_cases.py | Python | gpl-3.0 | 1,602 | 0.003121 |
from backend import photos, boards
p = photos()
#print p.new('asdf',1,1)
print p.get(1)
b = boards()
print p.all(1)
print b.get(1)
| teriyakichild/photoboard | photoboard/tests/test.py | Python | apache-2.0 | 133 | 0.007519 |
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
from textwrap import dedent
from mock import MagicMock
from pants.backend.codegen.targets.java_thrift_library import JavaThriftLibrary
from pants.backend.jvm.targets.java_library import JavaLibrary
from pants.backend.jvm.targets.scala_library import ScalaLibrary
from pants.base.build_environment import get_buildroot
from pants.base.exceptions import TaskError
from pants.build_graph.address import Address
from pants.build_graph.build_file_aliases import BuildFileAliases
from pants.goal.context import Context
from pants.util.dirutil import safe_rmtree
from pants_test.tasks.task_test_base import TaskTestBase
from twitter.common.collections import OrderedSet
from pants.contrib.scrooge.tasks.scrooge_gen import ScroogeGen
# TODO (tdesai) Issue-240: Use JvmToolTaskTestBase for ScroogeGenTest
class ScroogeGenTest(TaskTestBase):
@classmethod
def task_type(cls):
return ScroogeGen
@property
def alias_groups(self):
return BuildFileAliases(targets={'java_thrift_library': JavaThriftLibrary})
def setUp(self):
super(ScroogeGenTest, self).setUp()
self.task_outdir = os.path.join(self.build_root, 'scrooge', 'gen-java')
def tearDown(self):
super(ScroogeGenTest, self).tearDown()
safe_rmtree(self.task_outdir)
def test_validate_compiler_configs(self):
# Set synthetic defaults for the global scope.
self.set_options_for_scope('thrift-defaults',
compiler='unchecked',
language='uniform',
rpc_style='async')
self.add_to_build_file('test_validate', dedent('''
java_thrift_library(name='one',
sources=[],
dependencies=[],
)
'''))
self.add_to_build_file('test_validate', dedent('''
java_thrift_library(name='two',
sources=[],
dependencies=[':one'],
)
'''))
self.add_to_build_file('test_validate', dedent('''
java_thrift_library(name='three',
sources=[],
dependencies=[':one'],
rpc_style='finagle',
)
'''))
target = self.target('test_validate:one')
context = self.context(target_roots=[target])
task = self.create_task(context)
task._validate_compiler_configs([self.target('test_validate:one')])
task._validate_compiler_configs([self.target('test_validate:two')])
with self.assertRaises(TaskError):
task._validate_compiler_configs([self.target('test_validate:three')])
def test_scala(self):
build_string = '''
java_thrift_library(name='a',
sources=['a.thrift'],
dependencies=[],
compiler='scrooge',
language='scala',
rpc_style='finagle'
)
'''
sources = [os.path.join(self.task_outdir, 'org/pantsbuild/example/Example.scala')]
self._test_help(build_string, ScalaLibrary, sources)
def test_android(self):
build_string = '''
java_thrift_library(name='a',
sources=['a.thrift'],
dependencies=[],
compiler='scrooge',
language='android',
rpc_style='finagle'
)
'''
sources = [os.path.join(self.task_outdir, 'org/pantsbuild/android_example/Example.java')]
self._test_help(build_string, JavaLibrary, sources)
def _test_help(self, build_string, library_type, sources):
contents = dedent('''#@namespace android org.pantsbuild.android_example
namespace java org.pantsbuild.example
struct Example {
1: optional i64 number
}
''')
self.create_file(relpath='test_smoke/a.thrift', contents=contents)
self.add_to_build_file('test_smoke', dedent(build_string))
target = self.target('test_smoke:a')
context = self.context(target_roots=[target])
task = self.create_task(context)
task._declares_service = lambda source: False
task._outdir = MagicMock()
task._outdir.return_value = self.task_outdir
task.gen = MagicMock()
task.gen.return_value = {'test_smoke/a.thrift': sources}
saved_add_new_target = Context.add_new_target
try:
mock = MagicMock()
Context.add_new_target = mock
task.execute()
self.assertEquals(1, mock.call_count)
_, call_kwargs = mock.call_args
self.assertEquals(call_kwargs['target_type'], library_type)
self.assertEquals(call_kwargs['dependencies'], OrderedSet())
self.assertEquals(call_kwargs['provides'], None)
self.assertEquals(call_kwargs['sources'], [])
self.assertEquals(call_kwargs['derived_from'], target)
finally:
Context.add_new_target = saved_add_new_target
| jtrobec/pants | contrib/scrooge/tests/python/pants_test/contrib/scrooge/tasks/test_scrooge_gen.py | Python | apache-2.0 | 4,867 | 0.005137 |
from looker_sdk import methods, models40
import looker_sdk
import exceptions
sdk = looker_sdk.init40("../looker.ini")
def create_simple_schedule(dashboard_id: int, user_id: int, schedule_title: str, format: str,
                           email: str, type: str, message: str, crontab: str):
    ### For more information on the accepted params, see https://github.com/looker-open-source/sdk-codegen/blob/master/python/looker_sdk/sdk/api31/methods.py#L2144
    ### For schedule destinations, see https://github.com/looker-open-source/sdk-codegen/blob/master/python/looker_sdk/sdk/api31/models.py#L4601
### Supported formats vary by destination, but include: "txt", "csv", "inline_json", "json", "json_detail", "xlsx", "html", "wysiwyg_pdf", "assembled_pdf", "wysiwyg_png"
### type: Type of the address ('email', 'webhook', 's3', or 'sftp')
    schedule = sdk.create_scheduled_plan(
        body=models40.WriteScheduledPlan(
            name=schedule_title,
            dashboard_id=dashboard_id,
            user_id=user_id,
            run_as_recipient=True,
            crontab=crontab,
            scheduled_plan_destination=[
                models40.ScheduledPlanDestination(
                    format=format,
                    apply_formatting=True,
                    apply_vis=True,
                    address=email,
                    type=type,
                    message=message)]))
create_simple_schedule(1234,453,"This is an automated test", "assembled_pdf", "test@looker.com", "email", "Hi Looker User!", "0 1 * * *")
| looker-open-source/sdk-codegen | examples/python/simple_schedule_plan.py | Python | mit | 1,296 | 0.022377 |
# Copyright (C) 2013 eNovance SAS <licensing@enovance.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.api.object_storage import base
from tempest import config
from tempest.lib.common.utils import data_utils
from tempest.lib import decorators
from tempest.lib import exceptions as lib_exc
CONF = config.CONF
class ObjectACLsNegativeTest(base.BaseObjectTest):
"""Negative tests of object ACLs"""
credentials = [['operator', CONF.object_storage.operator_role],
['operator_alt', CONF.object_storage.operator_role]]
@classmethod
def setup_credentials(cls):
super(ObjectACLsNegativeTest, cls).setup_credentials()
cls.os_operator = cls.os_roles_operator_alt
@classmethod
def resource_setup(cls):
super(ObjectACLsNegativeTest, cls).resource_setup()
cls.test_auth_data = cls.os_operator.auth_provider.auth_data
def setUp(self):
super(ObjectACLsNegativeTest, self).setUp()
self.container_name = data_utils.rand_name(name='TestContainer')
self.container_client.update_container(self.container_name)
@classmethod
def resource_cleanup(cls):
cls.delete_containers()
super(ObjectACLsNegativeTest, cls).resource_cleanup()
@decorators.attr(type=['negative'])
@decorators.idempotent_id('af587587-0c24-4e15-9822-8352ce711013')
def test_write_object_without_using_creds(self):
"""Test writing object without using credentials"""
# trying to create object with empty headers
# X-Auth-Token is not provided
object_name = data_utils.rand_name(name='Object')
self.object_client.auth_provider.set_alt_auth_data(
request_part='headers',
auth_data=None
)
self.assertRaises(lib_exc.Unauthorized,
self.object_client.create_object,
self.container_name, object_name, 'data', headers={})
@decorators.attr(type=['negative'])
@decorators.idempotent_id('af85af0b-a025-4e72-a90e-121babf55720')
def test_delete_object_without_using_creds(self):
"""Test deleting object without using credentials"""
# create object
object_name = data_utils.rand_name(name='Object')
self.object_client.create_object(self.container_name, object_name,
'data')
# trying to delete object with empty headers
# X-Auth-Token is not provided
self.object_client.auth_provider.set_alt_auth_data(
request_part='headers',
auth_data=None
)
self.assertRaises(lib_exc.Unauthorized,
self.object_client.delete_object,
self.container_name, object_name)
@decorators.attr(type=['negative'])
@decorators.idempotent_id('63d84e37-55a6-42e2-9e5f-276e60e26a00')
def test_write_object_with_non_authorized_user(self):
"""Test writing object with non-authorized user"""
# User provided token is forbidden. ACL are not set
object_name = data_utils.rand_name(name='Object')
# trying to create object with non-authorized user
self.object_client.auth_provider.set_alt_auth_data(
request_part='headers',
auth_data=self.test_auth_data
)
self.assertRaises(lib_exc.Forbidden,
self.object_client.create_object,
self.container_name, object_name, 'data', headers={})
@decorators.attr(type=['negative'])
@decorators.idempotent_id('abf63359-be52-4feb-87dd-447689fc77fd')
def test_read_object_with_non_authorized_user(self):
"""Test reading object with non-authorized user"""
# User provided token is forbidden. ACL are not set
object_name = data_utils.rand_name(name='Object')
resp, _ = self.object_client.create_object(
self.container_name, object_name, 'data')
self.assertHeaders(resp, 'Object', 'PUT')
# trying to get object with non authorized user token
self.object_client.auth_provider.set_alt_auth_data(
request_part='headers',
auth_data=self.test_auth_data
)
self.assertRaises(lib_exc.Forbidden,
self.object_client.get_object,
self.container_name, object_name)
@decorators.attr(type=['negative'])
@decorators.idempotent_id('7343ac3d-cfed-4198-9bb0-00149741a492')
def test_delete_object_with_non_authorized_user(self):
"""Test deleting object with non-authorized user"""
# User provided token is forbidden. ACL are not set
object_name = data_utils.rand_name(name='Object')
resp, _ = self.object_client.create_object(
self.container_name, object_name, 'data')
self.assertHeaders(resp, 'Object', 'PUT')
# trying to delete object with non-authorized user token
self.object_client.auth_provider.set_alt_auth_data(
request_part='headers',
auth_data=self.test_auth_data
)
self.assertRaises(lib_exc.Forbidden,
self.object_client.delete_object,
self.container_name, object_name)
@decorators.attr(type=['negative'])
@decorators.idempotent_id('9ed01334-01e9-41ea-87ea-e6f465582823')
def test_read_object_without_rights(self):
"""Test reading object without rights"""
# update X-Container-Read metadata ACL
cont_headers = {'X-Container-Read': 'badtenant:baduser'}
resp_meta, _ = (
self.container_client.create_update_or_delete_container_metadata(
self.container_name, create_update_metadata=cont_headers,
create_update_metadata_prefix=''))
self.assertHeaders(resp_meta, 'Container', 'POST')
# create object
object_name = data_utils.rand_name(name='Object')
resp, _ = self.object_client.create_object(self.container_name,
object_name, 'data')
self.assertHeaders(resp, 'Object', 'PUT')
# Trying to read the object without rights
self.object_client.auth_provider.set_alt_auth_data(
request_part='headers',
auth_data=self.test_auth_data
)
self.assertRaises(lib_exc.Forbidden,
self.object_client.get_object,
self.container_name, object_name)
@decorators.attr(type=['negative'])
@decorators.idempotent_id('a3a585a7-d8cf-4b65-a1a0-edc2b1204f85')
def test_write_object_without_rights(self):
"""Test writing object without rights"""
# update X-Container-Write metadata ACL
cont_headers = {'X-Container-Write': 'badtenant:baduser'}
resp_meta, _ = (
self.container_client.create_update_or_delete_container_metadata(
self.container_name, create_update_metadata=cont_headers,
create_update_metadata_prefix=''))
self.assertHeaders(resp_meta, 'Container', 'POST')
# Trying to write the object without rights
self.object_client.auth_provider.set_alt_auth_data(
request_part='headers',
auth_data=self.test_auth_data
)
object_name = data_utils.rand_name(name='Object')
self.assertRaises(lib_exc.Forbidden,
self.object_client.create_object,
self.container_name,
object_name, 'data', headers={})
@decorators.attr(type=['negative'])
@decorators.idempotent_id('8ba512ad-aa6e-444e-b882-2906a0ea2052')
def test_write_object_without_write_rights(self):
"""Test writing object without write rights"""
# update X-Container-Read and X-Container-Write metadata ACL
tenant_name = self.os_operator.credentials.tenant_name
username = self.os_operator.credentials.username
cont_headers = {'X-Container-Read':
tenant_name + ':' + username,
'X-Container-Write': ''}
resp_meta, _ = (
self.container_client.create_update_or_delete_container_metadata(
self.container_name, create_update_metadata=cont_headers,
create_update_metadata_prefix=''))
self.assertHeaders(resp_meta, 'Container', 'POST')
# Trying to write the object without write rights
self.object_client.auth_provider.set_alt_auth_data(
request_part='headers',
auth_data=self.test_auth_data
)
object_name = data_utils.rand_name(name='Object')
self.assertRaises(lib_exc.Forbidden,
self.object_client.create_object,
self.container_name,
object_name, 'data', headers={})
@decorators.attr(type=['negative'])
@decorators.idempotent_id('b4e366f8-f185-47ab-b789-df4416f9ecdb')
def test_delete_object_without_write_rights(self):
"""Test deleting object without write rights"""
# update X-Container-Read and X-Container-Write metadata ACL
tenant_name = self.os_operator.credentials.tenant_name
username = self.os_operator.credentials.username
cont_headers = {'X-Container-Read':
tenant_name + ':' + username,
'X-Container-Write': ''}
resp_meta, _ = (
self.container_client.create_update_or_delete_container_metadata(
self.container_name, create_update_metadata=cont_headers,
create_update_metadata_prefix=''))
self.assertHeaders(resp_meta, 'Container', 'POST')
# create object
object_name = data_utils.rand_name(name='Object')
resp, _ = self.object_client.create_object(self.container_name,
object_name, 'data')
self.assertHeaders(resp, 'Object', 'PUT')
# Trying to delete the object without write rights
self.object_client.auth_provider.set_alt_auth_data(
request_part='headers',
auth_data=self.test_auth_data
)
self.assertRaises(lib_exc.Forbidden,
self.object_client.delete_object,
self.container_name,
object_name)
| openstack/tempest | tempest/api/object_storage/test_container_acl_negative.py | Python | apache-2.0 | 10,958 | 0 |
# -*- coding: utf-8 -*-
import functools
import urlparse
import logging
import re
import pytz
from dirtyfields import DirtyFieldsMixin
from include import IncludeManager
from django.db import models
from django.db.models import Q
from django.utils import timezone
from django.contrib.contenttypes.fields import GenericRelation
from django.core.exceptions import ValidationError
from django.dispatch import receiver
from guardian.shortcuts import get_objects_for_user
from django.contrib.contenttypes.models import ContentType
from django.db.models.signals import post_save
from framework.auth import Auth
from framework.exceptions import PermissionsError
from framework.analytics import increment_user_activity_counters
from framework.auth import oauth_scopes
from osf.models import Subject, Tag, OSFUser, PreprintProvider
from osf.models.preprintlog import PreprintLog
from osf.models.contributor import PreprintContributor
from osf.models.mixins import ReviewableMixin, Taggable, Loggable, GuardianMixin
from osf.models.validators import validate_subject_hierarchy, validate_title, validate_doi
from osf.utils.fields import NonNaiveDateTimeField
from osf.utils.workflows import DefaultStates, ReviewStates
from osf.utils import sanitize
from osf.utils.requests import get_request_and_user_id, string_type_request_headers
from website.notifications.emails import get_user_subscriptions
from website.notifications import utils
from website.identifiers.clients import CrossRefClient, ECSArXivCrossRefClient
from website.project.licenses import set_license
from website.util import api_v2_url, api_url_for, web_url_for
from website.citations.utils import datetime_to_csl
from website import settings, mails
from website.preprints.tasks import update_or_enqueue_on_preprint_updated
from osf.models.base import BaseModel, GuidMixin, GuidMixinQuerySet
from osf.models.identifiers import IdentifierMixin, Identifier
from osf.models.mixins import TaxonomizableMixin, ContributorMixin, SpamOverrideMixin
from addons.osfstorage.models import OsfStorageFolder, Region, BaseFileNode, OsfStorageFile
from framework.sentry import log_exception
from osf.exceptions import (
PreprintStateError, InvalidTagError, TagNotFoundError
)
logger = logging.getLogger(__name__)
class PreprintManager(IncludeManager):
def get_queryset(self):
return GuidMixinQuerySet(self.model, using=self._db)
no_user_query = Q(
is_published=True,
is_public=True,
deleted__isnull=True,
primary_file__isnull=False,
primary_file__deleted_on__isnull=True) & ~Q(machine_state=DefaultStates.INITIAL.value) \
& (Q(date_withdrawn__isnull=True) | Q(ever_public=True))
def preprint_permissions_query(self, user=None, allow_contribs=True, public_only=False):
include_non_public = user and not public_only
if include_non_public:
moderator_for = get_objects_for_user(user, 'view_submissions', PreprintProvider)
admin_user_query = Q(id__in=get_objects_for_user(user, 'admin_preprint', self.filter(Q(preprintcontributor__user_id=user.id))))
reviews_user_query = Q(is_public=True, provider__in=moderator_for)
if allow_contribs:
contrib_user_query = ~Q(machine_state=DefaultStates.INITIAL.value) & Q(id__in=get_objects_for_user(user, 'read_preprint', self.filter(Q(preprintcontributor__user_id=user.id))))
query = (self.no_user_query | contrib_user_query | admin_user_query | reviews_user_query)
else:
query = (self.no_user_query | admin_user_query | reviews_user_query)
else:
moderator_for = PreprintProvider.objects.none()
query = self.no_user_query
if not moderator_for.exists():
query = query & Q(Q(date_withdrawn__isnull=True) | Q(ever_public=True))
return query
def can_view(self, base_queryset=None, user=None, allow_contribs=True, public_only=False):
if base_queryset is None:
base_queryset = self
include_non_public = user and not public_only
ret = base_queryset.filter(
self.preprint_permissions_query(
user=user,
allow_contribs=allow_contribs,
public_only=public_only,
) & Q(deleted__isnull=True) & ~Q(machine_state=DefaultStates.INITIAL.value)
)
# The auth subquery currently results in duplicates returned
# https://openscience.atlassian.net/browse/OSF-9058
# TODO: Remove need for .distinct using correct subqueries
return ret.distinct('id', 'created') if include_non_public else ret
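    # Illustrative usage (added; `request_user` is a hypothetical variable):
    #     Preprint.objects.can_view(user=request_user)  # contributor/moderator-aware listing
    #     Preprint.objects.can_view(user=None)           # anonymous: published, public preprints only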
class Preprint(DirtyFieldsMixin, GuidMixin, IdentifierMixin, ReviewableMixin, BaseModel,
Loggable, Taggable, GuardianMixin, SpamOverrideMixin, TaxonomizableMixin, ContributorMixin):
objects = PreprintManager()
# Preprint fields that trigger a check to the spam filter on save
SPAM_CHECK_FIELDS = {
'title',
'description',
}
# Node fields that trigger an update to elastic search on save
SEARCH_UPDATE_FIELDS = {
'title',
'description',
'is_published',
'license',
'is_public',
'deleted',
'subjects',
'primary_file',
'contributors',
'tags',
}
# Setting for ContributorMixin
DEFAULT_CONTRIBUTOR_PERMISSIONS = 'write'
provider = models.ForeignKey('osf.PreprintProvider',
on_delete=models.SET_NULL,
related_name='preprints',
null=True, blank=True, db_index=True)
node = models.ForeignKey('osf.AbstractNode', on_delete=models.SET_NULL,
related_name='preprints',
null=True, blank=True, db_index=True)
is_published = models.BooleanField(default=False, db_index=True)
date_published = NonNaiveDateTimeField(null=True, blank=True)
original_publication_date = NonNaiveDateTimeField(null=True, blank=True)
license = models.ForeignKey('osf.NodeLicenseRecord',
on_delete=models.SET_NULL, null=True, blank=True)
identifiers = GenericRelation(Identifier, related_query_name='preprints')
preprint_doi_created = NonNaiveDateTimeField(default=None, null=True, blank=True)
date_withdrawn = NonNaiveDateTimeField(default=None, null=True, blank=True)
withdrawal_justification = models.TextField(default='', blank=True)
ever_public = models.BooleanField(default=False, blank=True)
title = models.TextField(
validators=[validate_title]
) # this should be a charfield but data from mongo didn't fit in 255
description = models.TextField(blank=True, default='')
creator = models.ForeignKey(OSFUser,
db_index=True,
related_name='preprints_created',
on_delete=models.SET_NULL,
null=True, blank=True)
_contributors = models.ManyToManyField(OSFUser,
through=PreprintContributor,
related_name='preprints')
article_doi = models.CharField(max_length=128,
validators=[validate_doi],
null=True, blank=True)
files = GenericRelation('osf.OsfStorageFile', object_id_field='target_object_id', content_type_field='target_content_type')
primary_file = models.ForeignKey('osf.OsfStorageFile', null=True, blank=True, related_name='preprint')
# (for legacy preprints), pull off of node
is_public = models.BooleanField(default=True, db_index=True)
# Datetime when old node was deleted (for legacy preprints)
deleted = NonNaiveDateTimeField(null=True, blank=True)
# For legacy preprints
migrated = NonNaiveDateTimeField(null=True, blank=True)
region = models.ForeignKey(Region, null=True, blank=True, on_delete=models.CASCADE)
groups = {
'read': ('read_preprint',),
'write': ('read_preprint', 'write_preprint',),
'admin': ('read_preprint', 'write_preprint', 'admin_preprint',)
}
group_format = 'preprint_{self.id}_{group}'
class Meta:
permissions = (
('view_preprint', 'Can view preprint details in the admin app'),
('read_preprint', 'Can read the preprint'),
('write_preprint', 'Can write the preprint'),
('admin_preprint', 'Can manage the preprint'),
)
def __unicode__(self):
return '{} ({} preprint) (guid={}){}'.format(self.title, 'published' if self.is_published else 'unpublished', self._id, ' with supplemental files on ' + self.node.__unicode__() if self.node else '')
@property
def is_deleted(self):
return bool(self.deleted)
@property
def root_folder(self):
try:
return OsfStorageFolder.objects.get(name='', target_object_id=self.id, target_content_type_id=ContentType.objects.get_for_model(Preprint).id, is_root=True)
except BaseFileNode.DoesNotExist:
return None
@property
def osfstorage_region(self):
return self.region
@property
def contributor_email_template(self):
return 'preprint'
@property
def file_read_scope(self):
return oauth_scopes.CoreScopes.PREPRINT_FILE_READ
@property
def file_write_scope(self):
return oauth_scopes.CoreScopes.PREPRINT_FILE_WRITE
@property
def visible_contributors(self):
# Overrides ContributorMixin
return OSFUser.objects.filter(
preprintcontributor__preprint=self,
preprintcontributor__visible=True
).order_by('preprintcontributor___order')
@property
def log_class(self):
# Property needed for ContributorMixin
return PreprintLog
@property
def contributor_class(self):
# Property needed for ContributorMixin
return PreprintContributor
@property
def contributor_kwargs(self):
# Property needed for ContributorMixin
return {
'preprint': self
}
@property
def order_by_contributor_field(self):
# Property needed for ContributorMixin
return 'preprintcontributor___order'
@property
def log_params(self):
# Property needed for ContributorMixin
return {
'preprint': self._id
}
@property
def contributor_set(self):
# Property needed for ContributorMixin
return self.preprintcontributor_set
@property
def state_error(self):
# Property needed for ContributorMixin
return PreprintStateError
@property
def is_retracted(self):
return self.date_withdrawn is not None
@property
def verified_publishable(self):
return self.is_published and \
self.is_public and \
self.has_submitted_preprint and not \
self.deleted and not \
self.is_preprint_orphan and not \
(self.is_retracted and not self.ever_public)
@property
def should_request_identifiers(self):
return not self.all_tags.filter(name='qatest').exists()
@property
def has_pending_withdrawal_request(self):
return self.requests.filter(request_type='withdrawal', machine_state='pending').exists()
@property
def has_withdrawal_request(self):
return self.requests.filter(request_type='withdrawal').exists()
@property
def preprint_doi(self):
return self.get_identifier_value('doi')
@property
def is_preprint_orphan(self):
if not self.primary_file_id:
return True
try:
primary_file = self.primary_file
except OsfStorageFile.DoesNotExist:
primary_file = None
if not primary_file or primary_file.deleted_on or primary_file.target != self:
return True
return False
@property
def has_submitted_preprint(self):
return self.machine_state != DefaultStates.INITIAL.value
@property
def deep_url(self):
# Required for GUID routing
return '/preprints/{}/'.format(self._id)
@property
def url(self):
if (self.provider.domain_redirect_enabled and self.provider.domain) or self.provider._id == 'osf':
return '/{}/'.format(self._id)
return '/preprints/{}/{}/'.format(self.provider._id, self._id)
@property
def absolute_url(self):
return urlparse.urljoin(
self.provider.domain if self.provider.domain_redirect_enabled else settings.DOMAIN,
self.url
)
@property
def absolute_api_v2_url(self):
path = '/preprints/{}/'.format(self._id)
return api_v2_url(path)
@property
def display_absolute_url(self):
url = self.absolute_url
if url is not None:
return re.sub(r'https?:', '', url).strip('/')
@property
def linked_nodes_self_url(self):
return self.absolute_api_v2_url + 'relationships/node/'
@property
def admin_contributor_ids(self):
# Overrides ContributorMixin
return self.get_group('admin').user_set.filter(is_active=True).values_list('guids___id', flat=True)
@property
def csl(self): # formats node information into CSL format for citation parsing
"""a dict in CSL-JSON schema
For details on this schema, see:
https://github.com/citation-style-language/schema#csl-json-schema
"""
csl = {
'id': self._id,
'title': sanitize.unescape_entities(self.title),
'author': [
contributor.csl_name(self._id) # method in auth/model.py which parses the names of authors
for contributor in self.visible_contributors
],
'type': 'webpage',
'URL': self.display_absolute_url,
'publisher': 'OSF Preprints' if self.provider.name == 'Open Science Framework' else self.provider.name
}
article_doi = self.article_doi
preprint_doi = self.preprint_doi
if article_doi:
csl['DOI'] = article_doi
elif preprint_doi and self.is_published and self.preprint_doi_created:
csl['DOI'] = preprint_doi
if self.date_published:
csl['issued'] = datetime_to_csl(self.date_published)
return csl
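        # Example of the returned dict (added; all values are purely illustrative):
        #     {'id': 'abc12', 'title': 'My preprint', 'author': [...], 'type': 'webpage',
        #      'URL': 'osf.io/abc12', 'publisher': 'OSF Preprints',
        #      'DOI': '10.31219/osf.io/abc12', 'issued': {'date-parts': [[2018, 5, 1]]}}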
def web_url_for(self, view_name, _absolute=False, _guid=False, *args, **kwargs):
return web_url_for(view_name, pid=self._id,
_absolute=_absolute, _guid=_guid, *args, **kwargs)
def api_url_for(self, view_name, _absolute=False, *args, **kwargs):
return api_url_for(view_name, pid=self._id, _absolute=_absolute, *args, **kwargs)
def get_absolute_url(self):
return self.absolute_api_v2_url
def add_log(self, action, params, auth, foreign_user=None, log_date=None, save=True, request=None):
user = None
if auth:
user = auth.user
elif request:
user = request.user
params['preprint'] = params.get('preprint') or self._id
log = PreprintLog(
action=action, user=user, foreign_user=foreign_user,
params=params, preprint=self
)
log.save()
if self.logs.count() == 1:
self.last_logged = log.created.replace(tzinfo=pytz.utc)
else:
self.last_logged = self.logs.first().created
if save:
self.save()
if user:
increment_user_activity_counters(user._primary_key, action, log.created.isoformat())
return log
def can_view_files(self, auth=None):
if self.is_retracted:
return False
if not auth or not auth.user:
return self.verified_publishable
else:
return self.can_view(auth=auth)
# Overrides ContributorMixin entirely
# TODO: When nodes user guardian as well, move this to ContributorMixin
def has_permission(self, user, permission):
"""Check whether user has permission.
:param User user: User to test
:param str permission: Required permission
:returns: User has required permission
"""
if not user:
return False
return user.has_perm('{}_preprint'.format(permission), self)
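        # e.g. has_permission(user, 'write') checks the guardian object permission
        # 'write_preprint' declared in Meta.permissions above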
# Overrides ContributorMixin entirely
# TODO: When nodes user guardian as well, move this to ContributorMixin
def set_permissions(self, user, permissions, validate=True, save=False):
# Ensure that user's permissions cannot be lowered if they are the only admin
if isinstance(user, PreprintContributor):
user = user.user
if validate and (self.has_permission(user, 'admin') and 'admin' not in permissions):
if self.get_group('admin').user_set.count() <= 1:
raise PreprintStateError('Must have at least one registered admin contributor')
self.clear_permissions(user)
self.add_permission(user, permissions)
if save:
self.save()
def get_addons(self):
# Override for ContributorMixin, Preprints don't have addons
return []
def get_subjects(self):
ret = []
for subj_list in self.subject_hierarchy:
subj_hierarchy = []
for subj in subj_list:
if subj:
subj_hierarchy += ({'id': subj._id, 'text': subj.text}, )
if subj_hierarchy:
ret.append(subj_hierarchy)
return ret
def set_subjects(self, preprint_subjects, auth, log=True):
if not self.has_permission(auth.user, 'write'):
raise PermissionsError('Must have admin or write permissions to change a preprint\'s subjects.')
old_subjects = list(self.subjects.values_list('id', flat=True))
self.subjects.clear()
for subj_list in preprint_subjects:
subj_hierarchy = []
for s in subj_list:
subj_hierarchy.append(s)
if subj_hierarchy:
validate_subject_hierarchy(subj_hierarchy)
for s_id in subj_hierarchy:
self.subjects.add(Subject.load(s_id))
if log:
self.add_log(
action=PreprintLog.SUBJECTS_UPDATED,
params={
'subjects': list(self.subjects.values('_id', 'text')),
'old_subjects': list(Subject.objects.filter(id__in=old_subjects).values('_id', 'text')),
'preprint': self._id
},
auth=auth,
save=False,
)
self.save(old_subjects=old_subjects)
def set_primary_file(self, preprint_file, auth, save=False):
if not self.root_folder:
raise PreprintStateError('Preprint needs a root folder.')
if not self.has_permission(auth.user, 'write'):
raise PermissionsError('Must have admin or write permissions to change a preprint\'s primary file.')
if preprint_file.target != self or preprint_file.provider != 'osfstorage':
raise ValueError('This file is not a valid primary file for this preprint.')
existing_file = self.primary_file
self.primary_file = preprint_file
self.primary_file.move_under(self.root_folder)
self.primary_file.save()
# only log if updating the preprint file, not adding for the first time
if existing_file:
self.add_log(
action=PreprintLog.FILE_UPDATED,
params={
'preprint': self._id,
'file': self.primary_file._id
},
auth=auth,
save=False
)
if save:
self.save()
update_or_enqueue_on_preprint_updated(preprint_id=self._id, saved_fields=['primary_file'])
def set_published(self, published, auth, save=False):
if not self.has_permission(auth.user, 'admin'):
raise PermissionsError('Only admins can publish a preprint.')
if self.is_published and not published:
raise ValueError('Cannot unpublish preprint.')
self.is_published = published
if published:
if not self.title:
raise ValueError('Preprint needs a title; cannot publish.')
if not (self.primary_file and self.primary_file.target == self):
raise ValueError('Preprint is not a valid preprint; cannot publish.')
if not self.provider:
raise ValueError('Preprint provider not specified; cannot publish.')
if not self.subjects.exists():
raise ValueError('Preprint must have at least one subject to be published.')
self.date_published = timezone.now()
# For legacy preprints, not logging
self.set_privacy('public', log=False, save=False)
# In case this provider is ever set up to use a reviews workflow, put this preprint in a sensible state
self.machine_state = ReviewStates.ACCEPTED.value
self.date_last_transitioned = self.date_published
# This preprint will have a tombstone page when it's withdrawn.
self.ever_public = True
self.add_log(
action=PreprintLog.PUBLISHED,
params={
'preprint': self._id
},
auth=auth,
save=False,
)
self._send_preprint_confirmation(auth)
if save:
self.save()
def set_preprint_license(self, license_detail, auth, save=False):
license_record, license_changed = set_license(self, license_detail, auth, node_type='preprint')
if license_changed:
self.add_log(
action=PreprintLog.CHANGED_LICENSE,
params={
'preprint': self._id,
'new_license': license_record.node_license.name
},
auth=auth,
save=False
)
if save:
self.save()
update_or_enqueue_on_preprint_updated(preprint_id=self._id, saved_fields=['license'])
def set_identifier_values(self, doi, save=False):
self.set_identifier_value('doi', doi)
self.preprint_doi_created = timezone.now()
if save:
self.save()
def get_doi_client(self):
if settings.CROSSREF_URL:
if self.provider._id == 'ecsarxiv':
return ECSArXivCrossRefClient(base_url=settings.CROSSREF_URL)
return CrossRefClient(base_url=settings.CROSSREF_URL)
else:
return None
def save(self, *args, **kwargs):
first_save = not bool(self.pk)
saved_fields = self.get_dirty_fields() or []
old_subjects = kwargs.pop('old_subjects', [])
if saved_fields:
request, user_id = get_request_and_user_id()
request_headers = string_type_request_headers(request)
user = OSFUser.load(user_id)
if user:
self.check_spam(user, saved_fields, request_headers)
if not first_save and ('ever_public' in saved_fields and saved_fields['ever_public']):
raise ValidationError('Cannot set "ever_public" to False')
ret = super(Preprint, self).save(*args, **kwargs)
if first_save:
self._set_default_region()
self.update_group_permissions()
self._add_creator_as_contributor()
if (not first_save and 'is_published' in saved_fields) or self.is_published:
update_or_enqueue_on_preprint_updated(preprint_id=self._id, old_subjects=old_subjects, saved_fields=saved_fields)
return ret
def update_or_enqueue_on_resource_updated(self, user_id, first_save, saved_fields):
# Needed for ContributorMixin
return update_or_enqueue_on_preprint_updated(preprint_id=self._id, saved_fields=saved_fields)
def _set_default_region(self):
user_settings = self.creator.get_addon('osfstorage')
self.region_id = user_settings.default_region_id
self.save()
def _add_creator_as_contributor(self):
self.add_contributor(self.creator, permissions='admin', visible=True, log=False, save=True)
def _send_preprint_confirmation(self, auth):
# Send creator confirmation email
recipient = self.creator
event_type = utils.find_subscription_type('global_reviews')
user_subscriptions = get_user_subscriptions(recipient, event_type)
if self.provider._id == 'osf':
logo = settings.OSF_PREPRINTS_LOGO
else:
logo = self.provider._id
context = {
'domain': settings.DOMAIN,
'reviewable': self,
'workflow': self.provider.reviews_workflow,
'provider_url': '{domain}preprints/{provider_id}'.format(
domain=self.provider.domain or settings.DOMAIN,
provider_id=self.provider._id if not self.provider.domain else '').strip('/'),
'provider_contact_email': self.provider.email_contact or settings.OSF_CONTACT_EMAIL,
'provider_support_email': self.provider.email_support or settings.OSF_SUPPORT_EMAIL,
'no_future_emails': user_subscriptions['none'],
'is_creator': True,
'provider_name': 'OSF Preprints' if self.provider.name == 'Open Science Framework' else self.provider.name,
'logo': logo,
}
mails.send_mail(
recipient.username,
mails.REVIEWS_SUBMISSION_CONFIRMATION,
mimetype='html',
user=recipient,
**context
)
# FOLLOWING BEHAVIOR NOT SPECIFIC TO PREPRINTS
@property
def all_tags(self):
"""Return a queryset containing all of this node's tags (incl. system tags)."""
# Tag's default manager only returns non-system tags, so we can't use self.tags
return Tag.all_tags.filter(preprint_tagged=self)
@property
def system_tags(self):
"""The system tags associated with this node. This currently returns a list of string
names for the tags, for compatibility with v1. Eventually, we can just return the
QuerySet.
"""
return self.all_tags.filter(system=True).values_list('name', flat=True)
# Override Taggable
def add_tag_log(self, tag, auth):
self.add_log(
action=PreprintLog.TAG_ADDED,
params={
'preprint': self._id,
'tag': tag.name
},
auth=auth,
save=False
)
# Override Taggable
def on_tag_added(self, tag):
update_or_enqueue_on_preprint_updated(preprint_id=self._id, saved_fields=['tags'])
def remove_tag(self, tag, auth, save=True):
if not tag:
raise InvalidTagError
elif not self.tags.filter(name=tag).exists():
raise TagNotFoundError
else:
tag_obj = Tag.objects.get(name=tag)
self.tags.remove(tag_obj)
self.add_log(
action=PreprintLog.TAG_REMOVED,
params={
'preprint': self._id,
'tag': tag,
},
auth=auth,
save=False,
)
if save:
self.save()
update_or_enqueue_on_preprint_updated(preprint_id=self._id, saved_fields=['tags'])
return True
def set_supplemental_node(self, node, auth, save=False):
if not self.has_permission(auth.user, 'write'):
raise PermissionsError('You must have write permissions to set a supplemental node.')
if not node.has_permission(auth.user, 'write'):
raise PermissionsError('You must have write permissions on the supplemental node to attach.')
if node.is_deleted:
raise ValueError('Cannot attach a deleted project to a preprint.')
self.node = node
self.add_log(
action=PreprintLog.SUPPLEMENTAL_NODE_ADDED,
params={
'preprint': self._id,
'node': self.node._id,
},
auth=auth,
save=False,
)
if save:
self.save()
def unset_supplemental_node(self, auth, save=False):
if not self.has_permission(auth.user, 'write'):
raise PermissionsError('You must have write permissions to set a supplemental node.')
current_node_id = self.node._id if self.node else None
self.node = None
self.add_log(
action=PreprintLog.SUPPLEMENTAL_NODE_REMOVED,
params={
'preprint': self._id,
'node': current_node_id
},
auth=auth,
save=False,
)
if save:
self.save()
def set_title(self, title, auth, save=False):
"""Set the title of this Node and log it.
:param str title: The new title.
:param auth: All the auth information including user, API key.
"""
if not self.has_permission(auth.user, 'write'):
raise PermissionsError('Must have admin or write permissions to edit a preprint\'s title.')
# Called so validation does not have to wait until save.
validate_title(title)
original_title = self.title
new_title = sanitize.strip_html(title)
        # Title hasn't changed after sanitization, bail out
if original_title == new_title:
return False
self.title = new_title
self.add_log(
action=PreprintLog.EDITED_TITLE,
params={
'preprint': self._id,
'title_new': self.title,
'title_original': original_title,
},
auth=auth,
save=False,
)
if save:
self.save()
return None
def set_description(self, description, auth, save=False):
"""Set the description and log the event.
:param str description: The new description
        :param auth: All the auth information including user, API key.
:param bool save: Save self after updating.
"""
if not self.has_permission(auth.user, 'write'):
            raise PermissionsError('Must have admin or write permissions to edit a preprint\'s description.')
original = self.description
new_description = sanitize.strip_html(description)
if original == new_description:
return False
self.description = new_description
self.add_log(
action=PreprintLog.EDITED_DESCRIPTION,
params={
'preprint': self._id,
'description_new': self.description,
'description_original': original
},
auth=auth,
save=False,
)
if save:
self.save()
return None
def get_spam_fields(self, saved_fields):
return self.SPAM_CHECK_FIELDS if self.is_published and 'is_published' in saved_fields else self.SPAM_CHECK_FIELDS.intersection(
saved_fields)
def set_privacy(self, permissions, auth=None, log=True, save=True, check_addons=False):
"""Set the permissions for this preprint - mainly for spam purposes.
:param permissions: A string, either 'public' or 'private'
:param auth: All the auth information including user, API key.
        :param bool log: Whether to add a PreprintLog for the privacy change.
        :param bool save: Save self after updating.
        :param bool check_addons: Check and collect messages for addons?
"""
if auth and not self.has_permission(auth.user, 'write'):
raise PermissionsError('Must have admin or write permissions to change privacy settings.')
if permissions == 'public' and not self.is_public:
if self.is_spam or (settings.SPAM_FLAGGED_MAKE_NODE_PRIVATE and self.is_spammy):
# TODO: Should say will review within a certain agreed upon time period.
raise PreprintStateError('This preprint has been marked as spam. Please contact the help desk if you think this is in error.')
self.is_public = True
elif permissions == 'private' and self.is_public:
self.is_public = False
else:
return False
if log:
action = PreprintLog.MADE_PUBLIC if permissions == 'public' else PreprintLog.MADE_PRIVATE
self.add_log(
action=action,
params={
'preprint': self._id,
},
auth=auth,
save=False,
)
if save:
self.save()
return True
def can_view(self, auth):
if not auth.user:
return self.verified_publishable
return (self.verified_publishable or
(self.is_public and auth.user.has_perm('view_submissions', self.provider)) or
self.has_permission(auth.user, 'admin') or
(self.is_contributor(auth.user) and self.has_submitted_preprint)
)
def can_edit(self, auth=None, user=None):
"""Return if a user is authorized to edit this preprint.
Must specify one of (`auth`, `user`).
:param Auth auth: Auth object to check
:param User user: User object to check
:returns: Whether user has permission to edit this node.
"""
if not auth and not user:
raise ValueError('Must pass either `auth` or `user`')
if auth and user:
raise ValueError('Cannot pass both `auth` and `user`')
user = user or auth.user
return (
user and ((self.has_permission(user, 'write') and self.has_submitted_preprint) or self.has_permission(user, 'admin'))
)
def belongs_to_permission_group(self, user, permission):
# Override for contributormixin
return self.get_group(permission).user_set.filter(id=user.id).exists()
# Overrides ContributorMixin entirely, since Preprints use guardian permissions.
    # TODO: When nodes use guardian as well, move this to ContributorMixin
def add_permission(self, user, permission, save=False):
"""Grant permission to a user.
:param User user: User to grant permission to
:param str permission: Permission to grant
:param bool save: Save changes
:raises: ValueError if user already has permission
"""
if not self.belongs_to_permission_group(user, permission):
permission_group = self.get_group(permission)
permission_group.user_set.add(user)
else:
raise ValueError('User already has permission {0}'.format(permission))
if save:
self.save()
# Overrides ContributorMixin entirely, since Preprints use guardian permissions.
    # TODO: When nodes use guardian as well, move this to ContributorMixin
def remove_permission(self, user, permission, save=False):
"""Revoke permission from a user.
:param User user: User to revoke permission from
:param str permission: Permission to revoke
:param bool save: Save changes
:raises: ValueError if user does not have permission
"""
if self.belongs_to_permission_group(user, permission):
permission_group = self.get_group(permission)
permission_group.user_set.remove(user)
else:
raise ValueError('User does not have permission {0}'.format(permission))
if save:
self.save()
    # TODO: When nodes use guardian as well, move this to ContributorMixin
def clear_permissions(self, user):
for name in self.groups.keys():
if user.groups.filter(name=self.get_group(name)).exists():
self.remove_permission(user, name)
def expand_permissions(self, permission=None):
# Property needed for ContributorMixin
# Preprint contributor methods don't require a list ['read', 'write'], they
# just use highest permission, 'write'
return permission
def get_contributor_order(self):
# Method needed for ContributorMixin
return self.get_preprintcontributor_order()
def set_contributor_order(self, contributor_ids):
# Method needed for ContributorMixin
return self.set_preprintcontributor_order(contributor_ids)
# Overrides replace_contributor since users needed to be added to groups
def replace_contributor(self, old, new):
res = super(Preprint, self).replace_contributor(old, new)
for group_name in self.groups.keys():
if self.belongs_to_permission_group(old, group_name):
self.get_group(group_name).user_set.remove(old)
self.get_group(group_name).user_set.add(new)
return res
# Overrides ContributorMixin since this query is constructed differently
def _get_admin_contributors_query(self, users):
return PreprintContributor.objects.select_related('user').filter(
preprint=self,
user__in=users,
user__is_active=True,
user__groups=(self.get_group('admin').id))
@classmethod
def bulk_update_search(cls, preprints, index=None):
from website import search
try:
serialize = functools.partial(search.search.update_preprint, index=index, bulk=True, async_update=False)
search.search.bulk_update_nodes(serialize, preprints, index=index)
except search.exceptions.SearchUnavailableError as e:
logger.exception(e)
log_exception()
def update_search(self):
from website import search
try:
search.search.update_preprint(self, bulk=False, async_update=True)
except search.exceptions.SearchUnavailableError as e:
logger.exception(e)
log_exception()
def serialize_waterbutler_settings(self, provider_name=None):
"""
Since preprints don't have addons, this method has been pulled over from the
OSFStorage addon
"""
return dict(Region.objects.get(id=self.region_id).waterbutler_settings, **{
'nid': self._id,
'rootId': self.root_folder._id,
'baseUrl': api_url_for(
'osfstorage_get_metadata',
guid=self._id,
_absolute=True,
_internal=True
)
})
def serialize_waterbutler_credentials(self, provider_name=None):
"""
Since preprints don't have addons, this method has been pulled over from the
OSFStorage addon
"""
return Region.objects.get(id=self.region_id).waterbutler_credentials
def create_waterbutler_log(self, auth, action, payload):
"""
Since preprints don't have addons, this method has been pulled over from the
OSFStorage addon
"""
metadata = payload['metadata']
user = auth.user
params = {
'preprint': self._id,
'path': metadata['materialized'],
}
if (metadata['kind'] != 'folder'):
url = self.web_url_for(
'addon_view_or_download_file',
guid=self._id,
path=metadata['path'],
provider='osfstorage'
)
params['urls'] = {'view': url, 'download': url + '?action=download'}
self.add_log(
'osf_storage_{0}'.format(action),
auth=Auth(user),
params=params
)
@receiver(post_save, sender=Preprint)
def create_file_node(sender, instance, **kwargs):
if instance.root_folder:
return
# Note: The "root" node will always be "named" empty string
root_folder = OsfStorageFolder(name='', target=instance, is_root=True)
root_folder.save()
| pattisdr/osf.io | osf/models/preprint.py | Python | apache-2.0 | 40,401 | 0.002401 |
# coding=utf-8
__author__ = 'mic'
from django import forms
from django.utils.translation import ugettext, ugettext_lazy as _
from django.contrib.auth.forms import UserCreationForm
from public.models import User
class UserSignupForm(forms.ModelForm):
"""
A form that creates a user, with no privileges, from the given username and
password.
"""
error_messages = {
'duplicate_username': _("A user with that username already exists."),
'password_mismatch': _("The two password fields didn't match."),
}
username = forms.RegexField(label=_("Username"), max_length=30,
regex=r'^[\w.@+-]+$',
help_text=_("Required. 30 characters or fewer. Letters, digits and "
"@/./+/-/_ only."),
error_messages={
'invalid': _("This value may contain only letters, numbers and "
"@/./+/-/_ characters.")})
password1 = forms.CharField(label=_("Password"),
widget=forms.PasswordInput)
password2 = forms.CharField(label=_("Password confirmation"),
widget=forms.PasswordInput,
help_text=_("Enter the same password as above, for verification."))
invite_code = forms.RegexField(label=_("Invention code"),
regex=r'^[^\s]{8}$',
help_text=_("Required. 8 characters: digital, Letters, digits or symbols"))
class Meta:
model = User
fields = ("username",)
def clean_username(self):
# Since User.username is unique, this check is redundant,
# but it sets a nicer error message than the ORM. See #13147.
username = self.cleaned_data["username"]
try:
User._default_manager.get(username=username)
except User.DoesNotExist:
return username
raise forms.ValidationError(
self.error_messages['duplicate_username'],
code='duplicate_username',
)
def clean_password2(self):
password1 = self.cleaned_data.get("password1")
password2 = self.cleaned_data.get("password2")
if password1 and password2 and password1 != password2:
raise forms.ValidationError(
self.error_messages['password_mismatch'],
code='password_mismatch',
)
return password2
def save(self, commit=True):
user = super(UserSignupForm, self).save(commit=False)
user.set_password(self.cleaned_data["password1"])
if commit:
user.save()
return user
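# A minimal usage sketch (hypothetical view code, not part of this module):
#
#   form = UserSignupForm(request.POST)
#   if form.is_valid():
#       user = form.save()      # hashes password1 via set_password() before saving
#   else:
#       errors = form.errors    # e.g. duplicate_username or password_mismatch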
| micfan/dinner | src/public/forms.py | Python | mit | 2,640 | 0.001515 |
"""
Main organisation:
- L{amcat.scripts.tools} contains helper classes that are used by the scripts
- L{amcat.scripts.searchscripts} contains scripts that search the index or the database
- L{amcat.scripts.processors} contains scripts that process the input of a script
- L{amcat.scripts.output} contains scripts that output script results in various formats, such as csv and html.
- L{amcat.scripts.to_be_updated} contains legacy scripts that still have to be updated.
"""
| amcat/amcat | amcat/scripts/__init__.py | Python | agpl-3.0 | 482 | 0.008299 |
# coding: utf-8
import base64
import pytest
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.common.by import By
from tests_silverlight import SilverlightTestCase
By.XNAME = 'xname'
class TestGetCommands(SilverlightTestCase):
"""
Test GET commands that do not change anything in app, meaning they can all be run in one session.
"""
def test_get_current_window_handle(self):
"""
GET /session/:sessionId/window_handle Retrieve the current window handle.
"""
assert 'current' == self.driver.current_window_handle
def test_screenshot(self):
"""
GET /session/:sessionId/screenshot Take a screenshot of the current page.
"""
assert self.driver.get_screenshot_as_png(), 'Screenshot should not be empty'
def test_get_window_size(self):
"""
GET /session/:sessionId/window/:windowHandle/size Get the size of the specified window.
"""
size = self.driver.get_window_size()
assert {'height': 800, 'width': 480} == size
def test_get_page_source(self):
"""
GET /session/:sessionId/source Get the current page source (as xml).
"""
from xml.etree import ElementTree
source = self.driver.page_source
root = ElementTree.fromstring(source.encode('utf-8'))
visual_root = next(root.iterfind('*'))
assert 'System.Windows.Controls.Border' == visual_root.tag
@pytest.mark.parametrize(("by", "value"), [
(By.ID, 'MyTextBox'),
(By.NAME, 'NonUniqueName'),
(By.CLASS_NAME, 'System.Windows.Controls.TextBox'),
(By.TAG_NAME, 'System.Windows.Controls.TextBox'),
], ids=['by id', 'by name', 'by class name', 'by tag name'])
def test_find_element(self, by, value):
"""
POST /session/:sessionId/element Search for an element on the page, starting from the document root.
"""
try:
self.driver.find_element(by, value)
except NoSuchElementException as e:
pytest.fail(e)
@pytest.mark.parametrize(("by", "value", "expected_count"), [
(By.NAME, 'NonUniqueName', 2),
(By.TAG_NAME, 'System.Windows.Controls.TextBlock', 30),
], ids=['by name', 'by class name'])
def test_find_elements(self, by, value, expected_count):
"""
POST /session/:sessionId/elements Search for multiple elements on the page, starting from the document root.
"""
assert expected_count == len(self.driver.find_elements(by, value))
def test_find_child_element(self):
"""
POST /session/:sessionId/element/:id/element
Search for an element on the page, starting from the identified element.
"""
parent_element = self.driver.find_element_by_class_name('TestApp.MainPage')
try:
parent_element.find_element_by_id('MyTextBox')
except NoSuchElementException as e:
pytest.fail(e)
def test_find_child_elements(self):
"""
POST /session/:sessionId/element/:id/elements
Search for multiple elements on the page, starting from the identified element.
"""
parent_element = self.driver.find_element_by_id('MyListBox')
elements = parent_element.find_elements_by_class_name('System.Windows.Controls.TextBlock')
assert 25 == len(elements)
def test_get_element_text(self):
"""
GET /session/:sessionId/element/:id/text Returns the visible text for the element.
"""
text = self.driver.find_element_by_id('SetButton').text
assert "Set 'CARAMBA' text to TextBox" == text
@pytest.mark.parametrize(("attr_name", "expected_value"), [('Width', '400', )])
def test_get_element_attribute(self, attr_name, expected_value):
"""
GET /session/:sessionId/element/:id/attribute/:name Get the value of an element's attribute.
"""
element = self.driver.find_element_by_id('MyTextBox')
value = element.get_attribute(attr_name)
assert expected_value == value
def test_get_element_attribute_dot_syntax(self):
element = self.driver.find_element_by_id('SetButton')
value = element.get_attribute('Background.Color')
assert '#00FFFFFF' == value
@pytest.mark.parametrize(("automation_id", "expected_value"), [
('MyTextBox', True),
])
def test_is_element_displayed(self, automation_id, expected_value):
"""
GET /session/:sessionId/element/:id/displayed Determine if an element is currently displayed.
"""
is_displayed = self.driver.find_element_by_id(automation_id).is_displayed()
assert expected_value == is_displayed
def test_get_element_location(self):
"""
GET /session/:sessionId/element/:id/location Determine an element's location on the page.
"""
location = self.driver.find_element_by_id('MyTextBox').location
assert {'x': 240, 'y': 269} == location
def test_get_element_size(self):
size = self.driver.find_element_by_id('MyTextBox').size
assert {'height': 100, 'width': 400} == size
def test_get_element_rect(self):
rect = self.driver.find_element_by_id('MyTextBox').rect
assert {'x': 40, 'y': 219, 'height': 100, 'width': 400} == rect
def test_get_orientation(self):
"""
GET /session/:sessionId/orientation Get the current browser orientation.
Note: we lost orientation support in universal driver, atm it always returns portrait
"""
# TODO: rewrite and parametrize test to test different orientations
assert 'PORTRAIT' == self.driver.orientation
@pytest.mark.parametrize(("name", "expected_value"), [
('May', True),
('June', True),
('November', False),
])
def test_is_displayed(self, name, expected_value):
element = self.driver.find_element_by_name(name)
assert expected_value == element.is_displayed()
def test_file_ops(self):
encoding = 'utf-8'
with open(__file__, encoding=encoding) as f:
encoded = base64.b64encode(f.read().encode(encoding)).decode(encoding)
self.driver.push_file(r"test\sample.dat", encoded)
data = self.driver.pull_file(r"test\sample.dat")
assert encoded == data
def test_execute_script_invoke_method_echo_with_arg(self):
rv = self.driver.execute_script('mobile: invokeMethod', 'TestApp.AutomationApi', 'Echo', 'blah blah')
assert 'blah blah' == rv
def test_execute_script_invoke_method_complex_return_value_no_args(self):
expected = {u'Date': u'1985-10-21T01:20:00', u'Text': u'Flux', u'Value': 3}
rv = self.driver.execute_script('mobile: invokeMethod', 'TestApp.AutomationApi', 'ReturnStubState')
assert expected == rv
class TestExecuteScript(SilverlightTestCase):
__shared_session__ = False
@pytest.mark.parametrize("command_alias", ["automation: InvokePattern.Invoke"])
def test_automation_invoke(self, command_alias):
self.driver.find_element_by_id('MyTextBox').send_keys('')
element = self.driver.find_element_by_id('SetButton')
self.driver.execute_script(command_alias, element)
assert 'CARAMBA' == self.driver.find_element_by_id('MyTextBox').text
@pytest.mark.parametrize("command_alias", ["automation: ScrollPattern.Scroll"])
def test_automation_scroll(self, command_alias):
list_box = self.driver.find_element_by_id('MyListBox')
list_item = list_box.find_element_by_name('November')
start_location = list_item.location
scroll_info = {"v": "smallIncrement", "count": 10}
self.driver.execute_script(command_alias, list_box, scroll_info)
end_location = list_item.location
assert (end_location['y'] - start_location['y']) < 0
class TestBasicInput(SilverlightTestCase):
__shared_session__ = False
def test_send_keys_to_element(self):
"""
POST /session/:sessionId/element/:id/value Send a sequence of key strokes to an element.
TODO: test magic keys
"""
actual_input = 'Some test string'
element = self.driver.find_element_by_id('MyTextBox')
element.send_keys(actual_input)
assert actual_input == element.text
def test_click_element(self):
element = self.driver.find_element_by_id('SetButton')
element.click()
assert 'CARAMBA' == self.driver.find_element_by_id('MyTextBox').text
def test_app_bar_item_invoke(self):
element = self.driver.find_element_by_id('MyTextBox')
self.driver.execute_script("mobile: invokeAppBarItem", 'iconButton', 1)
assert 'Button 2' == element.text
self.driver.execute_script("mobile: invokeAppBarItem", 'menuItem', 0)
assert 'MenuItem 1' == element.text
| 2gis/Winium.StoreApps | Winium/TestApp.Test/py-functional/tests_silverlight/test_commands.py | Python | mpl-2.0 | 8,916 | 0.002916 |
from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
import hooks
import manager.hooks
import technician.hooks
urlpatterns = patterns('',
# general hooks
(r'^$', hooks.index),
(r'^api/$', hooks.api_index),
(r'^api/export/$', hooks.export_csv),
(r'^api/login/$', hooks.login_user),
(r'^api/logout/$', hooks.logout_user),
(r'^api/notifications/(start/(?P<start>[0-9]+)/)?(end/(?P<end>[0-9]+)/)?$', hooks.list_notifications),
(r'^api/sensors/$', hooks.list_sensors),
(r'^api/settings/$', hooks.list_settings),
(r'^api/status/$', hooks.status),
# technician hooks
(r'^api/configure/$', technician.hooks.configure),
(r'^api/data/monthly/$', technician.hooks.list_sensor_values),
(r'^api/data/yearly/$', technician.hooks.list_sensor_values, {'interval': 'year'}),
(r'^api/forecast/$', technician.hooks.forecast),
(r'^api/forward/$', technician.hooks.forward),
(r'^api/live/$', technician.hooks.live_data),
(r'^api/manage/thresholds/$', technician.hooks.handle_threshold),
(r'^api/settings/tunable/$', technician.hooks.get_tunable_device_configurations),
(r'^api/snippets/$', technician.hooks.handle_snippets),
(r'^api/code/$', technician.hooks.handle_code),
(r'^api/start/$', technician.hooks.start_device),
(r'^api/statistics/$', technician.hooks.get_statistics),
(r'^api/statistics/monthly/$', technician.hooks.get_monthly_statistics),
(r'^api/thresholds/$', technician.hooks.list_thresholds),
# manager hooks
(r'^api/avgs/(sensor/(?P<sensor_id>[0-9]+)/)?(year/(?P<year>[0-9]+)/)?$', manager.hooks.get_avgs),
(r'^api/balance/total/((?P<year>\d+)/)?((?P<month>\d+)/)?$', manager.hooks.get_total_balance),
(r'^api/balance/total/latest/$', manager.hooks.get_latest_total_balance),
(r'^api/history/$', manager.hooks.get_sensorvalue_history_list),
(r'^api/loads/$', manager.hooks.get_daily_loads),
(r'^api/sensor/((?P<sensor_id>\d+)/)?$', manager.hooks.get_detailed_sensor_values),
(r'^api/sums/(sensor/(?P<sensor_id>[0-9]+)/)?(year/(?P<year>[0-9]+)/)?$', manager.hooks.get_sums),
url(r'^admin/', include(admin.site.urls)),
)
| SEC-i/ecoControl | server/urls.py | Python | mit | 2,212 | 0.005425 |
# -*- coding: utf-8 -*-
# Copyright 2011 Takeshi KOMIYA
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pkg_resources import iter_entry_points
node_handlers = []
def load(plugins, diagram, **kwargs):
for name in plugins:
for ep in iter_entry_points('blockdiag_plugins', name):
module = ep.load()
if hasattr(module, 'setup'):
module.setup(module, diagram, **kwargs)
break
else:
msg = "WARNING: unknown plugin: %s\n" % name
raise AttributeError(msg)
def install_node_handler(handler):
if handler not in node_handlers:
node_handlers.append(handler)
def fire_node_event(node, name, *args):
method = "on_" + name
for handler in node_handlers:
getattr(handler, method)(node, *args)
class NodeHandler(object):
def __init__(self, diagram, **kwargs):
self.diagram = diagram
def on_created(self, node):
pass
def on_attr_changed(self, node, attr):
pass
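# A minimal plugin sketch (hypothetical, for illustration only): a module exposed
# through the 'blockdiag_plugins' entry point defines setup(), which load() above
# calls as setup(module, diagram, **kwargs); it can register a NodeHandler so the
# handler receives node events fired via fire_node_event().
#
#   class MyHandler(NodeHandler):
#       def on_created(self, node):
#           pass  # react to newly created diagram nodes
#
#   def setup(self, diagram, **kwargs):
#       install_node_handler(MyHandler(diagram, **kwargs))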
| aboyett/blockdiag | src/blockdiag/plugins/__init__.py | Python | apache-2.0 | 1,527 | 0 |
# -*- coding: utf-8 -*-
import unittest
from wechatpy.replies import TextReply, create_reply
class CreateReplyTestCase(unittest.TestCase):
def test_create_reply_with_text_not_render(self):
text = "test"
reply = create_reply(text, render=False)
self.assertEqual("text", reply.type)
self.assertEqual(text, reply.content)
reply.render()
def test_create_reply_with_text_render(self):
text = "test"
reply = create_reply(text, render=True)
self.assertTrue(isinstance(reply, str))
def test_create_reply_with_message(self):
from wechatpy.messages import TextMessage
msg = TextMessage(
{
"FromUserName": "user1",
"ToUserName": "user2",
}
)
reply = create_reply("test", msg, render=False)
self.assertEqual("user1", reply.target)
self.assertEqual("user2", reply.source)
reply.render()
def test_create_reply_with_reply(self):
_reply = TextReply(content="test")
reply = create_reply(_reply, render=False)
self.assertEqual(_reply, reply)
reply.render()
def test_create_reply_with_articles(self):
articles = [
{
"title": "test 1",
"description": "test 1",
"image": "http://www.qq.com/1.png",
"url": "http://www.qq.com/1",
},
{
"title": "test 2",
"description": "test 2",
"image": "http://www.qq.com/2.png",
"url": "http://www.qq.com/2",
},
{
"title": "test 3",
"description": "test 3",
"image": "http://www.qq.com/3.png",
"url": "http://www.qq.com/3",
},
]
reply = create_reply(articles, render=False)
self.assertEqual("news", reply.type)
reply.render()
def test_create_reply_with_more_than_ten_articles(self):
articles = [
{
"title": "test 1",
"description": "test 1",
"image": "http://www.qq.com/1.png",
"url": "http://www.qq.com/1",
},
{
"title": "test 2",
"description": "test 2",
"image": "http://www.qq.com/2.png",
"url": "http://www.qq.com/2",
},
{
"title": "test 3",
"description": "test 3",
"image": "http://www.qq.com/3.png",
"url": "http://www.qq.com/3",
},
{
"title": "test 4",
"description": "test 4",
"image": "http://www.qq.com/4.png",
"url": "http://www.qq.com/4",
},
{
"title": "test 5",
"description": "test 5",
"image": "http://www.qq.com/5.png",
"url": "http://www.qq.com/5",
},
{
"title": "test 6",
"description": "test 6",
"image": "http://www.qq.com/6.png",
"url": "http://www.qq.com/6",
},
{
"title": "test 7",
"description": "test 7",
"image": "http://www.qq.com/7.png",
"url": "http://www.qq.com/7",
},
{
"title": "test 8",
"description": "test 8",
"image": "http://www.qq.com/8.png",
"url": "http://www.qq.com/8",
},
{
"title": "test 9",
"description": "test 9",
"image": "http://www.qq.com/9.png",
"url": "http://www.qq.com/9",
},
{
"title": "test 10",
"description": "test 10",
"image": "http://www.qq.com/10.png",
"url": "http://www.qq.com/10",
},
{
"title": "test 11",
"description": "test 11",
"image": "http://www.qq.com/11.png",
"url": "http://www.qq.com/11",
},
]
self.assertRaises(AttributeError, create_reply, articles)
def test_create_empty_reply(self):
from wechatpy.replies import EmptyReply
reply = create_reply("")
self.assertTrue(isinstance(reply, EmptyReply))
reply = create_reply(None)
self.assertTrue(isinstance(reply, EmptyReply))
reply = create_reply(False)
self.assertTrue(isinstance(reply, EmptyReply))
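# A minimal usage sketch outside the test suite (hedged; the handler code and the
# request_xml variable are assumptions, not part of this module):
#
#   from wechatpy import parse_message
#   from wechatpy.replies import create_reply
#
#   msg = parse_message(request_xml)
#   xml = create_reply('hello', msg, render=True)  # rendered XML reply string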
| jxtech/wechatpy | tests/test_create_reply.py | Python | mit | 4,741 | 0 |
from rest_framework import generics
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework.reverse import reverse
from .models import Feedback
from .serializers import *
class FeedbackListView(generics.ListCreateAPIView):
# permission_classes = IsAuthenticatedOrReadOnly,
queryset = Feedback.objects.all()
# paginate_by = 10
paginate_by_param = 'page_size'
serializer_class = FeedbackSerializer
class FeedbackCategoryListView(generics.ListCreateAPIView):
queryset = FeedbackCategory.objects.all()
serializer_class = FeedbackCategorySerializer
class Base(APIView):
def get(self, request):
result = {
"Feedbacks": reverse('feedback-list', request=request),
"Feedback Categories": reverse('feedbackcategory-list', request=request)
}
return Response(result)
| mmilaprat/policycompass-services | apps/feedbackmanager/views.py | Python | agpl-3.0 | 891 | 0.001122 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import functools
import hypothesis
from hypothesis import given, settings, HealthCheck
import hypothesis.strategies as st
import numpy as np
from caffe2.python import core
import caffe2.python.hypothesis_test_util as hu
import caffe2.python.serialized_test.serialized_test_util as serial
class TestAdadelta(serial.SerializedTestCase):
@staticmethod
def ref_adadelta(param_in,
mom_in,
mom_delta_in,
grad, lr,
epsilon,
decay,
using_fp16=False):
param_in_f32 = param_in
mom_in_f32 = mom_in
mom_delta_in_f32 = mom_delta_in
if(using_fp16):
param_in_f32 = param_in.astype(np.float32)
mom_in_f32 = mom_in.astype(np.float32)
mom_delta_in_f32 = mom_delta_in.astype(np.float32)
mom_out = decay * mom_in_f32 + (1.0 - decay) * grad * grad
new_grad = (np.sqrt(mom_delta_in_f32 + epsilon) /
np.sqrt(mom_out + epsilon)) * grad
param_out = param_in_f32 + lr * new_grad
mom_delta_out = decay * mom_delta_in_f32 + (1.0 - decay
) * new_grad * new_grad
if(using_fp16):
return (param_out.astype(np.float16), mom_out.astype(np.float16),
mom_delta_out.astype(np.float16))
else:
return (param_out.astype(np.float32), mom_out.astype(np.float32),
mom_delta_out.astype(np.float32))
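    # The reference above implements the Adadelta update (cf. Zeiler, 2012), with
    # mom = running average of squared gradients and mom_delta = running average
    # of squared updates:
    #   mom_out       = decay * mom_in + (1 - decay) * grad^2
    #   new_grad      = sqrt(mom_delta_in + eps) / sqrt(mom_out + eps) * grad
    #   param_out     = param_in + lr * new_grad
    #   mom_delta_out = decay * mom_delta_in + (1 - decay) * new_grad^2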
@serial.given(inputs=hu.tensors(n=4),
lr=st.floats(min_value=0.01, max_value=0.99,
allow_nan=False, allow_infinity=False),
epsilon=st.floats(min_value=0.01, max_value=0.99,
allow_nan=False, allow_infinity=False),
decay=st.floats(min_value=0.01, max_value=0.99,
allow_nan=False, allow_infinity=False),
**hu.gcs)
def test_adadelta(self, inputs, lr, epsilon, decay, gc, dc):
param, moment, moment_delta, grad = inputs
lr = np.array([lr], dtype=np.float32)
op = core.CreateOperator(
"Adadelta",
["param", "moment", "moment_delta", "grad", "lr"],
["param", "moment", "moment_delta"],
epsilon=epsilon,
decay=decay,
device_option=gc,
)
self.assertReferenceChecks(
gc, op,
[param, moment, moment_delta, grad, lr],
functools.partial(self.ref_adadelta, epsilon=epsilon, decay=decay))
# Suppress filter_too_much health check.
# Likely caused by `assume` call falling through too often.
@settings(suppress_health_check=[HealthCheck.filter_too_much])
@given(inputs=hu.tensors(n=4),
lr=st.floats(min_value=0.01, max_value=0.99,
allow_nan=False, allow_infinity=False),
epsilon=st.floats(min_value=0.01, max_value=0.99,
allow_nan=False, allow_infinity=False),
decay=st.floats(min_value=0.01, max_value=0.99,
allow_nan=False, allow_infinity=False),
**hu.gcs)
def test_sparse_adadelta(self, inputs, lr, epsilon, decay, gc, dc):
param, moment, moment_delta, grad = inputs
moment = np.abs(moment)
lr = np.array([lr], dtype=np.float32)
# Create an indexing array containing values that are lists of indices,
# which index into grad
indices = np.random.choice(np.arange(grad.shape[0]),
size=np.random.randint(grad.shape[0]), replace=False)
# Sparsify grad
grad = grad[indices]
op = core.CreateOperator(
"SparseAdadelta",
["param", "moment", "moment_delta", "indices", "grad", "lr"],
["param", "moment", "moment_delta"],
epsilon=epsilon,
decay=decay,
device_option=gc)
def ref_sparse(param, moment, moment_delta, indices, grad, lr, decay,
ref_using_fp16):
param_out = np.copy(param)
moment_out = np.copy(moment)
moment_delta_out = np.copy(moment_delta)
for i, index in enumerate(indices):
param_out[index], moment_out[index], moment_delta_out[
index] = self.ref_adadelta(param[index], moment[index],
moment_delta[index], grad[i], lr,
epsilon, decay, ref_using_fp16)
return (param_out, moment_out, moment_delta_out)
ref_using_fp16_values = [False]
if dc == hu.gpu_do:
ref_using_fp16_values.append(True)
for ref_using_fp16 in ref_using_fp16_values:
moment_i = None
moment_delta_i = None
param_i = None
if(ref_using_fp16):
moment_i = moment.astype(np.float16)
moment_delta_i = moment_delta.astype(np.float16)
param_i = param.astype(np.float16)
else:
moment_i = moment.astype(np.float32)
moment_delta_i = moment_delta.astype(np.float32)
param_i = param.astype(np.float32)
self.assertReferenceChecks(gc, op, [
param_i, moment_i, moment_delta_i, indices, grad, lr, decay,
ref_using_fp16
], ref_sparse)
@serial.given(inputs=hu.tensors(n=3),
lr=st.floats(min_value=0.01, max_value=0.99,
allow_nan=False, allow_infinity=False),
epsilon=st.floats(min_value=0.01, max_value=0.99,
allow_nan=False, allow_infinity=False),
decay=st.floats(min_value=0.01, max_value=0.99,
allow_nan=False, allow_infinity=False),
data_strategy=st.data(),
**hu.gcs)
def test_sparse_adadelta_empty(self, inputs, lr, epsilon, decay,
data_strategy, gc, dc):
param, moment, moment_delta = inputs
moment = np.abs(moment)
lr = np.array([lr], dtype=np.float32)
grad = np.empty(shape=(0,) + param.shape[1:], dtype=np.float32)
indices = np.empty(shape=(0,), dtype=np.int64)
hypothesis.note('indices.shape: %s' % str(indices.shape))
op = core.CreateOperator(
"SparseAdadelta",
["param", "moment", "moment_delta", "indices", "grad", "lr"],
["param", "moment", "moment_delta"],
epsilon=epsilon,
decay=decay,
device_option=gc)
def ref_sparse_empty(param, moment, moment_delta, indices, grad, lr, decay):
param_out = np.copy(param)
moment_out = np.copy(moment)
moment_delta_out = np.copy(moment_delta)
return (param_out, moment_out, moment_delta_out)
ref_using_fp16_values = [False]
if dc == hu.gpu_do:
ref_using_fp16_values.append(True)
for ref_using_fp16 in ref_using_fp16_values:
moment_i = None
moment_delta_i = None
param_i = None
if(ref_using_fp16):
moment_i = moment.astype(np.float16)
moment_delta_i = moment_delta.astype(np.float16)
param_i = param.astype(np.float16)
else:
moment_i = moment.astype(np.float32)
moment_delta_i = moment_delta.astype(np.float32)
param_i = param.astype(np.float32)
self.assertReferenceChecks(
gc,
op,
[param_i, moment_i, moment_delta_i, indices, grad, lr, decay],
ref_sparse_empty
)
| ryfeus/lambda-packs | pytorch/source/caffe2/python/operator_test/adadelta_test.py | Python | mit | 7,954 | 0.002137 |
# Create your views here.
from django.shortcuts import render, get_object_or_404
from django.views import generic
from modelcontrol.models import Plant
from xmlrpclib import ServerProxy, Error
class IndexView(generic.ListView):
template_name = 'modelcontrol/index.html'
context_object_name = 'plant_list'
def get_queryset(self):
return Plant.objects.all()
def update(request, plant_id):
p = get_object_or_404(Plant, pk=plant_id)
try:
motor = ServerProxy('http://127.0.0.1:1337', allow_none=True)
if 'position' in request.POST:
p.servo.position = request.POST['position']
p.servo.save()
if 'speed' in request.POST:
p.motor.speed = request.POST['speed']
p.motor.save()
motor.set_rate(0, 7)
motor.set_rate(int(p.motor.speed), 25)
# set device
except (KeyError):
# error page
pass
plant_list = Plant.objects.all()
context = {'plant_list': plant_list}
return render(request, 'modelcontrol/index.html', context)
| BrummbQ/plantcontrol | plant/modelcontrol/views.py | Python | gpl-3.0 | 1,079 | 0.000927 |
def update_time(sender, **kwargs):
"""
When a Comment is added, updates the Update to set "last_updated" time
"""
comment = kwargs['instance']
if comment.content_type.app_label == "happenings" and comment.content_type.name == "Update":
from .models import Update
item = Update.objects.get(id=comment.object_pk)
item.save()
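# Typically connected to the comment model's post_save signal elsewhere in the app
# (a hedged sketch; the exact wiring is an assumption and is not shown in this file):
#
#   from django.db.models.signals import post_save
#   from django_comments.models import Comment
#   post_save.connect(update_time, sender=Comment)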
| tBaxter/tango-happenings | happenings/signals.py | Python | mit | 368 | 0.002717 |
from django.db import models
class Person(models.Model):
first_name = models.CharField(max_length=25, default='Rico')
last_name = models.CharField(max_length=25, blank=True)
hair_color = models.CharField(max_length=10, blank=True)
eye_color = models.CharField(max_length=10)
age = models.IntegerField()
height = models.CharField(max_length=6)
favorite_animal = models.CharField(max_length=25, blank=True)
number_of_animals = models.IntegerField(null=True)
| ricomoss/learn-tech | python/track_2/lesson2/apples_to_apples/common/models.py | Python | gpl-3.0 | 490 | 0 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import logging
import os
# import json as JSON
from network import Client
from PyQt4.QtCore import SIGNAL, QObject, QString
from PyQt4 import Qt, QtCore, QtGui
import threading
import socket
import Queue
import time
sys.path.append('../')
# from util.util import print_trace_exception
def singleton(cls, *args, **kw):
instances = {}
def _singleton():
if cls not in instances:
instances[cls] = cls(*args, **kw)
return instances[cls]
return _singleton
class BaseController(QObject):
_instance = None
def __init__(self, service_name):
QObject.__init__(self)
self.is_connecting = False
self.is_connected = False
self.service_name = service_name
self.connector = SafeConnector()
self.c = Client()
def connect_client(self, adr, port):
if not (self.is_connected or self.is_connecting):
self.is_connecting = True
self.c.connect(adr, port) # will not return any error code
# if ret == -1:
# self.is_connecting = False
# print_trace_exception()
# raise os.ConnectionError()
self.is_connected = True
self.is_connecting = False
def get_client(self):
return self.c
# Object of this class has to be shared between
# the two threads (Python and Qt one).
# Qt thread calls 'connect',
# Python thread calls 'emit'.
# The slot corresponding to the emitted signal
# will be called in Qt's thread.
@singleton
class SafeConnector:
def __init__(self):
self._rsock, self._wsock = socket.socketpair()
self._queue = Queue.Queue()
self._qt_object = QtCore.QObject()
self._notifier = QtCore.QSocketNotifier(self._rsock.fileno(),
QtCore.QSocketNotifier.Read)
self._notifier.activated.connect(self._recv)
def connect(self, signal, receiver):
QtCore.QObject.connect(self._qt_object, signal, receiver)
# should be called by Python thread
def emit(self, signal, *args):
self._queue.put((signal, args))
self._wsock.send('!')
# happens in Qt's main thread
def _recv(self):
self._rsock.recv(1)
signal, args = self._queue.get()
self._qt_object.emit(signal, *args)
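# A minimal usage sketch (hypothetical; the signal name and slot are assumptions):
# the Qt main thread registers a slot on the shared connector, a worker thread
# emits through the socket pair, and the slot then runs in Qt's event loop.
#
#   connector = SafeConnector()
#   connector.connect(SIGNAL('dataReady(PyQt_PyObject)'), on_data_ready)  # Qt thread
#   connector.emit(SIGNAL('dataReady(PyQt_PyObject)'), payload)           # worker thread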
| dyf102/Gomoku-online | client/controller/basecontroller.py | Python | apache-2.0 | 2,390 | 0.000418 |
# Included modules
import os
import time
import json
import itertools
# Third party modules
import gevent
from Debug import Debug
from Config import config
from util import RateLimit
from util import StreamingMsgpack
from util import helper
from Plugin import PluginManager
FILE_BUFF = 1024 * 512
# Incoming requests
@PluginManager.acceptPlugins
class FileRequest(object):
__slots__ = ("server", "connection", "req_id", "sites", "log", "responded")
def __init__(self, server, connection):
self.server = server
self.connection = connection
self.req_id = None
self.sites = self.server.sites
self.log = server.log
self.responded = False # Responded to the request
def send(self, msg, streaming=False):
if not self.connection.closed:
self.connection.send(msg, streaming)
def sendRawfile(self, file, read_bytes):
if not self.connection.closed:
self.connection.sendRawfile(file, read_bytes)
def response(self, msg, streaming=False):
if self.responded:
if config.verbose:
self.log.debug("Req id %s already responded" % self.req_id)
return
if not isinstance(msg, dict): # If msg not a dict create a {"body": msg}
msg = {"body": msg}
msg["cmd"] = "response"
msg["to"] = self.req_id
self.responded = True
self.send(msg, streaming=streaming)
# Route file requests
def route(self, cmd, req_id, params):
self.req_id = req_id
# Don't allow other sites than locked
if "site" in params and self.connection.target_onion:
valid_sites = self.connection.getValidSites()
if params["site"] not in valid_sites:
self.response({"error": "Invalid site"})
self.connection.log(
"%s site lock violation: %s not in %s, target onion: %s" %
(params["site"], valid_sites, self.connection.target_onion)
)
self.connection.badAction(5)
return False
if cmd == "update":
event = "%s update %s %s" % (self.connection.id, params["site"], params["inner_path"])
if not RateLimit.isAllowed(event): # There was already an update for this file in the last 10 second
time.sleep(5)
self.response({"ok": "File update queued"})
# If called more than once within 15 sec only keep the last update
RateLimit.callAsync(event, max(self.connection.bad_actions, 15), self.actionUpdate, params)
else:
func_name = "action" + cmd[0].upper() + cmd[1:]
func = getattr(self, func_name, None)
if cmd not in ["getFile", "streamFile"]: # Skip IO bound functions
s = time.time()
if self.connection.cpu_time > 0.5:
self.log.debug(
"Delay %s %s, cpu_time used by connection: %.3fs" %
(self.connection.ip, cmd, self.connection.cpu_time)
)
time.sleep(self.connection.cpu_time)
if self.connection.cpu_time > 5:
self.connection.close("Cpu time: %.3fs" % self.connection.cpu_time)
if func:
func(params)
else:
self.actionUnknown(cmd, params)
if cmd not in ["getFile", "streamFile"]:
taken = time.time() - s
self.connection.cpu_time += taken
# Update a site file request
def actionUpdate(self, params):
site = self.sites.get(params["site"])
if not site or not site.settings["serving"]: # Site unknown or not serving
self.response({"error": "Unknown site"})
self.connection.badAction(1)
return False
if not params["inner_path"].endswith("content.json"):
self.response({"error": "Only content.json update allowed"})
self.connection.badAction(5)
return
try:
content = json.loads(params["body"])
except Exception, err:
self.log.debug("Update for %s is invalid JSON: %s" % (params["inner_path"], err))
self.response({"error": "File invalid JSON"})
self.connection.badAction(5)
return
file_uri = "%s/%s:%s" % (site.address, params["inner_path"], content["modified"])
if self.server.files_parsing.get(file_uri): # Check if we already working on it
valid = None # Same file
else:
valid = site.content_manager.verifyFile(params["inner_path"], content)
if valid is True: # Valid and changed
self.log.info("Update for %s/%s looks valid, saving..." % (params["site"], params["inner_path"]))
self.server.files_parsing[file_uri] = True
site.storage.write(params["inner_path"], params["body"])
del params["body"]
site.onFileDone(params["inner_path"]) # Trigger filedone
if params["inner_path"].endswith("content.json"): # Download every changed file from peer
peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True) # Add or get peer
# On complete publish to other peers
diffs = params.get("diffs", {})
site.onComplete.once(lambda: site.publish(inner_path=params["inner_path"], diffs=diffs, limit=2), "publish_%s" % params["inner_path"])
# Load new content file and download changed files in new thread
def downloader():
site.downloadContent(params["inner_path"], peer=peer, diffs=params.get("diffs", {}))
del self.server.files_parsing[file_uri]
gevent.spawn(downloader)
else:
del self.server.files_parsing[file_uri]
self.response({"ok": "Thanks, file %s updated!" % params["inner_path"]})
self.connection.goodAction()
elif valid is None: # Not changed
if params.get("peer"):
peer = site.addPeer(*params["peer"], return_peer=True) # Add or get peer
else:
peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True) # Add or get peer
if peer:
if not peer.connection:
peer.connect(self.connection) # Assign current connection to peer
if params["inner_path"] in site.content_manager.contents:
peer.last_content_json_update = site.content_manager.contents[params["inner_path"]]["modified"]
if config.verbose:
self.log.debug(
"Same version, adding new peer for locked files: %s, tasks: %s" %
(peer.key, len(site.worker_manager.tasks))
)
for task in site.worker_manager.tasks: # New peer add to every ongoing task
if task["peers"] and not task["optional_hash_id"]:
# Download file from this peer too if its peer locked
site.needFile(task["inner_path"], peer=peer, update=True, blocking=False)
self.response({"ok": "File not changed"})
self.connection.badAction()
else: # Invalid sign or sha hash
self.log.debug("Update for %s is invalid" % params["inner_path"])
self.response({"error": "File invalid"})
self.connection.badAction(5)
# Send file content request
def actionGetFile(self, params):
site = self.sites.get(params["site"])
if not site or not site.settings["serving"]: # Site unknown or not serving
self.response({"error": "Unknown site"})
return False
try:
file_path = site.storage.getPath(params["inner_path"])
with StreamingMsgpack.FilePart(file_path, "rb") as file:
file.seek(params["location"])
file.read_bytes = FILE_BUFF
file_size = os.fstat(file.fileno()).st_size
if params["location"] > file_size:
self.connection.badAction(5)
raise Exception("Bad file location")
back = {
"body": file,
"size": file_size,
"location": min(file.tell() + FILE_BUFF, file_size)
}
self.response(back, streaming=True)
bytes_sent = min(FILE_BUFF, file_size - params["location"]) # Number of bytes we going to send
site.settings["bytes_sent"] = site.settings.get("bytes_sent", 0) + bytes_sent
if config.debug_socket:
self.log.debug("File %s at position %s sent %s bytes" % (file_path, params["location"], bytes_sent))
# Add peer to site if not added before
connected_peer = site.addPeer(self.connection.ip, self.connection.port)
if connected_peer: # Just added
connected_peer.connect(self.connection) # Assign current connection to peer
return {"bytes_sent": bytes_sent, "file_size": file_size, "location": params["location"]}
except Exception, err:
self.log.debug("GetFile read error: %s" % Debug.formatException(err))
self.response({"error": "File read error"})
return False
# New-style file streaming out of Msgpack context
def actionStreamFile(self, params):
site = self.sites.get(params["site"])
if not site or not site.settings["serving"]: # Site unknown or not serving
self.response({"error": "Unknown site"})
return False
try:
if config.debug_socket:
self.log.debug("Opening file: %s" % params["inner_path"])
with site.storage.open(params["inner_path"]) as file:
file.seek(params["location"])
file_size = os.fstat(file.fileno()).st_size
stream_bytes = min(FILE_BUFF, file_size - params["location"])
if stream_bytes < 0:
self.connection.badAction(5)
raise Exception("Bad file location")
back = {
"size": file_size,
"location": min(file.tell() + FILE_BUFF, file_size),
"stream_bytes": stream_bytes
}
if config.debug_socket:
self.log.debug(
"Sending file %s from position %s to %s" %
(params["inner_path"], params["location"], back["location"])
)
self.response(back)
self.sendRawfile(file, read_bytes=FILE_BUFF)
site.settings["bytes_sent"] = site.settings.get("bytes_sent", 0) + stream_bytes
if config.debug_socket:
self.log.debug("File %s at position %s sent %s bytes" % (params["inner_path"], params["location"], stream_bytes))
# Add peer to site if not added before
connected_peer = site.addPeer(self.connection.ip, self.connection.port)
if connected_peer: # Just added
connected_peer.connect(self.connection) # Assign current connection to peer
return {"bytes_sent": stream_bytes, "file_size": file_size, "location": params["location"]}
except Exception, err:
self.log.debug("GetFile read error: %s" % Debug.formatException(err))
self.response({"error": "File read error"})
return False
# Peer exchange request
def actionPex(self, params):
site = self.sites.get(params["site"])
if not site or not site.settings["serving"]: # Site unknown or not serving
self.response({"error": "Unknown site"})
return False
got_peer_keys = []
added = 0
# Add requester peer to site
connected_peer = site.addPeer(self.connection.ip, self.connection.port)
if connected_peer: # It was not registered before
added += 1
connected_peer.connect(self.connection) # Assign current connection to peer
# Add sent peers to site
for packed_address in params.get("peers", []):
address = helper.unpackAddress(packed_address)
got_peer_keys.append("%s:%s" % address)
if site.addPeer(*address):
added += 1
# Add sent peers to site
for packed_address in params.get("peers_onion", []):
address = helper.unpackOnionAddress(packed_address)
got_peer_keys.append("%s:%s" % address)
if site.addPeer(*address):
added += 1
# Send back peers that is not in the sent list and connectable (not port 0)
packed_peers = helper.packPeers(site.getConnectablePeers(params["need"], got_peer_keys))
if added:
site.worker_manager.onPeers()
if config.verbose:
self.log.debug(
"Added %s peers to %s using pex, sending back %s" %
(added, site, len(packed_peers["ip4"]) + len(packed_peers["onion"]))
)
back = {}
if packed_peers["ip4"]:
back["peers"] = packed_peers["ip4"]
if packed_peers["onion"]:
back["peers_onion"] = packed_peers["onion"]
self.response(back)
# Get modified content.json files since
def actionListModified(self, params):
site = self.sites.get(params["site"])
if not site or not site.settings["serving"]: # Site unknown or not serving
self.response({"error": "Unknown site"})
return False
modified_files = site.content_manager.listModified(params["since"])
# Add peer to site if not added before
connected_peer = site.addPeer(self.connection.ip, self.connection.port)
if connected_peer: # Just added
connected_peer.connect(self.connection) # Assign current connection to peer
self.response({"modified_files": modified_files})
def actionGetHashfield(self, params):
site = self.sites.get(params["site"])
if not site or not site.settings["serving"]: # Site unknown or not serving
self.response({"error": "Unknown site"})
return False
# Add peer to site if not added before
peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True)
if not peer.connection: # Just added
peer.connect(self.connection) # Assign current connection to peer
peer.time_my_hashfield_sent = time.time() # Don't send again if not changed
self.response({"hashfield_raw": site.content_manager.hashfield.tostring()})
def findHashIds(self, site, hash_ids, limit=100):
back_ip4 = {}
back_onion = {}
found = site.worker_manager.findOptionalHashIds(hash_ids, limit=limit)
for hash_id, peers in found.iteritems():
back_onion[hash_id] = list(itertools.islice((
helper.packOnionAddress(peer.ip, peer.port)
for peer in peers
if peer.ip.endswith("onion")
), 50))
back_ip4[hash_id] = list(itertools.islice((
helper.packAddress(peer.ip, peer.port)
for peer in peers
if not peer.ip.endswith("onion")
), 50))
return back_ip4, back_onion
def actionFindHashIds(self, params):
site = self.sites.get(params["site"])
s = time.time()
if not site or not site.settings["serving"]: # Site unknown or not serving
self.response({"error": "Unknown site"})
self.connection.badAction(5)
return False
event_key = "%s_findHashIds_%s_%s" % (self.connection.ip, params["site"], len(params["hash_ids"]))
if self.connection.cpu_time > 0.5 or not RateLimit.isAllowed(event_key, 60 * 5):
time.sleep(0.1)
back_ip4, back_onion = self.findHashIds(site, params["hash_ids"], limit=10)
else:
back_ip4, back_onion = self.findHashIds(site, params["hash_ids"])
RateLimit.called(event_key)
# Check my hashfield
if self.server.tor_manager and self.server.tor_manager.site_onions.get(site.address): # Running onion
my_ip = helper.packOnionAddress(self.server.tor_manager.site_onions[site.address], self.server.port)
my_back = back_onion
elif config.ip_external: # External ip defined
my_ip = helper.packAddress(config.ip_external, self.server.port)
my_back = back_ip4
else: # No external ip defined
my_ip = helper.packAddress(self.server.ip, self.server.port)
my_back = back_ip4
my_hashfield_set = set(site.content_manager.hashfield)
for hash_id in params["hash_ids"]:
if hash_id in my_hashfield_set:
if hash_id not in my_back:
my_back[hash_id] = []
my_back[hash_id].append(my_ip) # Add myself
if config.verbose:
self.log.debug(
"Found: IP4: %s, Onion: %s for %s hashids in %.3fs" %
(len(back_ip4), len(back_onion), len(params["hash_ids"]), time.time() - s)
)
self.response({"peers": back_ip4, "peers_onion": back_onion})
def actionSetHashfield(self, params):
site = self.sites.get(params["site"])
if not site or not site.settings["serving"]: # Site unknown or not serving
self.response({"error": "Unknown site"})
self.connection.badAction(5)
return False
# Add or get peer
peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True, connection=self.connection)
if not peer.connection:
peer.connect(self.connection)
peer.hashfield.replaceFromString(params["hashfield_raw"])
self.response({"ok": "Updated"})
def actionSiteReload(self, params):
if self.connection.ip not in config.ip_local and self.connection.ip != config.ip_external:
self.response({"error": "Only local host allowed"})
site = self.sites.get(params["site"])
site.content_manager.loadContent(params["inner_path"], add_bad_files=False)
site.storage.verifyFiles(quick_check=True)
site.updateWebsocket()
self.response({"ok": "Reloaded"})
def actionSitePublish(self, params):
if self.connection.ip not in config.ip_local and self.connection.ip != config.ip_external:
self.response({"error": "Only local host allowed"})
site = self.sites.get(params["site"])
num = site.publish(limit=8, inner_path=params.get("inner_path", "content.json"), diffs=params.get("diffs", {}))
self.response({"ok": "Successfuly published to %s peers" % num})
# Send a simple Pong! answer
def actionPing(self, params):
self.response("Pong!")
# Unknown command
def actionUnknown(self, cmd, params):
self.response({"error": "Unknown command: %s" % cmd})
self.connection.badAction(5)
|
frerepoulet/ZeroNet
|
src/File/FileRequest.py
|
Python
|
gpl-2.0
| 19,416 | 0.003348 |
# Copyright 2020 initOS GmbH
# Copyright 2012-2018 Therp BV <https://therp.nl>
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
{
"name": "LDAP groups assignment",
"version": "11.0.1.0.0",
"depends": ["auth_ldap"],
"author": "initOS GmbH, Therp BV, Odoo Community Association (OCA)",
"website": "https://github.com/OCA/server-tools",
"license": "AGPL-3",
"summary": "Adds user accounts to groups based on rules defined "
"by the administrator.",
"category": "Authentication",
"data": [
'views/base_config_settings.xml',
'security/ir.model.access.csv',
],
"external_dependencies": {
'python': ['ldap'],
},
}
|
brain-tec/server-tools
|
users_ldap_groups/__manifest__.py
|
Python
|
agpl-3.0
| 704 | 0 |
"""
Support for plotting vector fields.
Presently this contains Quiver and Barb. Quiver plots an arrow in the
direction of the vector, with the size of the arrow related to the
magnitude of the vector.
Barbs are like quiver in that they point along a vector, but
the magnitude of the vector is given schematically by the presence of barbs
or flags on the barb.
This will also become a home for things such as standard
deviation ellipses, which can and will be derived very easily from
the Quiver code.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import weakref
import numpy as np
from numpy import ma
import matplotlib.collections as mcollections
import matplotlib.transforms as transforms
import matplotlib.text as mtext
import matplotlib.artist as martist
from matplotlib.artist import allow_rasterization
from matplotlib import docstring
import matplotlib.font_manager as font_manager
import matplotlib.cbook as cbook
from matplotlib.cbook import delete_masked_points
from matplotlib.patches import CirclePolygon
import math
_quiver_doc = """
Plot a 2-D field of arrows.
Call signatures::
quiver(U, V, **kw)
quiver(U, V, C, **kw)
quiver(X, Y, U, V, **kw)
quiver(X, Y, U, V, C, **kw)
*U* and *V* are the arrow data, *X* and *Y* set the location of the
arrows, and *C* sets the color of the arrows. These arguments may be 1-D or
2-D arrays or sequences.
If *X* and *Y* are absent, they will be generated as a uniform grid.
If *U* and *V* are 2-D arrays and *X* and *Y* are 1-D, and if ``len(X)`` and
``len(Y)`` match the column and row dimensions of *U*, then *X* and *Y* will be
expanded with :func:`numpy.meshgrid`.
The default settings auto-scale the length of the arrows to a reasonable size.
To change this behavior see the *scale* and *scale_units* kwargs.
The defaults give a slightly swept-back arrow; to make the head a
triangle, make *headaxislength* the same as *headlength*. To make the
arrow more pointed, reduce *headwidth* or increase *headlength* and
*headaxislength*. To make the head smaller relative to the shaft,
scale down all the head parameters. You will probably do best to leave
minshaft alone.
*linewidths* and *edgecolors* can be used to customize the arrow
outlines.
Parameters
----------
X : 1D or 2D array, sequence, optional
The x coordinates of the arrow locations
Y : 1D or 2D array, sequence, optional
The y coordinates of the arrow locations
U : 1D or 2D array or masked array, sequence
The x components of the arrow vectors
V : 1D or 2D array or masked array, sequence
The y components of the arrow vectors
C : 1D or 2D array, sequence, optional
The arrow colors
units : [ 'width' | 'height' | 'dots' | 'inches' | 'x' | 'y' | 'xy' ]
The arrow dimensions (except for *length*) are measured in multiples of
this unit.
'width' or 'height': the width or height of the axis
'dots' or 'inches': pixels or inches, based on the figure dpi
'x', 'y', or 'xy': respectively *X*, *Y*, or :math:`\\sqrt{X^2 + Y^2}`
in data units
The arrows scale differently depending on the units. For
'x' or 'y', the arrows get larger as one zooms in; for other
units, the arrow size is independent of the zoom state. For
'width' or 'height', the arrow size increases with the width and
height of the axes, respectively, when the window is resized;
for 'dots' or 'inches', resizing does not change the arrows.
angles : [ 'uv' | 'xy' ], array, optional
Method for determining the angle of the arrows. Default is 'uv'.
'uv': the arrow axis aspect ratio is 1 so that
if *U*==*V* the orientation of the arrow on the plot is 45 degrees
counter-clockwise from the horizontal axis (positive to the right).
'xy': arrows point from (x,y) to (x+u, y+v).
Use this for plotting a gradient field, for example.
Alternatively, arbitrary angles may be specified as an array
of values in degrees, counter-clockwise from the horizontal axis.
Note: inverting a data axis will correspondingly invert the
arrows only with ``angles='xy'``.
scale : None, float, optional
Number of data units per arrow length unit, e.g., m/s per plot width; a
smaller scale parameter makes the arrow longer. Default is *None*.
If *None*, a simple autoscaling algorithm is used, based on the average
vector length and the number of vectors. The arrow length unit is given by
the *scale_units* parameter
scale_units : [ 'width' | 'height' | 'dots' | 'inches' | 'x' | 'y' | 'xy' ], \
None, optional
If the *scale* kwarg is *None*, the arrow length unit. Default is *None*.
e.g. *scale_units* is 'inches', *scale* is 2.0, and
``(u,v) = (1,0)``, then the vector will be 0.5 inches long.
If *scale_units* is 'width'/'height', then the vector will be half the
width/height of the axes.
If *scale_units* is 'x' then the vector will be 0.5 x-axis
units. To plot vectors in the x-y plane, with u and v having
the same units as x and y, use
``angles='xy', scale_units='xy', scale=1``.
width : scalar, optional
Shaft width in arrow units; default depends on choice of units,
above, and number of vectors; a typical starting value is about
0.005 times the width of the plot.
headwidth : scalar, optional
Head width as multiple of shaft width, default is 3
headlength : scalar, optional
Head length as multiple of shaft width, default is 5
headaxislength : scalar, optional
Head length at shaft intersection, default is 4.5
minshaft : scalar, optional
Length below which arrow scales, in units of head length. Do not
set this to less than 1, or small arrows will look terrible!
Default is 1
minlength : scalar, optional
Minimum length as a multiple of shaft width; if an arrow length
is less than this, plot a dot (hexagon) of this diameter instead.
Default is 1.
pivot : [ 'tail' | 'mid' | 'middle' | 'tip' ], optional
The part of the arrow that is at the grid point; the arrow rotates
about this point, hence the name *pivot*.
color : [ color | color sequence ], optional
This is a synonym for the
:class:`~matplotlib.collections.PolyCollection` facecolor kwarg.
If *C* has been set, *color* has no effect.
Notes
-----
Additional :class:`~matplotlib.collections.PolyCollection`
keyword arguments:
%(PolyCollection)s
See Also
--------
quiverkey : Add a key to a quiver plot
""" % docstring.interpd.params
_quiverkey_doc = """
Add a key to a quiver plot.
Call signature::
quiverkey(Q, X, Y, U, label, **kw)
Arguments:
*Q*:
The Quiver instance returned by a call to quiver.
*X*, *Y*:
The location of the key; additional explanation follows.
*U*:
The length of the key
*label*:
A string with the length and units of the key
Keyword arguments:
*angle* = 0
The angle of the key arrow. Measured in degrees anti-clockwise from the
x-axis.
*coordinates* = [ 'axes' | 'figure' | 'data' | 'inches' ]
Coordinate system and units for *X*, *Y*: 'axes' and 'figure' are
normalized coordinate systems with 0,0 in the lower left and 1,1
in the upper right; 'data' are the axes data coordinates (used for
the locations of the vectors in the quiver plot itself); 'inches'
is position in the figure in inches, with 0,0 at the lower left
corner.
*color*:
overrides face and edge colors from *Q*.
*labelpos* = [ 'N' | 'S' | 'E' | 'W' ]
Position the label above, below, to the right, to the left of the
arrow, respectively.
*labelsep*:
Distance in inches between the arrow and the label. Default is
0.1
*labelcolor*:
defaults to default :class:`~matplotlib.text.Text` color.
*fontproperties*:
A dictionary with keyword arguments accepted by the
:class:`~matplotlib.font_manager.FontProperties` initializer:
*family*, *style*, *variant*, *size*, *weight*
Any additional keyword arguments are used to override vector
properties taken from *Q*.
The positioning of the key depends on *X*, *Y*, *coordinates*, and
*labelpos*. If *labelpos* is 'N' or 'S', *X*, *Y* give the position
of the middle of the key arrow. If *labelpos* is 'E', *X*, *Y*
positions the head, and if *labelpos* is 'W', *X*, *Y* positions the
tail; in either of these two cases, *X*, *Y* is somewhere in the
middle of the arrow+label key object.
"""
class QuiverKey(martist.Artist):
""" Labelled arrow for use as a quiver plot scale key."""
halign = {'N': 'center', 'S': 'center', 'E': 'left', 'W': 'right'}
valign = {'N': 'bottom', 'S': 'top', 'E': 'center', 'W': 'center'}
pivot = {'N': 'middle', 'S': 'middle', 'E': 'tip', 'W': 'tail'}
def __init__(self, Q, X, Y, U, label, **kw):
martist.Artist.__init__(self)
self.Q = Q
self.X = X
self.Y = Y
self.U = U
self.angle = kw.pop('angle', 0)
self.coord = kw.pop('coordinates', 'axes')
self.color = kw.pop('color', None)
self.label = label
self._labelsep_inches = kw.pop('labelsep', 0.1)
self.labelsep = (self._labelsep_inches * Q.ax.figure.dpi)
# try to prevent closure over the real self
weak_self = weakref.ref(self)
def on_dpi_change(fig):
self_weakref = weak_self()
if self_weakref is not None:
self_weakref.labelsep = (self_weakref._labelsep_inches*fig.dpi)
self_weakref._initialized = False # simple brute force update
# works because _init is
# called at the start of
# draw.
self._cid = Q.ax.figure.callbacks.connect('dpi_changed',
on_dpi_change)
self.labelpos = kw.pop('labelpos', 'N')
self.labelcolor = kw.pop('labelcolor', None)
self.fontproperties = kw.pop('fontproperties', dict())
self.kw = kw
_fp = self.fontproperties
# boxprops = dict(facecolor='red')
self.text = mtext.Text(
text=label, # bbox=boxprops,
horizontalalignment=self.halign[self.labelpos],
verticalalignment=self.valign[self.labelpos],
fontproperties=font_manager.FontProperties(**_fp))
if self.labelcolor is not None:
self.text.set_color(self.labelcolor)
self._initialized = False
self.zorder = Q.zorder + 0.1
def remove(self):
"""
Overload the remove method
"""
self.Q.ax.figure.callbacks.disconnect(self._cid)
self._cid = None
# pass the remove call up the stack
martist.Artist.remove(self)
__init__.__doc__ = _quiverkey_doc
def _init(self):
if True: # not self._initialized:
if not self.Q._initialized:
self.Q._init()
self._set_transform()
_pivot = self.Q.pivot
self.Q.pivot = self.pivot[self.labelpos]
# Hack: save and restore the Umask
_mask = self.Q.Umask
self.Q.Umask = ma.nomask
self.verts = self.Q._make_verts(np.array([self.U]),
np.zeros((1,)),
self.angle)
self.Q.Umask = _mask
self.Q.pivot = _pivot
kw = self.Q.polykw
kw.update(self.kw)
self.vector = mcollections.PolyCollection(
self.verts,
offsets=[(self.X, self.Y)],
transOffset=self.get_transform(),
**kw)
if self.color is not None:
self.vector.set_color(self.color)
self.vector.set_transform(self.Q.get_transform())
self.vector.set_figure(self.get_figure())
self._initialized = True
def _text_x(self, x):
if self.labelpos == 'E':
return x + self.labelsep
elif self.labelpos == 'W':
return x - self.labelsep
else:
return x
def _text_y(self, y):
if self.labelpos == 'N':
return y + self.labelsep
elif self.labelpos == 'S':
return y - self.labelsep
else:
return y
@allow_rasterization
def draw(self, renderer):
self._init()
self.vector.draw(renderer)
x, y = self.get_transform().transform_point((self.X, self.Y))
self.text.set_x(self._text_x(x))
self.text.set_y(self._text_y(y))
self.text.draw(renderer)
self.stale = False
def _set_transform(self):
if self.coord == 'data':
self.set_transform(self.Q.ax.transData)
elif self.coord == 'axes':
self.set_transform(self.Q.ax.transAxes)
elif self.coord == 'figure':
self.set_transform(self.Q.ax.figure.transFigure)
elif self.coord == 'inches':
self.set_transform(self.Q.ax.figure.dpi_scale_trans)
else:
raise ValueError('unrecognized coordinates')
def set_figure(self, fig):
martist.Artist.set_figure(self, fig)
self.text.set_figure(fig)
def contains(self, mouseevent):
# Maybe the dictionary should allow one to
# distinguish between a text hit and a vector hit.
if (self.text.contains(mouseevent)[0] or
self.vector.contains(mouseevent)[0]):
return True, {}
return False, {}
quiverkey_doc = _quiverkey_doc
# This is a helper function that parses out the various combination of
# arguments for doing colored vector plots. Pulling it out here
# allows both Quiver and Barbs to use it
def _parse_args(*args):
X, Y, U, V, C = [None] * 5
args = list(args)
# The use of atleast_1d allows for handling scalar arguments while also
# keeping masked arrays
if len(args) == 3 or len(args) == 5:
C = np.atleast_1d(args.pop(-1))
V = np.atleast_1d(args.pop(-1))
U = np.atleast_1d(args.pop(-1))
if U.ndim == 1:
nr, nc = 1, U.shape[0]
else:
nr, nc = U.shape
if len(args) == 2: # remaining after removing U,V,C
X, Y = [np.array(a).ravel() for a in args]
if len(X) == nc and len(Y) == nr:
X, Y = [a.ravel() for a in np.meshgrid(X, Y)]
else:
indexgrid = np.meshgrid(np.arange(nc), np.arange(nr))
X, Y = [np.ravel(a) for a in indexgrid]
return X, Y, U, V, C
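# Small illustration (not used by the library; name is made up) of how
# _parse_args expands 1-D X/Y against 2-D U/V via meshgrid:
def _example_parse_args_shapes():
    u = np.ones((3, 4))   # 3 rows, 4 columns
    v = np.zeros((3, 4))
    x = np.arange(4)      # len(X) matches the column count
    y = np.arange(3)      # len(Y) matches the row count
    X, Y, U, V, C = _parse_args(x, y, u, v)
    # X and Y come back flattened to length 12 (= 3 * 4) after the meshgrid
    # expansion; U and V keep their (3, 4) shape; C stays None here.
    return X.shape, Y.shape, U.shape, V.shape, C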
def _check_consistent_shapes(*arrays):
all_shapes = set(a.shape for a in arrays)
if len(all_shapes) != 1:
raise ValueError('The shapes of the passed in arrays do not match.')
class Quiver(mcollections.PolyCollection):
"""
Specialized PolyCollection for arrows.
The only API method is set_UVC(), which can be used
to change the size, orientation, and color of the
arrows; their locations are fixed when the class is
instantiated. Possibly this method will be useful
in animations.
Much of the work in this class is done in the draw()
method so that as much information as possible is available
about the plot. In subsequent draw() calls, recalculation
is limited to things that might have changed, so there
should be no performance penalty from putting the calculations
in the draw() method.
"""
_PIVOT_VALS = ('tail', 'mid', 'middle', 'tip')
@docstring.Substitution(_quiver_doc)
def __init__(self, ax, *args, **kw):
"""
The constructor takes one required argument, an Axes
instance, followed by the args and kwargs described
by the following pylab interface documentation:
%s
"""
self.ax = ax
X, Y, U, V, C = _parse_args(*args)
self.X = X
self.Y = Y
self.XY = np.hstack((X[:, np.newaxis], Y[:, np.newaxis]))
self.N = len(X)
self.scale = kw.pop('scale', None)
self.headwidth = kw.pop('headwidth', 3)
self.headlength = float(kw.pop('headlength', 5))
self.headaxislength = kw.pop('headaxislength', 4.5)
self.minshaft = kw.pop('minshaft', 1)
self.minlength = kw.pop('minlength', 1)
self.units = kw.pop('units', 'width')
self.scale_units = kw.pop('scale_units', None)
self.angles = kw.pop('angles', 'uv')
self.width = kw.pop('width', None)
self.color = kw.pop('color', 'k')
pivot = kw.pop('pivot', 'tail').lower()
# validate pivot
if pivot not in self._PIVOT_VALS:
raise ValueError(
'pivot must be one of {keys}, you passed {inp}'.format(
keys=self._PIVOT_VALS, inp=pivot))
# normalize to 'middle'
if pivot == 'mid':
pivot = 'middle'
self.pivot = pivot
self.transform = kw.pop('transform', ax.transData)
kw.setdefault('facecolors', self.color)
kw.setdefault('linewidths', (0,))
mcollections.PolyCollection.__init__(self, [], offsets=self.XY,
transOffset=self.transform,
closed=False,
**kw)
self.polykw = kw
self.set_UVC(U, V, C)
self._initialized = False
self.keyvec = None
self.keytext = None
# try to prevent closure over the real self
weak_self = weakref.ref(self)
def on_dpi_change(fig):
self_weakref = weak_self()
if self_weakref is not None:
self_weakref._new_UV = True # vertices depend on width, span
# which in turn depend on dpi
self_weakref._initialized = False # simple brute force update
# works because _init is
# called at the start of
# draw.
self._cid = self.ax.figure.callbacks.connect('dpi_changed',
on_dpi_change)
def remove(self):
"""
Overload the remove method
"""
# disconnect the call back
self.ax.figure.callbacks.disconnect(self._cid)
self._cid = None
# pass the remove call up the stack
mcollections.PolyCollection.remove(self)
def _init(self):
"""
Initialization delayed until first draw;
allow time for axes setup.
"""
# It seems that there are not enough event notifications
# available to have this work on an as-needed basis at present.
if True: # not self._initialized:
trans = self._set_transform()
ax = self.ax
sx, sy = trans.inverted().transform_point(
(ax.bbox.width, ax.bbox.height))
self.span = sx
if self.width is None:
sn = np.clip(math.sqrt(self.N), 8, 25)
self.width = 0.06 * self.span / sn
# _make_verts sets self.scale if not already specified
if not self._initialized and self.scale is None:
self._make_verts(self.U, self.V, self.angles)
self._initialized = True
def get_datalim(self, transData):
trans = self.get_transform()
transOffset = self.get_offset_transform()
full_transform = (trans - transData) + (transOffset - transData)
XY = full_transform.transform(self.XY)
bbox = transforms.Bbox.null()
bbox.update_from_data_xy(XY, ignore=True)
return bbox
@allow_rasterization
def draw(self, renderer):
self._init()
verts = self._make_verts(self.U, self.V, self.angles)
self.set_verts(verts, closed=False)
self._new_UV = False
mcollections.PolyCollection.draw(self, renderer)
self.stale = False
def set_UVC(self, U, V, C=None):
# We need to ensure we have a copy, not a reference
# to an array that might change before draw().
U = ma.masked_invalid(U, copy=True).ravel()
V = ma.masked_invalid(V, copy=True).ravel()
mask = ma.mask_or(U.mask, V.mask, copy=False, shrink=True)
if C is not None:
C = ma.masked_invalid(C, copy=True).ravel()
mask = ma.mask_or(mask, C.mask, copy=False, shrink=True)
if mask is ma.nomask:
C = C.filled()
else:
C = ma.array(C, mask=mask, copy=False)
self.U = U.filled(1)
self.V = V.filled(1)
self.Umask = mask
if C is not None:
self.set_array(C)
self._new_UV = True
self.stale = True
def _dots_per_unit(self, units):
"""
Return a scale factor for converting from units to pixels
"""
ax = self.ax
if units in ('x', 'y', 'xy'):
if units == 'x':
dx0 = ax.viewLim.width
dx1 = ax.bbox.width
elif units == 'y':
dx0 = ax.viewLim.height
dx1 = ax.bbox.height
else: # 'xy' is assumed
dxx0 = ax.viewLim.width
dxx1 = ax.bbox.width
dyy0 = ax.viewLim.height
dyy1 = ax.bbox.height
dx1 = np.hypot(dxx1, dyy1)
dx0 = np.hypot(dxx0, dyy0)
dx = dx1 / dx0
else:
if units == 'width':
dx = ax.bbox.width
elif units == 'height':
dx = ax.bbox.height
elif units == 'dots':
dx = 1.0
elif units == 'inches':
dx = ax.figure.dpi
else:
raise ValueError('unrecognized units')
return dx
def _set_transform(self):
"""
Sets the PolygonCollection transform to go
from arrow width units to pixels.
"""
dx = self._dots_per_unit(self.units)
self._trans_scale = dx # pixels per arrow width unit
trans = transforms.Affine2D().scale(dx)
self.set_transform(trans)
return trans
def _angles_lengths(self, U, V, eps=1):
xy = self.ax.transData.transform(self.XY)
uv = np.hstack((U[:, np.newaxis], V[:, np.newaxis]))
xyp = self.ax.transData.transform(self.XY + eps * uv)
dxy = xyp - xy
angles = np.arctan2(dxy[:, 1], dxy[:, 0])
lengths = np.hypot(*dxy.T) / eps
return angles, lengths
def _make_verts(self, U, V, angles):
uv = (U + V * 1j)
str_angles = angles if isinstance(angles, six.string_types) else ''
if str_angles == 'xy' and self.scale_units == 'xy':
# Here eps is 1 so that if we get U, V by diffing
# the X, Y arrays, the vectors will connect the
# points, regardless of the axis scaling (including log).
angles, lengths = self._angles_lengths(U, V, eps=1)
elif str_angles == 'xy' or self.scale_units == 'xy':
# Calculate eps based on the extents of the plot
# so that we don't end up with roundoff error from
# adding a small number to a large.
eps = np.abs(self.ax.dataLim.extents).max() * 0.001
angles, lengths = self._angles_lengths(U, V, eps=eps)
if str_angles and self.scale_units == 'xy':
a = lengths
else:
a = np.abs(uv)
if self.scale is None:
sn = max(10, math.sqrt(self.N))
if self.Umask is not ma.nomask:
amean = a[~self.Umask].mean()
else:
amean = a.mean()
# crude auto-scaling
# scale is typical arrow length as a multiple of the arrow width
scale = 1.8 * amean * sn / self.span
if self.scale_units is None:
if self.scale is None:
self.scale = scale
widthu_per_lenu = 1.0
else:
if self.scale_units == 'xy':
dx = 1
else:
dx = self._dots_per_unit(self.scale_units)
widthu_per_lenu = dx / self._trans_scale
if self.scale is None:
self.scale = scale * widthu_per_lenu
length = a * (widthu_per_lenu / (self.scale * self.width))
X, Y = self._h_arrows(length)
if str_angles == 'xy':
theta = angles
elif str_angles == 'uv':
theta = np.angle(uv)
else:
theta = ma.masked_invalid(np.deg2rad(angles)).filled(0)
theta = theta.reshape((-1, 1)) # for broadcasting
xy = (X + Y * 1j) * np.exp(1j * theta) * self.width
xy = xy[:, :, np.newaxis]
XY = np.concatenate((xy.real, xy.imag), axis=2)
if self.Umask is not ma.nomask:
XY = ma.array(XY)
XY[self.Umask] = ma.masked
# This might be handled more efficiently with nans, given
# that nans will end up in the paths anyway.
return XY
def _h_arrows(self, length):
""" length is in arrow width units """
# It might be possible to streamline the code
# and speed it up a bit by using complex (x,y)
# instead of separate arrays; but any gain would be slight.
minsh = self.minshaft * self.headlength
N = len(length)
length = length.reshape(N, 1)
# This number is chosen based on when pixel values overflow in Agg
# causing rendering errors
# length = np.minimum(length, 2 ** 16)
np.clip(length, 0, 2 ** 16, out=length)
# x, y: normal horizontal arrow
x = np.array([0, -self.headaxislength,
-self.headlength, 0],
np.float64)
x = x + np.array([0, 1, 1, 1]) * length
y = 0.5 * np.array([1, 1, self.headwidth, 0], np.float64)
y = np.repeat(y[np.newaxis, :], N, axis=0)
# x0, y0: arrow without shaft, for short vectors
x0 = np.array([0, minsh - self.headaxislength,
minsh - self.headlength, minsh], np.float64)
y0 = 0.5 * np.array([1, 1, self.headwidth, 0], np.float64)
ii = [0, 1, 2, 3, 2, 1, 0, 0]
X = x.take(ii, 1)
Y = y.take(ii, 1)
Y[:, 3:-1] *= -1
X0 = x0.take(ii)
Y0 = y0.take(ii)
Y0[3:-1] *= -1
shrink = length / minsh if minsh != 0. else 0.
X0 = shrink * X0[np.newaxis, :]
Y0 = shrink * Y0[np.newaxis, :]
short = np.repeat(length < minsh, 8, axis=1)
# Now select X0, Y0 if short, otherwise X, Y
np.copyto(X, X0, where=short)
np.copyto(Y, Y0, where=short)
if self.pivot == 'middle':
X -= 0.5 * X[:, 3, np.newaxis]
elif self.pivot == 'tip':
X = X - X[:, 3, np.newaxis] # numpy bug? using -= does not
# work here unless we multiply
# by a float first, as with 'mid'.
elif self.pivot != 'tail':
raise ValueError(("Quiver.pivot must have value in {{'middle', "
"'tip', 'tail'}} not {0}").format(self.pivot))
tooshort = length < self.minlength
if tooshort.any():
# Use a heptagonal dot:
th = np.arange(0, 8, 1, np.float64) * (np.pi / 3.0)
x1 = np.cos(th) * self.minlength * 0.5
y1 = np.sin(th) * self.minlength * 0.5
X1 = np.repeat(x1[np.newaxis, :], N, axis=0)
Y1 = np.repeat(y1[np.newaxis, :], N, axis=0)
tooshort = np.repeat(tooshort, 8, 1)
np.copyto(X, X1, where=tooshort)
np.copyto(Y, Y1, where=tooshort)
# Mask handling is deferred to the caller, _make_verts.
return X, Y
quiver_doc = _quiver_doc
_barbs_doc = r"""
Plot a 2-D field of barbs.
Call signatures::
barb(U, V, **kw)
barb(U, V, C, **kw)
barb(X, Y, U, V, **kw)
barb(X, Y, U, V, C, **kw)
Arguments:
*X*, *Y*:
The x and y coordinates of the barb locations
(default is head of barb; see *pivot* kwarg)
*U*, *V*:
Give the x and y components of the barb shaft
*C*:
An optional array used to map colors to the barbs
All arguments may be 1-D or 2-D arrays or sequences. If *X* and *Y*
are absent, they will be generated as a uniform grid. If *U* and *V*
are 2-D arrays but *X* and *Y* are 1-D, and if ``len(X)`` and ``len(Y)``
match the column and row dimensions of *U*, then *X* and *Y* will be
expanded with :func:`numpy.meshgrid`.
*U*, *V*, *C* may be masked arrays, but masked *X*, *Y* are not
supported at present.
Keyword arguments:
*length*:
Length of the barb in points; the other parts of the barb
are scaled against this.
Default is 7.
*pivot*: [ 'tip' | 'middle' | float ]
The part of the arrow that is at the grid point; the arrow rotates
about this point, hence the name *pivot*. Default is 'tip'. Can
also be a number, which shifts the start of the barb that many
points from the origin.
*barbcolor*: [ color | color sequence ]
Specifies the color all parts of the barb except any flags. This
parameter is analogous to the *edgecolor* parameter for polygons,
which can be used instead. However this parameter will override
facecolor.
*flagcolor*: [ color | color sequence ]
Specifies the color of any flags on the barb. This parameter is
analogous to the *facecolor* parameter for polygons, which can be
used instead. However this parameter will override facecolor. If
this is not set (and *C* has not either) then *flagcolor* will be
set to match *barbcolor* so that the barb has a uniform color. If
*C* has been set, *flagcolor* has no effect.
*sizes*:
A dictionary of coefficients specifying the ratio of a given
feature to the length of the barb. Only those values one wishes to
override need to be included. These features include:
- 'spacing' - space between features (flags, full/half barbs)
- 'height' - height (distance from shaft to top) of a flag or
full barb
- 'width' - width of a flag, twice the width of a full barb
- 'emptybarb' - radius of the circle used for low magnitudes
*fill_empty*:
A flag on whether the empty barbs (circles) that are drawn should
be filled with the flag color. If they are not filled, they will
be drawn such that no color is applied to the center. Default is
False
*rounding*:
A flag to indicate whether the vector magnitude should be rounded
when allocating barb components. If True, the magnitude is
rounded to the nearest multiple of the half-barb increment. If
False, the magnitude is simply truncated to the next lowest
multiple. Default is True
*barb_increments*:
A dictionary of increments specifying values to associate with
different parts of the barb. Only those values one wishes to
override need to be included.
- 'half' - half barbs (Default is 5)
- 'full' - full barbs (Default is 10)
- 'flag' - flags (default is 50)
*flip_barb*:
Either a single boolean flag or an array of booleans. Single
boolean indicates whether the lines and flags should point
opposite to normal for all barbs. An array (which should be the
same size as the other data arrays) indicates whether to flip for
each individual barb. Normal behavior is for the barbs and lines
to point right (comes from wind barbs having these features point
towards low pressure in the Northern Hemisphere.) Default is
False
Barbs are traditionally used in meteorology as a way to plot the speed
and direction of wind observations, but can technically be used to
plot any two dimensional vector quantity. As opposed to arrows, which
give vector magnitude by the length of the arrow, the barbs give more
quantitative information about the vector magnitude by putting slanted
lines or a triangle for various increments in magnitude, as shown
schematically below::
: /\ \\
: / \ \\
: / \ \ \\
: / \ \ \\
: ------------------------------
.. note the double \\ at the end of each line to make the figure
.. render correctly
The largest increment is given by a triangle (or "flag"). After those
come full lines (barbs). The smallest increment is a half line. There
is only, of course, ever at most 1 half line. If the magnitude is
small and only needs a single half-line and no full lines or
triangles, the half-line is offset from the end of the barb so that it
can be easily distinguished from barbs with a single full line. The
magnitude for the barb shown above would nominally be 65, using the
standard increments of 50, 10, and 5.
linewidths and edgecolors can be used to customize the barb.
Additional :class:`~matplotlib.collections.PolyCollection` keyword
arguments:
%(PolyCollection)s
""" % docstring.interpd.params
docstring.interpd.update(barbs_doc=_barbs_doc)
class Barbs(mcollections.PolyCollection):
'''
Specialized PolyCollection for barbs.
The only API method is :meth:`set_UVC`, which can be used to
change the size, orientation, and color of the arrows. Locations
are changed using the :meth:`set_offsets` collection method.
Possibly this method will be useful in animations.
There is one internal function :meth:`_find_tails` which finds
exactly what should be put on the barb given the vector magnitude.
From there :meth:`_make_barbs` is used to find the vertices of the
polygon to represent the barb based on this information.
'''
# This may be an abuse of polygons here to render what is essentially maybe
# 1 triangle and a series of lines. It works fine as far as I can tell
# however.
@docstring.interpd
def __init__(self, ax, *args, **kw):
"""
The constructor takes one required argument, an Axes
instance, followed by the args and kwargs described
by the following pylab interface documentation:
%(barbs_doc)s
"""
self._pivot = kw.pop('pivot', 'tip')
self._length = kw.pop('length', 7)
barbcolor = kw.pop('barbcolor', None)
flagcolor = kw.pop('flagcolor', None)
self.sizes = kw.pop('sizes', dict())
self.fill_empty = kw.pop('fill_empty', False)
self.barb_increments = kw.pop('barb_increments', dict())
self.rounding = kw.pop('rounding', True)
self.flip = kw.pop('flip_barb', False)
transform = kw.pop('transform', ax.transData)
# Flagcolor and barbcolor provide convenience parameters for
# setting the facecolor and edgecolor, respectively, of the barb
# polygon. We also work here to make the flag the same color as the
# rest of the barb by default
if None in (barbcolor, flagcolor):
kw['edgecolors'] = 'face'
if flagcolor:
kw['facecolors'] = flagcolor
elif barbcolor:
kw['facecolors'] = barbcolor
else:
# Set to facecolor passed in or default to black
kw.setdefault('facecolors', 'k')
else:
kw['edgecolors'] = barbcolor
kw['facecolors'] = flagcolor
# Explicitly set a line width if we're not given one, otherwise
# polygons are not outlined and we get no barbs
if 'linewidth' not in kw and 'lw' not in kw:
kw['linewidth'] = 1
# Parse out the data arrays from the various configurations supported
x, y, u, v, c = _parse_args(*args)
self.x = x
self.y = y
xy = np.hstack((x[:, np.newaxis], y[:, np.newaxis]))
# Make a collection
barb_size = self._length ** 2 / 4 # Empirically determined
mcollections.PolyCollection.__init__(self, [], (barb_size,),
offsets=xy,
transOffset=transform, **kw)
self.set_transform(transforms.IdentityTransform())
self.set_UVC(u, v, c)
def _find_tails(self, mag, rounding=True, half=5, full=10, flag=50):
'''
Find how many of each of the tail pieces is necessary. Flag
specifies the increment for a flag, barb for a full barb, and half for
half a barb. Mag should be the magnitude of a vector (i.e., >= 0).
This returns a tuple of:
(*number of flags*, *number of barbs*, *half_flag*, *empty_flag*)
*half_flag* is a boolean whether half of a barb is needed,
since there should only ever be one half on a given
barb. *empty_flag* is an array of flags to easily tell if
a barb is empty (too low to plot any barbs/flags).
'''
# If rounding, round to the nearest multiple of half, the smallest
# increment
if rounding:
mag = half * (mag / half + 0.5).astype(int)
num_flags = np.floor(mag / flag).astype(int)
mag = np.mod(mag, flag)
num_barb = np.floor(mag / full).astype(int)
mag = np.mod(mag, full)
half_flag = mag >= half
empty_flag = ~(half_flag | (num_flags > 0) | (num_barb > 0))
return num_flags, num_barb, half_flag, empty_flag
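# Worked example (for illustration only): with the default increments
# half=5, full=10, flag=50, a magnitude of 65 decomposes as
#   num_flags = floor(65 / 50) = 1, remainder 15
#   num_barb  = floor(15 / 10) = 1, remainder 5
#   half_flag = (5 >= 5)       = True
#   empty     = False
# which matches the barb that is described as "nominally 65" in the
# module-level barbs documentation above.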
def _make_barbs(self, u, v, nflags, nbarbs, half_barb, empty_flag, length,
pivot, sizes, fill_empty, flip):
'''
This function actually creates the wind barbs. *u* and *v*
are components of the vector in the *x* and *y* directions,
respectively.
*nflags*, *nbarbs*, *half_barb*, and *empty_flag* are, respectively,
the number of flags, the number of barbs, the flag for half a barb,
and the flag for an empty barb, ostensibly obtained from
:meth:`_find_tails`.
*length* is the length of the barb staff in points.
*pivot* specifies the point on the barb around which the
entire barb should be rotated. Right now, valid options are
'tip' and 'middle'. Can also be a number, which shifts the start
of the barb that many points from the origin.
*sizes* is a dictionary of coefficients specifying the ratio
of a given feature to the length of the barb. These features
include:
- *spacing*: space between features (flags, full/half
barbs)
- *height*: distance from the shaft to the top of a flag or full
barb
- *width* - width of a flag, twice the width of a full barb
- *emptybarb* - radius of the circle used for low
magnitudes
*fill_empty* specifies whether the circle representing an
empty barb should be filled or not (this changes the drawing
of the polygon).
*flip* is a flag indicating whether the features should be flipped to
the other side of the barb (useful for winds in the southern
hemisphere).
This function returns list of arrays of vertices, defining a polygon
for each of the wind barbs. These polygons have been rotated to
properly align with the vector direction.
'''
# These control the spacing and size of barb elements relative to the
# length of the shaft
spacing = length * sizes.get('spacing', 0.125)
full_height = length * sizes.get('height', 0.4)
full_width = length * sizes.get('width', 0.25)
empty_rad = length * sizes.get('emptybarb', 0.15)
# Controls y point where to pivot the barb.
pivot_points = dict(tip=0.0, middle=-length / 2.)
# Check for flip
if flip:
full_height = -full_height
endx = 0.0
try:
endy = float(pivot)
except ValueError:
endy = pivot_points[pivot.lower()]
# Get the appropriate angle for the vector components. The offset is
# due to the way the barb is initially drawn, going down the y-axis.
# This makes sense in a meteorological mode of thinking, since 0
# degrees then corresponds to north (traditionally the y-axis)
angles = -(ma.arctan2(v, u) + np.pi / 2)
# Used for low magnitudes. We compute the vertices once out here so
# they can be reused. The center set here should put the
# center of the circle at the location(offset), rather than at the
# same point as the barb pivot; this seems more sensible.
circ = CirclePolygon((0, 0), radius=empty_rad).get_verts()
if fill_empty:
empty_barb = circ
else:
# If we don't want the empty one filled, we make a degenerate
# polygon that wraps back over itself
empty_barb = np.concatenate((circ, circ[::-1]))
barb_list = []
for index, angle in np.ndenumerate(angles):
# If the vector magnitude is too weak to draw anything, plot an
# empty circle instead
if empty_flag[index]:
# We can skip the transform since the circle has no preferred
# orientation
barb_list.append(empty_barb)
continue
poly_verts = [(endx, endy)]
offset = length
# Add vertices for each flag
for i in range(nflags[index]):
# The spacing that works for the barbs is a little too much for
# the flags, but this only occurs when we have more than 1
# flag.
if offset != length:
offset += spacing / 2.
poly_verts.extend(
[[endx, endy + offset],
[endx + full_height, endy - full_width / 2 + offset],
[endx, endy - full_width + offset]])
offset -= full_width + spacing
# Add vertices for each barb. These really are lines, but adding 3
# vertices that pull the polygon out and back down the line works
# well enough
for i in range(nbarbs[index]):
poly_verts.extend(
[(endx, endy + offset),
(endx + full_height, endy + offset + full_width / 2),
(endx, endy + offset)])
offset -= spacing
# Add the vertices for half a barb, if needed
if half_barb[index]:
# If the half barb is the first on the staff, traditionally it
# is offset from the end to make it easy to distinguish from a
# barb with a full one
if offset == length:
poly_verts.append((endx, endy + offset))
offset -= 1.5 * spacing
poly_verts.extend(
[(endx, endy + offset),
(endx + full_height / 2, endy + offset + full_width / 4),
(endx, endy + offset)])
# Rotate the barb according to the angle. Making the barb first and
# then rotating it made the math for drawing the barb really easy.
# Also, the transform framework makes doing the rotation simple.
poly_verts = transforms.Affine2D().rotate(-angle).transform(
poly_verts)
barb_list.append(poly_verts)
return barb_list
def set_UVC(self, U, V, C=None):
self.u = ma.masked_invalid(U, copy=False).ravel()
self.v = ma.masked_invalid(V, copy=False).ravel()
if C is not None:
c = ma.masked_invalid(C, copy=False).ravel()
x, y, u, v, c = delete_masked_points(self.x.ravel(),
self.y.ravel(),
self.u, self.v, c)
_check_consistent_shapes(x, y, u, v, c)
else:
x, y, u, v = delete_masked_points(self.x.ravel(), self.y.ravel(),
self.u, self.v)
_check_consistent_shapes(x, y, u, v)
magnitude = np.hypot(u, v)
flags, barbs, halves, empty = self._find_tails(magnitude,
self.rounding,
**self.barb_increments)
# Get the vertices for each of the barbs
plot_barbs = self._make_barbs(u, v, flags, barbs, halves, empty,
self._length, self._pivot, self.sizes,
self.fill_empty, self.flip)
self.set_verts(plot_barbs)
# Set the color array
if C is not None:
self.set_array(c)
# Update the offsets in case the masked data changed
xy = np.hstack((x[:, np.newaxis], y[:, np.newaxis]))
self._offsets = xy
self.stale = True
def set_offsets(self, xy):
"""
Set the offsets for the barb polygons. This saves the offsets passed
in and actually sets a version masked as appropriate for the existing
U/V data. *offsets* should be a sequence.
ACCEPTS: sequence of pairs of floats
"""
self.x = xy[:, 0]
self.y = xy[:, 1]
x, y, u, v = delete_masked_points(self.x.ravel(), self.y.ravel(),
self.u, self.v)
_check_consistent_shapes(x, y, u, v)
xy = np.hstack((x[:, np.newaxis], y[:, np.newaxis]))
mcollections.PolyCollection.set_offsets(self, xy)
self.stale = True
set_offsets.__doc__ = mcollections.PolyCollection.set_offsets.__doc__
barbs_doc = _barbs_doc
|
jonyroda97/redbot-amigosprovaveis
|
lib/matplotlib/quiver.py
|
Python
|
gpl-3.0
| 46,115 | 0.00039 |
# coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
from pants.backend.core.tasks.task import Task
from pants.util.dirutil import safe_mkdir, safe_open, safe_rmtree
class RuntimeClasspathPublisher(Task):
"""Creates symlinks in pants_distdir to classpath entries per target."""
@classmethod
def prepare(cls, options, round_manager):
round_manager.require_data('runtime_classpath')
@property
def _output_folder(self):
return self.options_scope.replace('.', os.sep)
def _stable_output_folder(self, target):
"""
:type target: pants.build_graph.target.Target
"""
address = target.address
return os.path.join(
self.get_options().pants_distdir,
self._output_folder,
# target.address.spec is used in export goal to identify targets
address.spec.replace(':', os.sep) if address.spec_path else address.target_name,
)
def execute(self):
runtime_classpath = self.context.products.get_data('runtime_classpath')
for target in self.context.targets():
folder_for_symlinks = self._stable_output_folder(target)
safe_rmtree(folder_for_symlinks)
classpath_entries_for_target = runtime_classpath.get_internal_classpath_entries_for_targets(
[target])
if len(classpath_entries_for_target) > 0:
safe_mkdir(folder_for_symlinks)
classpath = []
for (index, (conf, entry)) in enumerate(classpath_entries_for_target):
classpath.append(entry.path)
file_name = os.path.basename(entry.path)
# Avoid name collisions
symlink_name = '{}-{}'.format(index, file_name)
os.symlink(entry.path, os.path.join(folder_for_symlinks, symlink_name))
with safe_open(os.path.join(folder_for_symlinks, 'classpath.txt'), 'w') as classpath_file:
classpath_file.write(os.pathsep.join(classpath))
classpath_file.write('\n')
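# Standalone sketch (not used by the task; the scope, spec and dist dir
# values below are made-up examples) of how the stable output folder is
# derived from a target's address spec, mirroring _stable_output_folder:
def _example_stable_output_folder(pants_distdir='/tmp/dist',
                                  options_scope='export-classpath',
                                  spec='src/java/com/example:lib'):
    output_folder = options_scope.replace('.', os.sep)
    # ':' separates the spec path from the target name in an address spec
    return os.path.join(pants_distdir, output_folder, spec.replace(':', os.sep))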
|
qma/pants
|
src/python/pants/backend/jvm/tasks/jvm_compile/jvm_classpath_publisher.py
|
Python
|
apache-2.0
| 2,151 | 0.010693 |
#!/usr/bin/env python3
# Optimizer calibration test for bitmap and brin indexes, also btree on AO tables
#
# This program runs a set of queries, varying several parameters:
#
# - Selectivity of predicates
# - Plan type (plan chosen by the optimizer, various forced plans)
# - Width of the selected columns
#
# The program then reports the result of explaining these queries and
# letting the optimizer choose plans vs. forcing plans. Execution time
# can be reported, computing a mean and standard deviation of several
# query executions.
#
# The printed results are useful to copy and paste into a Google Sheet
# (expand columns after pasting)
#
# Run this program with the -h or --help option to see argument syntax
#
# See comment "How to add a test" below in the program for how to
# extend this program.
import argparse
import time
import re
import math
import os
import sys
try:
from gppylib.db import dbconn
except ImportError as e:
sys.exit('ERROR: Cannot import modules. Please check that you have sourced greenplum_path.sh. Detail: ' + str(e))
# constants
# -----------------------------------------------------------------------------
_help = """
Run optimizer bitmap calibration tests. Optionally create the tables before running, and drop them afterwards.
This explains and runs a series of queries and reports the estimated and actual costs.
The results can be copied and pasted into a spreadsheet for further processing.
"""
TABLE_NAME_PATTERN = r"cal_txtest"
NDV_TABLE_NAME_PATTERN = r"cal_ndvtest"
BFV_TABLE_NAME_PATTERN = r"cal_bfvtest"
WIDE_TABLE_NAME_PATTERN = r"cal_widetest"
BRIN_TABLE_NAME_PATTERN = r"cal_brintest"
TABLE_SCAN = "table_scan"
TABLE_SCAN_PATTERN = r"Seq Scan"
TABLE_SCAN_PATTERN_V5 = r"Table Scan"
INDEX_SCAN = "index_scan"
INDEX_SCAN_PATTERN = r"> Index Scan"
INDEX_SCAN_PATTERN_V5 = r"> Index Scan"
INDEX_ONLY_SCAN = "indexonly_scan"
INDEX_ONLY_SCAN_PATTERN = r"> Index Only Scan"
INDEX_ONLY_SCAN_PATTERN_V5 = r"> Index Only Scan"
BITMAP_SCAN = "bitmap_scan"
BITMAP_SCAN_PATTERN = r"Bitmap Heap Scan"
BITMAP_SCAN_PATTERN_V5 = r"Bitmap Table Scan"
HASH_JOIN = "hash_join"
HASH_JOIN_PATTERN = r"Hash Join"
HASH_JOIN_PATTERN_V5 = r"Hash Join"
NL_JOIN = "nl_join"
NL_JOIN_PATTERN = r"Nested Loop"
NL_JOIN_PATTERN_V5 = r"Nested Loop"
FALLBACK_PLAN = "fallback"
FALLBACK_PATTERN = "Postgres query optimizer"
FALLBACK_PATTERN_V5 = "legacy query optimizer"
OPTIMIZER_DEFAULT_PLAN = "optimizer"
# global variables
# -----------------------------------------------------------------------------
# constants
# only consider optimizer errors beyond x * sigma (standard deviation) as significant
glob_sigma_diff = 3
glob_log_file = None
glob_exe_timeout = 40000
glob_gpdb_major_version = 7
glob_dim_table_rows = 10000
# global variables that may be modified
glob_verbose = False
glob_rowcount = -1
glob_appendonly = False
# SQL statements, DDL and DML
# -----------------------------------------------------------------------------
_drop_tables = """
DROP TABLE IF EXISTS cal_txtest, cal_temp_ids, cal_dim, cal_bfvtest, cal_bfv_dim, cal_ndvtest, cal_widetest;
"""
# create the table. Parameters:
# - WITH clause (optional), for append-only tables
_create_cal_table = """
CREATE TABLE cal_txtest(id int,
btreeunique int,
btree10 int,
btree100 int,
btree1000 int,
btree10000 int,
bitmap10 int,
bitmap100 int,
bitmap1000 int,
bitmap10000 int,
txt text)
%s
DISTRIBUTED BY (id);
"""
_create_bfv_table = """
CREATE TABLE cal_bfvtest (col1 integer,
wk_id int,
id integer)
%s
DISTRIBUTED BY (col1);
"""
_create_ndv_table = """
CREATE TABLE cal_ndvtest (id int, val int)
%s
DISTRIBUTED BY (id);
"""
_create_brin_table = """
CREATE TABLE cal_brintest(id int,
clust_10 int,
clust_100 int,
clust_1000 int,
clust_10000 int,
clust_uniq int,
rand_10 int,
rand_100 int,
rand_1000 int,
rand_10000 int,
rand_uniq int,
txt text)
%s
DISTRIBUTED BY (id);
"""
_with_appendonly = """
WITH (appendonly=true)
"""
_create_other_tables = ["""
CREATE TABLE cal_temp_ids(f_id int, f_rand double precision) DISTRIBUTED BY (f_id);
""",
"""
CREATE TABLE cal_dim(dim_id int,
dim_id2 int,
txt text)
DISTRIBUTED BY (dim_id);
""",
"""
CREATE TABLE cal_bfv_dim (id integer, col2 integer) DISTRIBUTED BY (id);
"""]
# insert into temp table. Parameters:
# - integer stop value (suggested value is 10,000,000)
_insert_into_temp = """
INSERT INTO cal_temp_ids SELECT x, random() FROM (SELECT * FROM generate_series(1,%d)) T(x);
"""
_insert_into_table = """
INSERT INTO cal_txtest
SELECT f_id,
f_id,
f_id%10 + 1,
f_id%100 + 1,
f_id%1000 + 1,
f_id%10000 + 1,
f_id%10 + 1,
f_id%100 + 1,
f_id%1000 + 1,
f_id%10000 + 1,
repeat('a', 960)
FROM cal_temp_ids
order by f_rand;
"""
# use a row_number() function to create column values that are strongly correlated
# to the physical order of the rows on disk
_insert_into_brin_table = """
INSERT INTO cal_brintest
SELECT ordered_id,
ceil(ordered_id*(10.0/{rows})),
ceil(ordered_id*(100.0/{rows})),
ceil(ordered_id*(1000.0/{rows})),
ceil(ordered_id*(10000.0/{rows})),
ordered_id,
f_id%10 + 1,
f_id%100 + 1,
f_id%1000 + 1,
f_id%10000 + 1,
f_id,
repeat('a', 956)
FROM (select row_number() over(order by f_rand) as ordered_id, f_id, f_rand from cal_temp_ids) src
order by f_rand;
"""
_insert_into_other_tables = """
INSERT INTO cal_dim SELECT x, x, repeat('d', 100) FROM (SELECT * FROM generate_series(%d,%d)) T(x);
"""
_create_index_arr = ["""
CREATE INDEX cal_txtest_i_bitmap_10 ON cal_txtest USING bitmap(bitmap10);
""",
"""
CREATE INDEX cal_txtest_i_bitmap_100 ON cal_txtest USING bitmap(bitmap100);
""",
"""
CREATE INDEX cal_txtest_i_bitmap_1000 ON cal_txtest USING bitmap(bitmap1000);
""",
"""
CREATE INDEX cal_txtest_i_bitmap_10000 ON cal_txtest USING bitmap(bitmap10000);
""",
]
_create_bfv_index_arr = ["""
CREATE INDEX idx_cal_bfvtest_bitmap ON cal_bfvtest USING bitmap(id);
""",
]
_create_ndv_index_arr = ["""
CREATE INDEX cal_ndvtest_bitmap ON cal_ndvtest USING bitmap(val);
""",
]
_create_btree_indexes_arr = ["""
CREATE INDEX cal_txtest_i_btree_unique ON cal_txtest USING btree(btreeunique);
""",
"""
CREATE INDEX cal_txtest_i_btree_10 ON cal_txtest USING btree(btree10);
""",
"""
CREATE INDEX cal_txtest_i_btree_100 ON cal_txtest USING btree(btree100);
""",
"""
CREATE INDEX cal_txtest_i_btree_1000 ON cal_txtest USING btree(btree1000);
""",
"""
CREATE INDEX cal_txtest_i_btree_10000 ON cal_txtest USING btree(btree10000);
""",
"""
CREATE INDEX idx_cal_bfvtest_btree ON cal_bfvtest USING btree(id);
""",
"""
CREATE INDEX cal_ndvtest_btree ON cal_ndvtest USING btree(val);
""",
]
_create_brin_index_arr = ["""
CREATE INDEX cal_brintest_brin ON cal_brintest USING brin(
id, clust_10, clust_100, clust_1000, clust_10000, clust_uniq, rand_10, rand_100, rand_1000, rand_10000, rand_uniq, txt)
WITH(pages_per_range=4);
""",
]
_analyze_table = """
ANALYZE cal_txtest;
ANALYZE cal_brintest;
"""
_allow_system_mods = """
SET allow_system_table_mods to on;
"""
_allow_system_mods_v5 = """
SET allow_system_table_mods to 'dml';
"""
# Make sure pg_statistic and pg_class have accurate statistics, so that the cardinality estimates we get are very precise
_update_pg_class = """
UPDATE pg_class
SET reltuples = %i
WHERE relname = '%s';
"""
# add an MCV or histogram (stakind1 = 1 or 2) and a correlation (stakind2 = 3) value
_update_pg_stats = """
UPDATE pg_statistic
SET stadistinct = %f,
stakind1 = %d,
stanumbers1 = %s,
stavalues1 = %s,
stakind2 = 3,
stanumbers2 = '{ %f }',
stavalues2 = NULL,
stakind3 = 0,
stanumbers3 = NULL,
stavalues3 = NULL,
stakind4 = 0,
stanumbers4 = NULL,
stavalues4 = NULL
WHERE starelid = '%s'::regclass AND staattnum = %i;
"""
# columns to fix, in the format (table name, column name, attnum, ndv, num rows, correlation)
# use -1 as the NDV for unique columns and use -1 for the variable number of rows in the fact table
_stats_cols_to_fix = [
('cal_txtest', 'id', 1, -1, -1, 0.0),
('cal_txtest', 'btreeunique', 2, -1, -1, 0.0),
('cal_txtest', 'btree10', 3, 10, -1, 0.0),
('cal_txtest', 'btree100', 4, 100, -1, 0.0),
('cal_txtest', 'btree1000', 5, 1000, -1, 0.0),
('cal_txtest', 'btree10000', 6, 10000, -1, 0.0),
('cal_txtest', 'bitmap10', 7, 10, -1, 0.0),
('cal_txtest', 'bitmap100', 8, 100, -1, 0.0),
('cal_txtest', 'bitmap1000', 9, 1000, -1, 0.0),
('cal_txtest', 'bitmap10000', 10, 10000, -1, 0.0),
('cal_dim', 'dim_id', 1, -1, glob_dim_table_rows, 0.0),
('cal_dim', 'dim_id2', 2, -1, glob_dim_table_rows, 0.0),
('cal_brintest','id', 1, -1, -1, 1.0),
('cal_brintest','clust_10', 2, 10, -1, 1.0),
('cal_brintest','clust_100', 3, 100, -1, 1.0),
('cal_brintest','clust_1000', 4, 1000, -1, 1.0),
('cal_brintest','clust_10000', 5, 10000, -1, 1.0),
('cal_brintest','clust_uniq', 6, -1, -1, 1.0),
('cal_brintest','rand_10', 7, 10, -1, 0.0),
('cal_brintest','rand_100', 8, 100, -1, 0.0),
('cal_brintest','rand_1000', 9, 1000, -1, 0.0),
('cal_brintest','rand_10000', 10, 10000, -1, 0.0),
('cal_brintest','rand_uniq', 11, -1, -1, 0.0)]
# deal with command line arguments
# -----------------------------------------------------------------------------
def parseargs():
parser = argparse.ArgumentParser(description=_help)
parser.add_argument("tests", metavar="TEST", choices=[[], "all", "none", "bitmap_scan_tests", "btree_ao_scan_tests",
"bitmap_ndv_scan_tests", "index_join_tests", "bfv_join_tests",
"index_only_scan_tests", "brin_tests"],
nargs="*",
help="Run these tests (all, none, bitmap_scan_tests, btree_ao_scan_tests, bitmap_ndv_scan_tests, "
"index_join_tests, bfv_join_tests, index_only_scan_tests, brin_tests), default is none")
parser.add_argument("--create", action="store_true",
help="Create the tables to use in the test")
parser.add_argument("--execute", type=int, default="0",
help="Number of times to execute queries, 0 (the default) means explain only")
parser.add_argument("--drop", action="store_true",
help="Drop the tables used in the test when finished")
parser.add_argument("--verbose", action="store_true",
help="Print more verbose output")
parser.add_argument("--logFile", default="",
help="Log diagnostic output to a file")
parser.add_argument("--host", default="",
help="Host to connect to (default is localhost or $PGHOST, if set).")
parser.add_argument("--port", type=int, default="0",
help="Port on the host to connect to (default is 0 or $PGPORT, if set)")
parser.add_argument("--dbName", default="",
help="Database name to connect to")
parser.add_argument("--appendOnly", action="store_true",
help="Create an append-only table. Default is a heap table")
parser.add_argument("--numRows", type=int, default="10000000",
help="Number of rows to INSERT INTO the table (default is 10 million)")
parser.set_defaults(verbose=False, filters=[], slice=(None, None))
# Parse the command line arguments
args = parser.parse_args()
return args, parser
def log_output(str):
if glob_verbose:
print(str)
if glob_log_file != None:
glob_log_file.write(str + "\n")
# SQL related methods
# -----------------------------------------------------------------------------
def connect(host, port_num, db_name):
try:
dburl = dbconn.DbURL(hostname=host, port=port_num, dbname=db_name)
conn = dbconn.connect(dburl, encoding="UTF8", unsetSearchPath=False)
except Exception as e:
print(("Exception during connect: %s" % e))
quit()
return conn
def select_version(conn):
global glob_gpdb_major_version
sqlStr = "SELECT version()"
curs = dbconn.query(conn, sqlStr)
rows = curs.fetchall()
for row in rows:
log_output(row[0])
glob_gpdb_major_version = int(re.sub(".*Greenplum Database ([0-9]*)\..*", "\\1", row[0]))
log_output("GPDB major version is %d" % glob_gpdb_major_version)
log_output("Backend pid:")
sqlStr = "SELECT pg_backend_pid()"
curs = dbconn.query(conn, sqlStr)
rows = curs.fetchall()
for row in rows:
log_output(str(row[0]))
def execute_sql(conn, sqlStr):
try:
log_output("")
log_output("Executing query: %s" % sqlStr)
dbconn.execSQL(conn, sqlStr)
except Exception as e:
print("")
print(("Error executing query: %s; Reason: %s" % (sqlStr, e)))
dbconn.execSQL(conn, "abort")
def select_first_int(conn, sqlStr):
try:
log_output("")
log_output("Executing query: %s" % sqlStr)
curs = dbconn.query(conn, sqlStr)
rows = curs.fetchall()
for row in rows:
return int(row[0])
except Exception as e:
print("")
print(("Error executing query: %s; Reason: %s" % (sqlStr, e)))
dbconn.execSQL(conn, "abort")
def execute_sql_arr(conn, sqlStrArr):
for sqlStr in sqlStrArr:
execute_sql(conn, sqlStr)
def execute_and_commit_sql(conn, sqlStr):
execute_sql(conn, sqlStr)
commit_db(conn)
def commit_db(conn):
execute_sql(conn, "commit")
# run an SQL statement and return the elapsed wallclock time, in seconds
def timed_execute_sql(conn, sqlStr):
start = time.time()
num_rows = select_first_int(conn, sqlStr)
end = time.time()
elapsed_time_in_msec = round((end - start) * 1000)
log_output("Elapsed time (msec): %d, rows: %d" % (elapsed_time_in_msec, num_rows))
return elapsed_time_in_msec, num_rows
# run an SQL statement n times, unless it takes longer than a timeout
def timed_execute_n_times(conn, sqlStr, exec_n_times):
sum_exec_times = 0.0
sum_square_exec_times = 0.0
e = 0
act_num_exes = exec_n_times
num_rows = -1
while e < act_num_exes:
exec_time, local_num_rows = timed_execute_sql(conn, sqlStr)
e = e + 1
sum_exec_times += exec_time
sum_square_exec_times += exec_time * exec_time
if num_rows >= 0 and local_num_rows != num_rows:
log_output("Inconsistent number of rows returned: %d and %d" % (num_rows, local_num_rows))
num_rows = local_num_rows
if exec_time > glob_exe_timeout:
# we exceeded the timeout, don't keep executing this long query
act_num_exes = e
log_output("Query %s exceeded the timeout of %d seconds" % (sqlStr, glob_exe_timeout))
# compute mean and standard deviation of the execution times
mean = sum_exec_times / act_num_exes
if exec_n_times == 1:
# be safe, avoid any rounding errors
variance = 0.0
else:
variance = sum_square_exec_times / act_num_exes - mean * mean
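        # (population variance, computed as E[X^2] - (E[X])^2 over the executions that actually ran)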
return (round(mean, 3), round(math.sqrt(variance), 3), act_num_exes, num_rows)
# Explain a query and find a table scan or index scan in an explain output
# return the scan type and the corresponding cost.
# Use this for scan-related tests.
def explain_index_scan(conn, sqlStr):
cost = -1.0
scan_type = ""
try:
log_output("")
log_output("Executing query: %s" % ("explain " + sqlStr))
exp_curs = dbconn.query(conn, "explain " + sqlStr)
rows = exp_curs.fetchall()
table_scan_pattern = TABLE_SCAN_PATTERN
index_scan_pattern = INDEX_SCAN_PATTERN
index_only_scan_pattern = INDEX_ONLY_SCAN_PATTERN
bitmap_scan_pattern = BITMAP_SCAN_PATTERN
fallback_pattern = FALLBACK_PATTERN
        if glob_gpdb_major_version <= 5:
table_scan_pattern = TABLE_SCAN_PATTERN_V5
index_scan_pattern = INDEX_SCAN_PATTERN_V5
index_only_scan_pattern = INDEX_ONLY_SCAN_PATTERN_V5
bitmap_scan_pattern = BITMAP_SCAN_PATTERN_V5
fallback_pattern = FALLBACK_PATTERN_V5
for row in rows:
log_output(row[0])
if (re.search(TABLE_NAME_PATTERN, row[0]) or re.search(NDV_TABLE_NAME_PATTERN, row[0]) or
re.search(WIDE_TABLE_NAME_PATTERN, row[0]) or re.search(BRIN_TABLE_NAME_PATTERN, row[0])):
if re.search(bitmap_scan_pattern, row[0]):
scan_type = BITMAP_SCAN
cost = cost_from_explain_line(row[0])
elif re.search(index_scan_pattern, row[0]):
scan_type = INDEX_SCAN
cost = cost_from_explain_line(row[0])
elif re.search(index_only_scan_pattern, row[0]):
scan_type = INDEX_ONLY_SCAN
cost = cost_from_explain_line(row[0])
elif re.search(table_scan_pattern, row[0]):
scan_type = TABLE_SCAN
cost = cost_from_explain_line(row[0])
elif re.search(fallback_pattern, row[0]):
log_output("*** ERROR: Fallback")
scan_type = FALLBACK_PLAN
except Exception as e:
log_output("\n*** ERROR explaining query:\n%s;\nReason: %s" % ("explain " + sqlStr, e))
return (scan_type, cost)
# Explain a query and find a join in an explain output
# return the scan type and the corresponding cost.
# Use this for join-related tests.
def explain_join_scan(conn, sqlStr):
cost = -1.0
scan_type = ""
try:
log_output("")
log_output("Executing query: %s" % ("explain " + sqlStr))
exp_curs = dbconn.query(conn, "explain " + sqlStr)
rows = exp_curs.fetchall()
hash_join_pattern = HASH_JOIN_PATTERN
nl_join_pattern = NL_JOIN_PATTERN
table_scan_pattern = TABLE_SCAN_PATTERN
index_scan_pattern = INDEX_SCAN_PATTERN
index_only_scan_pattern = INDEX_ONLY_SCAN_PATTERN
bitmap_scan_pattern = BITMAP_SCAN_PATTERN
fallback_pattern = FALLBACK_PATTERN
        if glob_gpdb_major_version <= 5:
hash_join_pattern = HASH_JOIN_PATTERN_V5
nl_join_pattern = NL_JOIN_PATTERN_V5
table_scan_pattern = TABLE_SCAN_PATTERN_V5
            index_scan_pattern = INDEX_SCAN_PATTERN_V5
            index_only_scan_pattern = INDEX_ONLY_SCAN_PATTERN_V5
bitmap_scan_pattern = BITMAP_SCAN_PATTERN_V5
fallback_pattern = FALLBACK_PATTERN_V5
# save the cost of the join above the scan type
for row in rows:
log_output(row[0])
if re.search(nl_join_pattern, row[0]):
cost = cost_from_explain_line(row[0])
elif re.search(hash_join_pattern, row[0]):
cost = cost_from_explain_line(row[0])
# mark the scan type used underneath the join
if re.search(TABLE_NAME_PATTERN, row[0]) or re.search(BFV_TABLE_NAME_PATTERN, row[0]):
if re.search(bitmap_scan_pattern, row[0]):
scan_type = BITMAP_SCAN
elif re.search(index_scan_pattern, row[0]):
scan_type = INDEX_SCAN
elif re.search(index_only_scan_pattern, row[0]):
scan_type = INDEX_ONLY_SCAN
elif re.search(table_scan_pattern, row[0]):
scan_type = TABLE_SCAN
elif re.search(fallback_pattern, row[0]):
log_output("*** ERROR: Fallback")
scan_type = FALLBACK_PLAN
except Exception as e:
log_output("\n*** ERROR explaining query:\n%s;\nReason: %s" % ("explain " + sqlStr, e))
return (scan_type, cost)
# extract the cost c from the cost=x..c in an explain line
def cost_from_explain_line(line):
return float(re.sub(r".*\.\.([0-9.]+) .*", r"\1", line))
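# Illustrative example (made-up numbers): for an EXPLAIN line such as
#   "->  Seq Scan on cal_txtest  (cost=0.00..431080.00 rows=93 width=4)"
# cost_from_explain_line() returns 431080.0.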
# methods that run queries with varying parameters, recording results
# and finding crossover points
# -----------------------------------------------------------------------------
# iterate over one parameterized query, using a range of parameter values, explaining and (optionally) executing the query
def find_crossover(conn, lowParamValue, highParamLimit, setup, parameterizeMethod, explain_method, reset_method,
plan_ids, force_methods, execute_n_times):
# expects the following:
# - conn: A connection
# - lowParamValue: The lowest (integer) value to try for the parameter
# - highParamLimit: The highest (integer) value to try for the parameter + 1
# - setup: A method that runs any sql needed for setup before a particular select run, given a parameterized query and a parameter value
# - parameterizeMethod: A method to generate the actual query text, given a parameterized query and a parameter value
# - explain_method: A method that takes a connection and an SQL string and returns a tuple (plan, cost)
# - reset_method: A method to reset all gucs and similar switches, to get the default plan by the optimizer
# the method takes one parameter, the connection
# - plan_ids: A list with <p> plan ids returned by explain_method. Usually the number <p> is 2.
# - force_methods: A list with <p> methods to force each plan id in the plan_ids array (these methods usually set gucs)
# each methods takes one parameter, the connection
# - execute_n_times: The number of times to execute the query (0 means don't execute, n>0 means execute n times)
# returns the following:
# - An explain dictionary, containing a mapping between a subset of the parameter values and result tuples, each result tuple consisting of
# <p> + 2 values:
# - the plan id chosen by default by the optimizer
# - the estimated cost for the optimal plan, chosen by the optimizer
# - p values for the estimated cost when forcing plan i, 0 <= i < p
# - An execution dictionary that, if execute_n_times is > 0, contains a mapping of a subset of the parameter values and plan ids
    #   to execution statistics: (param_value, plan_id) -> (mean_exec_time, stddev_exec_time, num_rows)
    #   - mean_exec_time: average execution time (in milliseconds) for the plan
# - stddev_exec_time: standard deviation of the different execution times for this parameter value and plan
# - A list of error messages
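    # Illustrative sketch of the returned shapes (made-up numbers), assuming plan_ids = [BITMAP_SCAN, TABLE_SCAN]
    # and execute_n_times > 0:
    #   explainDict[40]              -> (BITMAP_SCAN, 123.45, 123.45, 6789.01)
    #   execDict[(40, BITMAP_SCAN)]  -> (mean_msec, stddev_msec, num_rows)
    #   errMessages                  -> ["For parameter value 40 we tried to force a TABLE_SCAN plan but got a ..."]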
explainDict = {}
execDict = {}
errMessages = []
timedOutDict = {}
expCrossoverLow = lowParamValue - 1
reset_method(conn)
# determine the increment
incParamValue = (highParamLimit - lowParamValue) // 10
if incParamValue == 0:
incParamValue = 1
elif highParamLimit <= lowParamValue:
errMessages.append(
"Low parameter value %d must be less than high parameter limit %d" % (lowParamValue, highParamLimit))
return (explainDict, execDict, errMessages)
# first part, run through the parameter values and determine the plan and cost chosen by the optimizer
for paramValue in range(lowParamValue, highParamLimit, incParamValue):
# do any setup required
setupString = setup(paramValue)
execute_sql(conn, setupString)
# explain the query and record which plan it chooses and what the cost is
sqlString = parameterizeMethod(paramValue)
(plan, cost) = explain_method(conn, sqlString)
explainDict[paramValue] = (plan, cost)
log_output("For param value %d the optimizer chose %s with a cost of %f" % (paramValue, plan, cost))
# execute the query, if requested
if execute_n_times > 0:
timed_execute_and_check_timeout(conn, sqlString, execute_n_times, paramValue, OPTIMIZER_DEFAULT_PLAN,
execDict, timedOutDict, errMessages)
# second part, force different plans and record the costs
for plan_num in range(0, len(plan_ids)):
plan_id = plan_ids[plan_num]
reset_method(conn)
log_output("----------- Now forcing a %s plan --------------" % plan_id)
force_methods[plan_num](conn)
for paramValue in range(lowParamValue, highParamLimit, incParamValue):
# do any setup required
setupString = setup(paramValue)
execute_sql(conn, setupString)
# explain the query with the forced plan
sqlString = parameterizeMethod(paramValue)
(plan, cost) = explain_method(conn, sqlString)
if plan_id != plan:
errMessages.append("For parameter value %d we tried to force a %s plan but got a %s plan." % (
paramValue, plan_id, plan))
log_output("For parameter value %d we tried to force a %s plan but got a %s plan." % (
paramValue, plan_id, plan))
# update the result dictionary
resultList = list(explainDict[paramValue])
defaultPlanCost = resultList[1]
# sanity check, the forced plan shouldn't have a cost that is lower than the default plan cost
if defaultPlanCost > cost * 1.1:
errMessages.append(
"For parameter value %d and forced %s plan we got a cost of %f that is lower than the default cost of %f for the default %s plan." % (
paramValue, plan_id, cost, defaultPlanCost, resultList[0]))
resultList.append(cost)
explainDict[paramValue] = tuple(resultList)
log_output("For param value %d we forced %s with a cost of %f" % (paramValue, plan, cost))
# execute the forced plan
if execute_n_times > 0:
# execute the query <execute_n_times> times and record the mean and stddev of the time in execDict
timed_execute_and_check_timeout(conn, sqlString, execute_n_times, paramValue, plan_id, execDict,
timedOutDict, errMessages)
# cleanup at exit
reset_method(conn)
return (explainDict, execDict, errMessages)
# Check for plans other than the optimizer-chosen plan that are significantly
# better. Return the plan id and how many percent better that plan is or return ("", 0).
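# Illustrative example (made-up timings): if the optimizer picked BITMAP_SCAN with a best measured
# time of 1200 msec, and the forced TABLE_SCAN ran in 800 msec with small standard deviations,
# this returns (TABLE_SCAN, 33.3), i.e. the alternative plan was about 33.3 percent faster.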
def checkForOptimizerErrors(paramValue, chosenPlan, plan_ids, execDict):
    # check whether a plan other than the optimizer's choice was better
if chosenPlan in plan_ids:
# take the best of the execution times (optimizer choice and the same plan forced)
# and use the larger of the standard deviations
defaultExeTime = 1E6
defaultStdDev = 0.0
if (paramValue, OPTIMIZER_DEFAULT_PLAN) in execDict:
defaultExeTime, defaultStdDev, numRows = execDict[(paramValue, OPTIMIZER_DEFAULT_PLAN)]
if (paramValue, chosenPlan) in execDict:
forcedExeTime, forcedStdDev, numRows = execDict[(paramValue, chosenPlan)]
if forcedExeTime < defaultExeTime:
defaultExeTime = forcedExeTime
defaultStdDev = forcedStdDev
for pl in plan_ids:
if (paramValue, pl) in execDict:
altExeTime, altStdDev, numRows = execDict[(paramValue, pl)]
# The execution times tend to be fairly unreliable. Try to avoid false positives by
# requiring a significantly better alternative, measured in standard deviations.
if altExeTime + glob_sigma_diff * max(defaultStdDev, altStdDev) < defaultExeTime:
optimizerError = 100.0 * (defaultExeTime - altExeTime) / defaultExeTime
# yes, plan pl is significantly better than the optimizer default choice
return (pl, round(optimizerError, 1))
elif chosenPlan == FALLBACK_PLAN:
return (FALLBACK_PLAN, -1.0)
    # the optimizer chose the right plan (or at least we don't have enough evidence to the contrary)
return ("", 0.0)
# print the results of one test run
def print_results(testTitle, explainDict, execDict, errMessages, plan_ids, execute_n_times):
# print out the title of the test
print("")
print(testTitle)
print("")
exeTimes = len(execDict) > 0
# make a list of plan ids with the default plan ids as first entry
plan_ids_with_default = [OPTIMIZER_DEFAULT_PLAN]
plan_ids_with_default.extend(plan_ids)
# print a header row
headerList = ["Parameter value", "Plan chosen by optimizer", "Cost"]
for p_id in plan_ids:
headerList.append("Cost of forced %s plan" % p_id)
if exeTimes:
headerList.append("Best execution plan")
headerList.append("Optimization error (pct)")
headerList.append("Execution time for default plan (ms)")
for p_id in plan_ids:
headerList.append("Execution time for forced %s plan (ms)" % p_id)
if execute_n_times > 1:
headerList.append("Std dev default")
for p_id in plan_ids:
headerList.append("Std dev %s" % p_id)
headerList.append("Selectivity pct")
print((", ".join(headerList)))
# sort the keys of the dictionary by parameter value
sorted_params = sorted(explainDict.keys())
# for each parameter value, print one line with comma-separated values
for p_val in sorted_params:
# add the explain-related values
vals = explainDict[p_val]
resultList = [str(p_val)]
for v in vals:
resultList.append(str(v))
# add the execution-related values, if applicable
if exeTimes:
# calculate the optimizer error
bestPlan, optimizerError = checkForOptimizerErrors(p_val, vals[0], plan_ids, execDict)
resultList.append(bestPlan)
resultList.append(str(optimizerError))
stddevList = []
num_rows = -1
# our execution times will be a list of 2* (p+1) + 1 items,
# (default exe time, forced exe time plan 1 ... p, stddev for default time, stddevs for plans 1...p, selectivity)
# now loop over the list of p+1 plan ids
for plan_id in plan_ids_with_default:
if (p_val, plan_id) in execDict:
# we did execute the query for this, append the avg time
# right away and save the standard deviation for later
mean, stddev, local_num_rows = execDict[(p_val, plan_id)]
resultList.append(str(mean))
stddevList.append(str(stddev))
if num_rows >= 0 and local_num_rows != num_rows:
errMessages.append("Inconsistent number of rows for parameter value %d: %d and %d" % (p_val, num_rows, local_num_rows))
num_rows = local_num_rows
else:
# we didn't execute this query, add blank values
resultList.append("")
stddevList.append("")
if execute_n_times > 1:
# now add the standard deviations to the end of resultList
resultList.extend(stddevList)
# finally, the selectivity in percent
resultList.append(str((100.0 * num_rows) / glob_rowcount))
# print a comma-separated list of result values (CSV)
print((", ".join(resultList)))
# if there are any errors, print them at the end, leaving an empty line between the result and the errors
if (len(errMessages) > 0):
print("")
print(("%d diagnostic message(s):" % len(errMessages)))
for e in errMessages:
print(e)
# execute a query n times, with a guard against long-running queries,
# and record the result in execDict and any errors in errMessages
def timed_execute_and_check_timeout(conn, sqlString, execute_n_times, paramValue, plan_id, execDict, timedOutDict,
errMessages):
# timedOutDict contains a record of queries that have previously timed out:
# plan_id -> (lowest param value for timeout, highest value for timeout, direction)
# right now we ignore low/high values and direction (whether the execution increases or decreases with
# increased parameter values)
if plan_id in timedOutDict:
# this plan has timed out with at least one parameter value, decide what to do
paramValLow, paramValHigh, direction = timedOutDict[plan_id]
# for now, just return, once we time out for a plan we give up
log_output("Not executing the %s plan for paramValue %d, due to previous timeout" % (plan_id, paramValue))
return
# execute the query
mean, stddev, num_execs, num_rows = timed_execute_n_times(conn, sqlString, execute_n_times)
# record the execution stats
execDict[(paramValue, plan_id)] = (mean, stddev, num_rows)
# check for timeouts
if num_execs < execute_n_times or mean > glob_exe_timeout:
# record the timeout, without worrying about low/high values or directions for now
timedOutDict[plan_id] = (paramValue, paramValue, "unknown_direction")
errMessages.append(
"The %s plan for parameter value %d took more than the allowed timeout, it was executed only %d time(s)" %
(plan_id, paramValue, num_execs))
# Definition of various test suites
# -----------------------------------------------------------------------------
# How to add a test:
#
# - Define some queries to run as text constants below. Use the tables
# created by this program or add more tables to be created.
# - Define methods that parameterize these test queries, given an integer
# parameter value in a range that you can define later.
# - Use the predefined types of plans (TABLE_SCAN, INDEX_SCAN, INDEX_ONLY_SCAN) or add your
# own plan types above. Note that you will also need to change or implement
# an explain method that takes a query, explains it, and returns the plan
# type and the estimated cost.
# - Define methods to force the desired plan types and also a method to reset
# the connection so it doesn't force any of these plans.
# - Now you are ready to add another test, using method run_bitmap_index_tests()
# as an example.
# - Add your test as a choice for the "tests" command line argument and add a
# call to your test to the main program
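#
# Illustrative sketch of such an addition (hypothetical names, shown only as comments and not
# wired into the "tests" command line choices):
#
#   _my_select = """
#   SELECT count(*)
#   FROM cal_txtest
#   WHERE btree10000 BETWEEN 0 AND {par};
#   """
#
#   def parameterize_my_query(paramValue):
#       return _my_select.format(par=paramValue)
#
#   def run_my_tests(conn, execute_n_times):
#       run_one_bitmap_scan_test(conn,
#                                "My Test; NDV=10000; selectivity_pct=0.01*parameter_value; count(*)",
#                                0,
#                                100,
#                                noSetupRequired,
#                                parameterize_my_query,
#                                execute_n_times)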
# SQL test queries
# -----------------------------------------------------------------------------
# ------------ SQL test queries - bitmap index scan --------------
# GUC set statements
_reset_index_scan_forces = ["""
SELECT enable_xform('CXformImplementBitmapTableGet');
""",
"""
SELECT enable_xform('CXformGet2TableScan');
""",
"""
SELECT enable_xform('CXformIndexGet2IndexScan');
""" ]
_force_sequential_scan = ["""
SELECT disable_xform('CXformImplementBitmapTableGet');
"""]
_force_index_scan = ["""
SELECT disable_xform('CXformGet2TableScan');
"""]
_force_index_only_scan = ["SELECT disable_xform('CXformGet2TableScan');",
"SELECT disable_xform('CXformIndexGet2IndexScan');"]
_reset_index_join_forces = ["""
SELECT enable_xform('CXformPushGbBelowJoin');
""",
"""
RESET optimizer_enable_indexjoin;
""",
"""
RESET optimizer_enable_hashjoin;
"""]
_force_hash_join = ["""
SELECT disable_xform('CXformPushGbBelowJoin');
""",
"""
SET optimizer_enable_indexjoin to off;
"""]
_force_index_nlj = ["""
SELECT disable_xform('CXformPushGbBelowJoin');
""",
"""
SET optimizer_enable_hashjoin to off;
"""]
# setup statements
_insert_into_bfv_tables = """
TRUNCATE cal_bfvtest;
TRUNCATE cal_bfv_dim;
INSERT INTO cal_bfvtest SELECT col1, col1, col1 FROM (SELECT generate_series(1,%d) col1)a;
INSERT INTO cal_bfv_dim SELECT col1, col1 FROM (SELECT generate_series(1,%d,3) col1)a;
ANALYZE cal_bfvtest;
ANALYZE cal_bfv_dim;
"""
_insert_into_ndv_tables = """
TRUNCATE cal_ndvtest;
INSERT INTO cal_ndvtest SELECT i, i %% %d FROM (SELECT generate_series(1,1000000) i)a;
ANALYZE cal_ndvtest;
"""
# query statements
_bitmap_select = """
SELECT count(*) {sel}
FROM cal_txtest
WHERE {col} BETWEEN 0 AND {par};
"""
_bitmap_select_multi = """
SELECT count(*) {sel}
FROM cal_txtest
WHERE {col} = 0 OR {col} BETWEEN 2 AND {par}+1;
"""
_btree_select_unique_in = """
SELECT count(*) {sel}
FROM cal_txtest
WHERE {col} IN ( {inlist} );
"""
_bitmap_index_join = """
SELECT count(*) %s
FROM cal_txtest f JOIN cal_dim d ON f.bitmap10000 = d.dim_id
WHERE d.dim_id2 BETWEEN 0 AND %d;
"""
_btree_index_join = """
SELECT count(*) %s
FROM cal_txtest f JOIN cal_dim d ON f.btree10000 = d.dim_id
WHERE d.dim_id2 BETWEEN 0 AND %d;
"""
_bfv_join = """
SELECT count(*)
FROM cal_bfvtest ft, cal_bfv_dim dt1
WHERE ft.id = dt1.id;
"""
_bitmap_index_ndv = """
SELECT count(*)
FROM cal_ndvtest
WHERE val <= 1000000;
"""
_brin_select_range = """
SELECT count(*) {sel}
FROM cal_brintest
WHERE {col} BETWEEN 0 AND {par};
"""
_brin_select_multi = """
SELECT count(*) {sel}
FROM cal_brintest
WHERE {col} = 0 OR {col} BETWEEN 2 AND {par}+1;
"""
# Parameterize methods for the test queries above
# -----------------------------------------------------------------------------
# bitmap index scan with 0...100 % of values, for parameter values 0...10, in 10 % increments
def parameterize_bitmap_index_10_narrow(paramValue):
return _bitmap_select.format(sel="", col="bitmap10", par=paramValue)
def parameterize_bitmap_index_10_wide(paramValue):
return _bitmap_select.format(sel=", max(txt)", col="bitmap10", par=paramValue)
# bitmap index scan with 0...100 % of values, for parameter values 0...10,000, in .01 % increments
def parameterize_bitmap_index_10000_narrow(paramValue):
return _bitmap_select.format(sel="", col="bitmap10000", par=paramValue)
def parameterize_bitmap_index_10000_wide(paramValue):
return _bitmap_select.format(sel=", max(txt)", col="bitmap10000", par=paramValue)
# bitmap index scan with 0...100 % of values, for parameter values 0...10,000, in .01 % increments, multiple ranges
def parameterize_bitmap_index_10000_multi_narrow(paramValue):
return _bitmap_select_multi.format(sel="", col="bitmap10000", par=paramValue)
def parameterize_bitmap_index_10000_multi_wide(paramValue):
return _bitmap_select_multi.format(sel=", max(txt)", col="bitmap10000", par=paramValue)
# bitmap index scan on AO btree index with 0...100 % of values, for parameter values 0...10, in 10 % increments
def parameterize_btree_index_unique_narrow(paramValue):
return _bitmap_select.format(sel="", col="btreeunique", par=paramValue)
def parameterize_btree_index_unique_wide(paramValue):
return _bitmap_select.format(sel=", max(txt)", col="btreeunique", par=paramValue)
def parameterize_btree_index_100_narrow(paramValue):
return _bitmap_select.format(sel="", col="btree100", par=paramValue)
def parameterize_btree_index_100_wide(paramValue):
return _bitmap_select.format(sel=", max(txt)", col="btree100", par=paramValue)
# bitmap index scan on AO btree index with 0...100 % of values, for parameter values 0...10,000, in .01 % increments
def parameterize_btree_index_10000_narrow(paramValue):
return _bitmap_select.format(sel="", col="btree10000", par=paramValue)
def parameterize_btree_index_10000_wide(paramValue):
return _bitmap_select.format(sel=", max(txt)", col="btree10000", par=paramValue)
# bitmap index scan on AO btree index with 0...100 % of values, for parameter values 0...10,000, in .01 % increments, multiple ranges
def parameterize_btree_index_10000_multi_narrow(paramValue):
return _bitmap_select_multi.format(sel="", col="btree10000", par=paramValue)
def parameterize_btree_index_10000_multi_wide(paramValue):
return _bitmap_select_multi.format(sel=", max(txt)", col="btree10000", par=paramValue)
def parameterize_btree_unique_in_narrow(paramValue):
inlist = "0"
for p in range(1, paramValue+1):
inlist += ", " + str(5*p)
return _btree_select_unique_in.format(sel="", col="btreeunique", inlist=inlist)
def parameterize_btree_unique_in_wide(paramValue):
inlist = "0"
for p in range(1, paramValue+1):
inlist += ", " + str(5*p)
return _btree_select_unique_in.format(sel=", max(txt)", col="btreeunique", inlist=inlist)
# index join with 0...100 % of fact values, for parameter values 0...10,000, in .01 % increments
def parameterize_bitmap_join_narrow(paramValue):
return _bitmap_index_join % ("", paramValue)
def parameterize_bitmap_join_wide(paramValue):
return _bitmap_index_join % (", max(f.txt)", paramValue)
def parameterize_btree_join_narrow(paramValue):
return _btree_index_join % ("", paramValue)
def parameterize_btree_join_wide(paramValue):
return _btree_index_join % (", max(f.txt)", paramValue)
def parameterize_insert_join_bfv(paramValue):
return _insert_into_bfv_tables % (paramValue, paramValue)
def parameterize_insert_ndv(paramValue):
return _insert_into_ndv_tables % (paramValue)
def parameterize_bitmap_join_bfv(paramValue):
return _bfv_join
def parameterize_bitmap_index_ndv(paramValue):
return _bitmap_index_ndv
# BRIN clustered scan with 0...100 % of values, for parameter values 0...10, in 10 % increments
def parameterize_brin_index_10c_narrow(paramValue):
return _brin_select_range.format(sel="", col="clust_10", par=paramValue)
def parameterize_brin_index_10c_wide(paramValue):
return _brin_select_range.format(sel=", max(txt)", col="clust_10", par=paramValue)
# BRIN clustered scan with 0...100 % of values, for parameter values 0...10,000, in .01 % increments
def parameterize_brin_index_10000c_narrow(paramValue):
return _brin_select_range.format(sel="", col="clust_10000", par=paramValue)
def parameterize_brin_index_10000c_wide(paramValue):
return _brin_select_range.format(sel=", max(txt)", col="clust_10000", par=paramValue)
# BRIN clustered scan with 0...100 % of values, for parameter values 0...10,000, in .01 % increments, multiple ranges
def parameterize_brin_index_10000c_multi_narrow(paramValue):
return _brin_select_multi.format(sel="", col="clust_10000", par=paramValue)
def parameterize_brin_index_10000c_multi_wide(paramValue):
return _brin_select_multi.format(sel=", max(txt)", col="clust_10000", par=paramValue)
# BRIN random scan with 0...100 % of values, for parameter values 0...10, in 10 % increments
def parameterize_brin_index_10r_narrow(paramValue):
return _brin_select_range.format(sel="", col="rand_10", par=paramValue)
def parameterize_brin_index_10r_wide(paramValue):
return _brin_select_range.format(sel=", max(txt)", col="rand_10", par=paramValue)
# BRIN random scan with 0...100 % of values, for parameter values 0...10,000, in .01 % increments
def parameterize_brin_index_10000r_narrow(paramValue):
return _brin_select_range.format(sel="", col="rand_10000", par=paramValue)
def parameterize_brin_index_10000r_wide(paramValue):
return _brin_select_range.format(sel=", max(txt)", col="rand_10000", par=paramValue)
# BRIN random scan with 0...100 % of values, for parameter values 0...10,000, in .01 % increments, multiple ranges
def parameterize_brin_index_10000r_multi_narrow(paramValue):
return _brin_select_multi.format(sel="", col="rand_10000", par=paramValue)
def parameterize_brin_index_10000r_multi_wide(paramValue):
return _brin_select_multi.format(sel=", max(txt)", col="rand_10000", par=paramValue)
def noSetupRequired(paramValue):
return "SELECT 1;"
def explain_bitmap_index(conn, sqlStr):
return explain_index_scan(conn, sqlStr)
def reset_index_test(conn):
execute_sql_arr(conn, _reset_index_scan_forces)
def force_table_scan(conn):
execute_sql_arr(conn, _force_sequential_scan)
def force_bitmap_scan(conn):
execute_sql_arr(conn, _force_index_scan)
def force_index_scan(conn):
execute_sql_arr(conn, _force_index_scan)
def force_index_only_scan(conn):
execute_sql_arr(conn, _force_index_only_scan)
def reset_index_join(conn):
execute_sql_arr(conn, _reset_index_join_forces)
def force_hash_join(conn):
execute_sql_arr(conn, _force_hash_join)
def force_index_join(conn):
execute_sql_arr(conn, _force_index_nlj)
# Helper methods for running tests
# -----------------------------------------------------------------------------
def run_one_bitmap_scan_test(conn, testTitle, paramValueLow, paramValueHigh, setup, parameterizeMethod,
execute_n_times):
log_output("Running bitmap scan test " + testTitle)
plan_ids = [BITMAP_SCAN, TABLE_SCAN]
force_methods = [force_bitmap_scan, force_table_scan]
explainDict, execDict, errors = find_crossover(conn, paramValueLow, paramValueHigh, setup, parameterizeMethod,
explain_bitmap_index, reset_index_test, plan_ids, force_methods,
execute_n_times)
print_results(testTitle, explainDict, execDict, errors, plan_ids, execute_n_times)
def run_one_bitmap_join_test(conn, testTitle, paramValueLow, paramValueHigh, setup, parameterizeMethod,
execute_n_times):
log_output("Running bitmap join test " + testTitle)
plan_ids = [BITMAP_SCAN, TABLE_SCAN]
force_methods = [force_index_join, force_hash_join]
explainDict, execDict, errors = find_crossover(conn, paramValueLow, paramValueHigh, setup, parameterizeMethod,
explain_join_scan, reset_index_join, plan_ids, force_methods,
execute_n_times)
print_results(testTitle, explainDict, execDict, errors, plan_ids, execute_n_times)
def run_one_index_scan_test(conn, testTitle, paramValueLow, paramValueHigh, setup, parameterizeMethod,
execute_n_times):
log_output("Running index scan test " + testTitle)
plan_ids = [INDEX_SCAN, INDEX_ONLY_SCAN]
force_methods = [force_index_scan, force_index_only_scan]
explainDict, execDict, errors = find_crossover(conn, paramValueLow, paramValueHigh, setup, parameterizeMethod,
explain_index_scan, reset_index_test, plan_ids, force_methods,
execute_n_times)
print_results(testTitle, explainDict, execDict, errors, plan_ids, execute_n_times)
def run_one_brin_scan_test(conn, testTitle, paramValueLow, paramValueHigh, setup, parameterizeMethod,
execute_n_times):
log_output("Running BRIN scan test " + testTitle)
plan_ids = [BITMAP_SCAN, TABLE_SCAN]
force_methods = [force_bitmap_scan, force_table_scan]
explainDict, execDict, errors = find_crossover(conn, paramValueLow, paramValueHigh, setup, parameterizeMethod,
explain_bitmap_index, reset_index_test, plan_ids, force_methods,
execute_n_times)
print_results(testTitle, explainDict, execDict, errors, plan_ids, execute_n_times)
# Main driver for the tests
# -----------------------------------------------------------------------------
def run_index_only_scan_tests(conn, execute_n_times):
def setup_wide_table(paramValue):
execute_sql_arr(conn, [
"DROP TABLE IF EXISTS cal_widetest;",
"CREATE TABLE cal_widetest(a int, {})".format(','.join('col' + str(i) + " text" for i in range(1, max(2, paramValue)))),
"CREATE INDEX cal_widetest_index ON cal_widetest(a);",
"TRUNCATE cal_widetest;",
"INSERT INTO cal_widetest SELECT i%50, {} FROM generate_series(1,100000)i;".format(','.join("repeat('a', 1024)" for i in range(1, max(2, paramValue)))),
"VACUUM ANALYZE cal_widetest;"
])
return "select 1;"
def parameterized_method(paramValue):
return """
SELECT count(a)
FROM cal_widetest
WHERE a<25;
"""
run_one_index_scan_test(conn,
"Index Scan Test; Wide table; Narrow index",
1,
6,
setup_wide_table,
parameterized_method,
execute_n_times)
def run_bitmap_index_scan_tests(conn, execute_n_times):
run_one_bitmap_scan_test(conn,
"Bitmap Scan Test; NDV=10; selectivity_pct=10*parameter_value; count(*)",
0,
10,
noSetupRequired,
parameterize_bitmap_index_10_narrow,
execute_n_times)
# all full table scan, no crossover
run_one_bitmap_scan_test(conn,
"Bitmap Scan Test; NDV=10; selectivity_pct=10*parameter_value; max(txt)",
0,
6,
noSetupRequired,
parameterize_bitmap_index_10_wide,
execute_n_times)
run_one_bitmap_scan_test(conn,
"Bitmap Scan Test; NDV=10000; selectivity_pct=0.01*parameter_value; count(*)",
0,
600 if glob_appendonly else 20,
noSetupRequired,
parameterize_bitmap_index_10000_narrow,
execute_n_times)
run_one_bitmap_scan_test(conn,
"Bitmap Scan Test; NDV=10000; selectivity_pct=0.01*parameter_value; max(txt)",
0,
300 if glob_appendonly else 20,
noSetupRequired,
parameterize_bitmap_index_10000_wide,
execute_n_times)
run_one_bitmap_scan_test(conn,
"Bitmap Scan Test; multi-range; NDV=10000; selectivity_pct=0.01*parameter_value; count(*)",
0,
600 if glob_appendonly else 20,
noSetupRequired,
parameterize_bitmap_index_10000_multi_narrow,
execute_n_times)
run_one_bitmap_scan_test(conn,
"Bitmap Scan Test; multi-range; NDV=10000; selectivity_pct=0.01*parameter_value; max(txt)",
0,
300 if glob_appendonly else 20,
noSetupRequired,
parameterize_bitmap_index_10000_multi_wide,
execute_n_times)
def run_bitmap_ndv_scan_tests(conn, execute_n_times):
run_one_bitmap_scan_test(conn,
"Bitmap Scan Test; ndv test; rows=1000000; parameter = insert statement modulo; count(*)",
1,
                             # the parameter value replaces x in: INSERT INTO cal_ndvtest SELECT i, i % x FROM (SELECT generate_series(1,1000000) i)a;
                             10000,  # upper limit for the modulo parameter
parameterize_insert_ndv,
parameterize_bitmap_index_ndv,
execute_n_times)
def run_btree_ao_index_scan_tests(conn, execute_n_times):
# use the unique btree index (no bitmap equivalent), 0 to 10,000 rows
run_one_bitmap_scan_test(conn,
"Btree Scan Test; unique; selectivity_pct=100*parameter_value/%d; count(*)" % glob_rowcount,
0,
glob_rowcount // 10, # 10% is the max allowed selectivity for a btree scan on an AO table
noSetupRequired,
parameterize_btree_index_unique_narrow,
execute_n_times)
run_one_bitmap_scan_test(conn,
"Btree Scan Test; unique; selectivity_pct=100*parameter_value/%d; max(txt)" % glob_rowcount,
0,
glob_rowcount // 20,
noSetupRequired,
parameterize_btree_index_unique_wide,
execute_n_times)
run_one_bitmap_scan_test(conn,
"Btree Scan Test; NDV=100; selectivity_pct=parameter_value; count(*)",
0,
5,
noSetupRequired,
parameterize_btree_index_100_narrow,
execute_n_times)
# all full table scan, no crossover
run_one_bitmap_scan_test(conn,
"Btree Scan Test; NDV=100; selectivity_pct=parameter_value; max(txt)",
0,
5,
noSetupRequired,
parameterize_btree_index_100_wide,
execute_n_times)
run_one_bitmap_scan_test(conn,
"Btree Scan Test; NDV=10000; selectivity_pct=0.01*parameter_value; count(*)",
0,
500,
noSetupRequired,
parameterize_btree_index_10000_narrow,
execute_n_times)
run_one_bitmap_scan_test(conn,
"Btree Scan Test; NDV=10000; selectivity_pct=0.01*parameter_value; max(txt)",
0,
1000,
noSetupRequired,
parameterize_btree_index_10000_wide,
execute_n_times)
run_one_bitmap_scan_test(conn,
"Btree Scan Test; multi-range; NDV=10000; selectivity_pct=0.01*parameter_value; count(*)",
0,
1000,
noSetupRequired,
parameterize_btree_index_10000_multi_narrow,
execute_n_times)
run_one_bitmap_scan_test(conn,
"Btree Scan Test; multi-range; NDV=10000; selectivity_pct=0.01*parameter_value; max(txt)",
0,
1000,
noSetupRequired,
parameterize_btree_index_10000_multi_wide,
execute_n_times)
run_one_bitmap_scan_test(conn,
"Btree Scan Test; in-list; selectivity_pct=100*parameter_value/%d; count(*)" % glob_rowcount,
0,
5000, # length of IN list
noSetupRequired,
parameterize_btree_unique_in_narrow,
execute_n_times)
run_one_bitmap_scan_test(conn,
"Btree Scan Test; in-list; selectivity_pct=100*parameter_value/%d; max(txt)" % glob_rowcount,
0,
3000, # length of IN list
noSetupRequired,
parameterize_btree_unique_in_wide,
execute_n_times)
def run_index_join_tests(conn, execute_n_times):
run_one_bitmap_join_test(conn,
"Bitmap Join Test; NDV=10000; selectivity_pct=0.01*parameter_value; count(*)",
0,
400,
noSetupRequired,
parameterize_bitmap_join_narrow,
execute_n_times)
run_one_bitmap_join_test(conn,
"Bitmap Join Test; NDV=10000; selectivity_pct=0.01*parameter_value; max(txt)",
0,
300,
noSetupRequired,
parameterize_bitmap_join_wide,
execute_n_times)
run_one_bitmap_join_test(conn,
"Btree Join Test; NDV=10000; selectivity_pct=0.01*parameter_value; count(*)",
0,
500,
noSetupRequired,
parameterize_btree_join_narrow,
execute_n_times)
run_one_bitmap_join_test(conn,
"Btree Join Test; NDV=10000; selectivity_pct=0.01*parameter_value; max(txt)",
0,
400,
noSetupRequired,
parameterize_btree_join_wide,
execute_n_times)
def run_bfv_join_tests(conn, execute_n_times):
run_one_bitmap_join_test(conn,
"Bitmap Join BFV Test; Large Data; parameter = num rows inserted",
10000, # num of rows inserted
900000,
parameterize_insert_join_bfv,
parameterize_bitmap_join_bfv,
execute_n_times)
def run_brin_tests(conn, execute_n_times):
run_one_brin_scan_test(conn,
"BRIN clustered Scan Test; NDV=10; selectivity_pct=10*parameter_value; count(*)",
0,
10,
noSetupRequired,
parameterize_brin_index_10c_narrow,
execute_n_times)
run_one_brin_scan_test(conn,
"BRIN clustered Scan Test; NDV=10; selectivity_pct=10*parameter_value; max(txt)",
0,
6,
noSetupRequired,
parameterize_brin_index_10c_wide,
execute_n_times)
run_one_brin_scan_test(conn,
"BRIN clustered Scan Test; NDV=10000; selectivity_pct=0.01*parameter_value; count(*)",
0,
600,
noSetupRequired,
parameterize_brin_index_10000c_narrow,
execute_n_times)
run_one_brin_scan_test(conn,
"BRIN clustered Scan Test; NDV=10000; selectivity_pct=0.01*parameter_value; max(txt)",
0,
300,
noSetupRequired,
parameterize_brin_index_10000c_wide,
execute_n_times)
run_one_brin_scan_test(conn,
"BRIN clustered Scan Test; multi-range; NDV=10000; selectivity_pct=0.01*parameter_value; count(*)",
0,
600,
noSetupRequired,
parameterize_brin_index_10000c_multi_narrow,
execute_n_times)
run_one_brin_scan_test(conn,
"BRIN clustered Scan Test; multi-range; NDV=10000; selectivity_pct=0.01*parameter_value; max(txt)",
0,
300,
noSetupRequired,
parameterize_brin_index_10000c_multi_wide,
execute_n_times)
run_one_brin_scan_test(conn,
"BRIN random Scan Test; NDV=10; selectivity_pct=10*parameter_value; count(*)",
0,
10,
noSetupRequired,
parameterize_brin_index_10r_narrow,
execute_n_times)
run_one_brin_scan_test(conn,
"BRIN random Scan Test; NDV=10; selectivity_pct=10*parameter_value; max(txt)",
0,
6,
noSetupRequired,
parameterize_brin_index_10r_wide,
execute_n_times)
run_one_brin_scan_test(conn,
"BRIN random Scan Test; NDV=10000; selectivity_pct=0.01*parameter_value; count(*)",
0,
600,
noSetupRequired,
parameterize_brin_index_10000r_narrow,
execute_n_times)
run_one_brin_scan_test(conn,
"BRIN random Scan Test; NDV=10000; selectivity_pct=0.01*parameter_value; max(txt)",
0,
300,
noSetupRequired,
parameterize_brin_index_10000r_wide,
execute_n_times)
run_one_brin_scan_test(conn,
"BRIN random Scan Test; multi-range; NDV=10000; selectivity_pct=0.01*parameter_value; count(*)",
0,
600,
noSetupRequired,
parameterize_brin_index_10000r_multi_narrow,
execute_n_times)
run_one_brin_scan_test(conn,
"BRIN random Scan Test; multi-range; NDV=10000; selectivity_pct=0.01*parameter_value; max(txt)",
0,
300,
noSetupRequired,
parameterize_brin_index_10000r_multi_wide,
execute_n_times)
# common parts of all test suites, create tables, run tests, drop objects
# -----------------------------------------------------------------------------
# create the table(s), as regular or AO table, and insert num_rows into the main table
def createDB(conn, use_ao, num_rows):
global glob_appendonly
create_options = ""
if use_ao:
create_options = _with_appendonly
glob_appendonly = True
create_cal_table_stmt = _create_cal_table % create_options
create_bfv_table = _create_bfv_table % create_options
create_ndv_table = _create_ndv_table % create_options
create_brin_table = _create_brin_table % create_options
insert_into_temp_stmt = _insert_into_temp % num_rows
insert_into_other_stmt = _insert_into_other_tables % (1, glob_dim_table_rows)
insert_into_brin_table = _insert_into_brin_table.format(rows=num_rows)
execute_sql(conn, _drop_tables)
execute_sql(conn, create_cal_table_stmt)
execute_sql(conn, create_bfv_table)
execute_sql(conn, create_ndv_table)
execute_sql(conn, create_brin_table)
execute_sql_arr(conn, _create_other_tables)
commit_db(conn)
execute_and_commit_sql(conn, insert_into_temp_stmt)
execute_and_commit_sql(conn, _insert_into_table)
commit_db(conn)
execute_and_commit_sql(conn, insert_into_brin_table)
execute_and_commit_sql(conn, insert_into_other_stmt)
commit_db(conn)
execute_sql_arr(conn, _create_index_arr)
execute_sql_arr(conn, _create_bfv_index_arr)
execute_sql_arr(conn, _create_ndv_index_arr)
commit_db(conn)
execute_sql_arr(conn, _create_btree_indexes_arr)
execute_sql_arr(conn, _create_brin_index_arr)
execute_sql(conn, _analyze_table)
commit_db(conn)
def dropDB(conn):
execute_sql(conn, _drop_tables)
# smooth statistics for a single integer column uniformly distributed between 1 and row_count, with a given row count and NDV
#
# For NDVs of 100 or less, list all of them
# For NDVs of more than 100, generate a histogram with 100 buckets
# Set the correlation to 0 for all columns, since the data was shuffled randomly
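# Illustrative example (hypothetical table/attnum): a call like
#   smoothStatisticsForOneCol(conn, 'cal_txtest', 4, 10000000, 10, 0)
# writes an MCV list with stanumbers = {0.1, 0.1, ...} and stavalues = {1, 2, ..., 10}, while the
# same call with ndv=10000 instead writes a 100-bucket histogram with boundaries 1, 100, 200, ..., 10000.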
def smoothStatisticsForOneCol(conn, table_name, attnum, row_count, ndv, corr):
# calculate stadistinct value and ndv, if specified as -1
if ndv == -1:
stadistinct = -1
ndv = row_count
else:
stadistinct = ndv
# stakind: 1 is a list of most common values and frequencies, 2 is a histogram with range buckets
stakind = 1
# arrays for stanumbers and stavalues
stanumbers = []
stavalues = []
stanumbers_txt = "NULL"
num_values = min(ndv, 100)
if ndv <= 100:
# produce "ndv" MCVs, each with the same frequency
for i in range(1,num_values+1):
stanumbers.append(str(float(1)/ndv))
stavalues.append(str(i))
stanumbers_txt = "'{ " + ", ".join(stanumbers) + " }'::float[]"
else:
# produce a uniformly distributed histogram with 100 buckets (101 boundaries)
stakind = 2
stavalues.append(str(1))
for j in range(1,num_values+1):
stavalues.append(str((j*ndv) // num_values))
stavalues_txt = "'{ " + ", ".join(stavalues) + " }'::int[]"
execute_sql(conn, _update_pg_stats % (stadistinct, stakind, stanumbers_txt, stavalues_txt, corr, table_name, attnum))
# ensure that we have perfect histogram statistics on the relevant columns
def smoothStatistics(conn, num_fact_table_rows):
prev_table_name = ""
if glob_gpdb_major_version > 5:
execute_sql(conn, _allow_system_mods)
else:
execute_sql(conn, _allow_system_mods_v5)
for tup in _stats_cols_to_fix:
# note that col_name is just for human readability
(table_name, col_name, attnum, ndv, table_rows, corr) = tup
if table_rows == -1:
table_rows = num_fact_table_rows
smoothStatisticsForOneCol(conn, table_name, attnum, table_rows, ndv, corr)
if prev_table_name != table_name:
prev_table_name = table_name
execute_sql(conn, _update_pg_class % (table_rows, table_name))
commit_db(conn)
def inspectExistingTables(conn):
global glob_rowcount
global glob_appendonly
sqlStr = "SELECT count(*) from cal_txtest"
curs = dbconn.query(conn, sqlStr)
rows = curs.fetchall()
for row in rows:
glob_rowcount = row[0]
log_output("Row count of existing fact table is %d" % glob_rowcount)
if glob_gpdb_major_version < 7:
sqlStr = "SELECT lower(unnest(reloptions)) from pg_class where relname = 'cal_txtest'"
else:
sqlStr = "SELECT case when relam=3434 then 'appendonly' else 'appendonly,column' end from pg_class where relname = 'cal_txtest' and relam in (3434,3435)"
curs = dbconn.query(conn, sqlStr)
rows = curs.fetchall()
for row in rows:
if re.search("appendonly", row[0]):
glob_appendonly = True
if glob_appendonly:
log_output("Existing fact table is append-only")
else:
log_output("Existing fact table is not an append-only table")
def main():
global glob_verbose
global glob_log_file
global glob_rowcount
args, parser = parseargs()
if args.logFile != "":
glob_log_file = open(args.logFile, "wt", 1)
if args.verbose:
glob_verbose = True
log_output("Connecting to host %s on port %d, database %s" % (args.host, args.port, args.dbName))
conn = connect(args.host, args.port, args.dbName)
select_version(conn)
if args.create:
glob_rowcount = args.numRows
createDB(conn, args.appendOnly, args.numRows)
smoothStatistics(conn, args.numRows)
else:
inspectExistingTables(conn)
for test_unit in args.tests:
if test_unit == "all":
run_bitmap_index_scan_tests(conn, args.execute)
if glob_appendonly:
# the btree tests are for bitmap scans on AO tables using btree indexes
run_btree_ao_index_scan_tests(conn, args.execute)
run_index_join_tests(conn, args.execute)
# skip the long-running bitmap_ndv_scan_tests and bfv_join_tests
elif test_unit == "index_only_scan_tests":
run_index_only_scan_tests(conn, args.execute)
elif test_unit == "bitmap_scan_tests":
run_bitmap_index_scan_tests(conn, args.execute)
elif test_unit == "bitmap_ndv_scan_tests":
run_bitmap_ndv_scan_tests(conn, args.execute)
elif test_unit == "btree_ao_scan_tests":
run_btree_ao_index_scan_tests(conn, args.execute)
elif test_unit == "index_join_tests":
run_index_join_tests(conn, args.execute)
elif test_unit == "bfv_join_tests":
run_bfv_join_tests(conn, args.execute)
elif test_unit == "brin_tests":
run_brin_tests(conn, args.execute)
elif test_unit == "none":
print("Skipping tests")
if args.drop:
dropDB(conn)
conn.close()
    if glob_log_file is not None:
glob_log_file.close()
if __name__ == "__main__":
main()
|
50wu/gpdb
|
src/backend/gporca/scripts/cal_bitmap_test.py
|
Python
|
apache-2.0
| 70,529 | 0.003474 |
# source: http://stackoverflow.com/questions/2758159/how-to-embed-a-python-interpreter-in-a-pyqt-widget
import sys
import os
import re
import traceback
import platform
from PyQt5 import QtCore
from PyQt5 import QtGui
from PyQt5 import QtWidgets
from electrum import util
from electrum.i18n import _
if platform.system() == 'Windows':
MONOSPACE_FONT = 'Lucida Console'
elif platform.system() == 'Darwin':
MONOSPACE_FONT = 'Monaco'
else:
MONOSPACE_FONT = 'monospace'
class OverlayLabel(QtWidgets.QLabel):
STYLESHEET = '''
QLabel, QLabel link {
color: rgb(0, 0, 0);
background-color: rgb(248, 240, 200);
border: 1px solid;
border-color: rgb(255, 114, 47);
padding: 2px;
}
'''
def __init__(self, text, parent):
super().__init__(text, parent)
self.setMinimumHeight(150)
self.setGeometry(0, 0, self.width(), self.height())
self.setStyleSheet(self.STYLESHEET)
self.setMargin(0)
parent.setHorizontalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
self.setWordWrap(True)
def mousePressEvent(self, e):
self.hide()
def on_resize(self, w):
padding = 2 # px, from the stylesheet above
self.setFixedWidth(w - padding)
class Console(QtWidgets.QPlainTextEdit):
def __init__(self, prompt='>> ', startup_message='', parent=None):
QtWidgets.QPlainTextEdit.__init__(self, parent)
self.prompt = prompt
self.history = []
self.namespace = {}
self.construct = []
self.setGeometry(50, 75, 600, 400)
self.setWordWrapMode(QtGui.QTextOption.WrapAnywhere)
self.setUndoRedoEnabled(False)
self.document().setDefaultFont(QtGui.QFont(MONOSPACE_FONT, 10, QtGui.QFont.Normal))
self.showMessage(startup_message)
self.updateNamespace({'run':self.run_script})
self.set_json(False)
warning_text = "<h1>{}</h1><br>{}<br><br>{}".format(
_("Warning!"),
_("Do not paste code here that you don't understand. Executing the wrong code could lead "
"to your coins being irreversibly lost."),
_("Click here to hide this message.")
)
self.messageOverlay = OverlayLabel(warning_text, self)
def resizeEvent(self, e):
super().resizeEvent(e)
vertical_scrollbar_width = self.verticalScrollBar().width() * self.verticalScrollBar().isVisible()
self.messageOverlay.on_resize(self.width() - vertical_scrollbar_width)
def set_json(self, b):
self.is_json = b
def run_script(self, filename):
with open(filename) as f:
script = f.read()
# eval is generally considered bad practice. use it wisely!
result = eval(script, self.namespace, self.namespace)
def updateNamespace(self, namespace):
self.namespace.update(namespace)
def showMessage(self, message):
self.appendPlainText(message)
self.newPrompt()
def clear(self):
self.setPlainText('')
self.newPrompt()
def newPrompt(self):
if self.construct:
prompt = '.' * len(self.prompt)
else:
prompt = self.prompt
self.completions_pos = self.textCursor().position()
self.completions_visible = False
self.appendPlainText(prompt)
self.moveCursor(QtGui.QTextCursor.End)
def getCommand(self):
doc = self.document()
curr_line = doc.findBlockByLineNumber(doc.lineCount() - 1).text()
curr_line = curr_line.rstrip()
curr_line = curr_line[len(self.prompt):]
return curr_line
def setCommand(self, command):
if self.getCommand() == command:
return
doc = self.document()
curr_line = doc.findBlockByLineNumber(doc.lineCount() - 1).text()
self.moveCursor(QtGui.QTextCursor.End)
for i in range(len(curr_line) - len(self.prompt)):
self.moveCursor(QtGui.QTextCursor.Left, QtGui.QTextCursor.KeepAnchor)
self.textCursor().removeSelectedText()
self.textCursor().insertText(command)
self.moveCursor(QtGui.QTextCursor.End)
def show_completions(self, completions):
if self.completions_visible:
self.hide_completions()
c = self.textCursor()
c.setPosition(self.completions_pos)
completions = map(lambda x: x.split('.')[-1], completions)
t = '\n' + ' '.join(completions)
if len(t) > 500:
t = t[:500] + '...'
c.insertText(t)
self.completions_end = c.position()
self.moveCursor(QtGui.QTextCursor.End)
self.completions_visible = True
def hide_completions(self):
if not self.completions_visible:
return
c = self.textCursor()
c.setPosition(self.completions_pos)
l = self.completions_end - self.completions_pos
for x in range(l): c.deleteChar()
self.moveCursor(QtGui.QTextCursor.End)
self.completions_visible = False
def getConstruct(self, command):
if self.construct:
prev_command = self.construct[-1]
self.construct.append(command)
if not prev_command and not command:
ret_val = '\n'.join(self.construct)
self.construct = []
return ret_val
else:
return ''
else:
if command and command[-1] == (':'):
self.construct.append(command)
return ''
else:
return command
def getHistory(self):
return self.history
def setHisory(self, history):
self.history = history
def addToHistory(self, command):
if command[0:1] == ' ':
return
if command and (not self.history or self.history[-1] != command):
self.history.append(command)
self.history_index = len(self.history)
def getPrevHistoryEntry(self):
if self.history:
self.history_index = max(0, self.history_index - 1)
return self.history[self.history_index]
return ''
def getNextHistoryEntry(self):
if self.history:
hist_len = len(self.history)
self.history_index = min(hist_len, self.history_index + 1)
if self.history_index < hist_len:
return self.history[self.history_index]
return ''
def getCursorPosition(self):
c = self.textCursor()
return c.position() - c.block().position() - len(self.prompt)
def setCursorPosition(self, position):
self.moveCursor(QtGui.QTextCursor.StartOfLine)
for i in range(len(self.prompt) + position):
self.moveCursor(QtGui.QTextCursor.Right)
def register_command(self, c, func):
methods = { c: func}
self.updateNamespace(methods)
def runCommand(self):
command = self.getCommand()
self.addToHistory(command)
command = self.getConstruct(command)
if command:
tmp_stdout = sys.stdout
class stdoutProxy():
def __init__(self, write_func):
self.write_func = write_func
self.skip = False
def flush(self):
pass
def write(self, text):
if not self.skip:
stripped_text = text.rstrip('\n')
self.write_func(stripped_text)
QtCore.QCoreApplication.processEvents()
self.skip = not self.skip
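                    # print() normally issues a separate write() call for the trailing newline;
                    # toggling 'skip' drops every other call so those newline-only writes are not echoed.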
if type(self.namespace.get(command)) == type(lambda:None):
self.appendPlainText("'{}' is a function. Type '{}()' to use it in the Python console."
.format(command, command))
self.newPrompt()
return
sys.stdout = stdoutProxy(self.appendPlainText)
try:
try:
# eval is generally considered bad practice. use it wisely!
result = eval(command, self.namespace, self.namespace)
if result != None:
if self.is_json:
util.print_msg(util.json_encode(result))
else:
self.appendPlainText(repr(result))
except SyntaxError:
# exec is generally considered bad practice. use it wisely!
exec(command, self.namespace, self.namespace)
except SystemExit:
self.close()
except BaseException:
traceback_lines = traceback.format_exc().split('\n')
# Remove traceback mentioning this file, and a linebreak
for i in (3,2,1,-1):
traceback_lines.pop(i)
self.appendPlainText('\n'.join(traceback_lines))
sys.stdout = tmp_stdout
self.newPrompt()
self.set_json(False)
def keyPressEvent(self, event):
if event.key() == QtCore.Qt.Key_Tab:
self.completions()
return
self.hide_completions()
if event.key() in (QtCore.Qt.Key_Enter, QtCore.Qt.Key_Return):
self.runCommand()
return
if event.key() == QtCore.Qt.Key_Home:
self.setCursorPosition(0)
return
if event.key() == QtCore.Qt.Key_PageUp:
return
elif event.key() in (QtCore.Qt.Key_Left, QtCore.Qt.Key_Backspace):
if self.getCursorPosition() == 0:
return
elif event.key() == QtCore.Qt.Key_Up:
self.setCommand(self.getPrevHistoryEntry())
return
elif event.key() == QtCore.Qt.Key_Down:
self.setCommand(self.getNextHistoryEntry())
return
elif event.key() == QtCore.Qt.Key_L and event.modifiers() == QtCore.Qt.ControlModifier:
self.clear()
super(Console, self).keyPressEvent(event)
def completions(self):
cmd = self.getCommand()
        lastword = re.split(r' |\(|\)', cmd)[-1]
beginning = cmd[0:-len(lastword)]
path = lastword.split('.')
ns = self.namespace.keys()
if len(path) == 1:
ns = ns
prefix = ''
else:
obj = self.namespace.get(path[0])
prefix = path[0] + '.'
ns = dir(obj)
completions = []
for x in ns:
if x[0] == '_':continue
xx = prefix + x
if xx.startswith(lastword):
completions.append(xx)
completions.sort()
if not completions:
self.hide_completions()
elif len(completions) == 1:
self.hide_completions()
self.setCommand(beginning + completions[0])
else:
# find common prefix
p = os.path.commonprefix(completions)
if len(p)>len(lastword):
self.hide_completions()
self.setCommand(beginning + p)
else:
self.show_completions(completions)
welcome_message = '''
---------------------------------------------------------------
Welcome to a primitive Python interpreter.
---------------------------------------------------------------
'''
if __name__ == '__main__':
app = QtWidgets.QApplication(sys.argv)
console = Console(startup_message=welcome_message)
console.updateNamespace({'myVar1' : app, 'myVar2' : 1234})
console.show()
sys.exit(app.exec_())
|
cryptapus/electrum
|
electrum/gui/qt/console.py
|
Python
|
mit
| 11,672 | 0.002656 |
import pytest
import os
import sys
from polyglotdb.io import inspect_ilg
from polyglotdb.io.helper import guess_type
from polyglotdb.exceptions import DelimiterError, ILGWordMismatchError
from polyglotdb import CorpusContext
def test_inspect_ilg(ilg_test_dir):
basic_path = os.path.join(ilg_test_dir, 'basic.txt')
parser = inspect_ilg(basic_path)
assert (len(parser.annotation_tiers) == 2)
assert (parser.annotation_tiers[1].trans_delimiter == '.')
def test_inspect_ilg_directory(ilg_test_dir):
parser = inspect_ilg(ilg_test_dir)
assert (len(parser.annotation_tiers) == 2)
@pytest.mark.xfail
def test_export_ilg(graph_db, export_test_dir):
export_path = os.path.join(export_test_dir, 'export_ilg.txt')
with CorpusContext('untimed', **graph_db) as c:
export_discourse_ilg(c, 'test', export_path,
annotations=['label', 'transcription'], words_per_line=3)
expected_lines = ['cats are cute',
'k.ae.t.s aa.r k.uw.t',
'dogs are too',
'd.aa.g.z aa.r t.uw',
'i guess',
'ay g.eh.s']
with open(export_path, 'r') as f:
for i, line in enumerate(f):
assert (line.strip() == expected_lines[i])
def test_ilg_basic(graph_db, ilg_test_dir):
basic_path = os.path.join(ilg_test_dir, 'basic.txt')
parser = inspect_ilg(basic_path)
with CorpusContext('basic_ilg', **graph_db) as c:
c.reset()
c.load(parser, basic_path)
# assert(c.lexicon['a'].frequency == 2)
def test_ilg_mismatched(graph_db, ilg_test_dir):
mismatched_path = os.path.join(ilg_test_dir, 'mismatched.txt')
basic_path = os.path.join(ilg_test_dir, 'basic.txt')
parser = inspect_ilg(basic_path)
with CorpusContext('mismatch', **graph_db) as c:
c.reset()
with pytest.raises(ILGWordMismatchError):
c.load(parser, mismatched_path)
|
PhonologicalCorpusTools/PolyglotDB
|
tests/test_io_ilg.py
|
Python
|
mit
| 1,972 | 0.000507 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Utility functions that can be used in multiple scripts."""
|
MartinThoma/pysec
|
pysec/utils.py
|
Python
|
mit
| 108 | 0.009259 |
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Tangible()
result.template = "object/tangible/deed/guild_deed/shared_tatooine_guild_style_02_deed.iff"
result.attribute_template_id = 2
result.stfName("deed","tatooine_guild_2_deed")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
|
anhstudios/swganh
|
data/scripts/templates/object/tangible/deed/guild_deed/shared_tatooine_guild_style_02_deed.py
|
Python
|
mit
| 473 | 0.046512 |
# vim: set fileencoding=utf-8
# Pavel Odvody <podvody@redhat.com>
#
# HICA - Host integrated container applications
#
# MIT License (C) 2015
import os, sys
from json import loads
from base.hica_base import *
library_path='/usr/lib64'
class LibraryInjector(HicaInjector):
def _get_libs(self):
return sorted(loads(self.labels.get_value('io.hica.libraries')))
def get_description(self):
return 'Bind mounts libraries {0} into the container'.format(', '.join(self._get_libs()))
def get_config_key(self):
return 'io.hica.libraries'
def get_injected_args(self):
return (('--libraries', HicaValueType.STRING, ''), ('--library-path', HicaValueType.PATH, '/usr/lib64'))
def inject_config(self, config, from_args):
"""
:param config:
:type config: list
:param from_args:
:type from_args: dict
"""
load_libs = self._get_libs()
all_libs = {}
found_libs = []
for root, dirs, files in os.walk(library_path):
for f in files:
if not f.endswith('.so'):
continue
full_path = os.path.join(root, f)
if '.' in f:
name, ext = f.split('.', 1)
else:
name = f
if name in all_libs:
all_libs[name].append(full_path)
else:
all_libs[name] = [full_path]
for lib in load_libs:
if 'lib' + lib in all_libs:
p = list(sorted(all_libs['lib' + lib], key=lambda x: len(x))).pop()
v = '--volume={0}:{1}'.format(os.path.realpath(p), p)
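      # e.g. for lib "ssl" this might append "--volume=/usr/lib64/libssl.so.1.1:/usr/lib64/libssl.so"
      # (illustrative paths; realpath() resolves the host symlink target)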
config.append(v)
else:
print('*** Unknown lib: {}'.format(lib))
sys.exit(1)
|
shaded-enmity/docker-hica
|
injectors/libs.py
|
Python
|
mit
| 1,615 | 0.011765 |
""" The SystemAdministratorClient is a class representing the client of the DIRAC
SystemAdministrator service. It has also methods to update the Configuration
Service with the DIRAC components options
"""
__RCSID__ = "$Id$"
from DIRAC.Core.Base.Client import Client, createClient
SYSADMIN_PORT = 9162
@createClient('Framework/SystemAdministrator')
class SystemAdministratorClient(Client):
def __init__(self, host, port=None, **kwargs):
""" Constructor function. Takes a mandatory host parameter
"""
Client.__init__(self, **kwargs)
if not port:
port = SYSADMIN_PORT
self.setServer('dips://%s:%s/Framework/SystemAdministrator' % (host, port))
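A minimal usage sketch for the client defined above; the host name is illustrative and a reachable DIRAC installation (with a valid proxy) is assumed:
if __name__ == '__main__':
    # No port is passed, so the client falls back to SYSADMIN_PORT (9162) and
    # targets dips://dirac01.example.org:9162/Framework/SystemAdministrator.
    client = SystemAdministratorClient('dirac01.example.org')
    # RPC methods exposed by the SystemAdministrator service can then be called on `client`.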
| fstagni/DIRAC | FrameworkSystem/Client/SystemAdministratorClient.py | Python | gpl-3.0 | 683 | 0.005857 |
# Copyright 2016 OpenMarket Ltd
# Copyright 2017 Vector Creations Ltd
# Copyright 2018-2019 New Vector Ltd
# Copyright 2019 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import re
from typing import TYPE_CHECKING, Tuple
from twisted.web.server import Request
from synapse.api.constants import RoomCreationPreset
from synapse.http.server import HttpServer
from synapse.http.servlet import RestServlet
from synapse.types import JsonDict
if TYPE_CHECKING:
from synapse.server import HomeServer
logger = logging.getLogger(__name__)
class VersionsRestServlet(RestServlet):
PATTERNS = [re.compile("^/_matrix/client/versions$")]
def __init__(self, hs: "HomeServer"):
super().__init__()
self.config = hs.config
# Calculate these once since they shouldn't change after start-up.
self.e2ee_forced_public = (
RoomCreationPreset.PUBLIC_CHAT
in self.config.room.encryption_enabled_by_default_for_room_presets
)
self.e2ee_forced_private = (
RoomCreationPreset.PRIVATE_CHAT
in self.config.room.encryption_enabled_by_default_for_room_presets
)
self.e2ee_forced_trusted_private = (
RoomCreationPreset.TRUSTED_PRIVATE_CHAT
in self.config.room.encryption_enabled_by_default_for_room_presets
)
def on_GET(self, request: Request) -> Tuple[int, JsonDict]:
return (
200,
{
"versions": [
# XXX: at some point we need to decide whether we need to include
# the previous version numbers, given we've defined r0.3.0 to be
# backwards compatible with r0.2.0. But need to check how
# conscientious we've been in compatibility, and decide whether the
# middle number is the major revision when at 0.X.Y (as opposed to
# X.Y.Z). And we need to decide whether it's fair to make clients
# parse the version string to figure out what's going on.
"r0.0.1",
"r0.1.0",
"r0.2.0",
"r0.3.0",
"r0.4.0",
"r0.5.0",
"r0.6.0",
"r0.6.1",
"v1.1",
"v1.2",
],
# as per MSC1497:
"unstable_features": {
# Implements support for label-based filtering as described in
# MSC2326.
"org.matrix.label_based_filtering": True,
# Implements support for cross signing as described in MSC1756
"org.matrix.e2e_cross_signing": True,
# Implements additional endpoints as described in MSC2432
"org.matrix.msc2432": True,
# Implements additional endpoints as described in MSC2666
"uk.half-shot.msc2666": True,
# Whether new rooms will be set to encrypted or not (based on presets).
"io.element.e2ee_forced.public": self.e2ee_forced_public,
"io.element.e2ee_forced.private": self.e2ee_forced_private,
"io.element.e2ee_forced.trusted_private": self.e2ee_forced_trusted_private,
# Supports the busy presence state described in MSC3026.
"org.matrix.msc3026.busy_presence": self.config.experimental.msc3026_enabled,
# Supports receiving hidden read receipts as per MSC2285
"org.matrix.msc2285": self.config.experimental.msc2285_enabled,
# Adds support for importing historical messages as per MSC2716
"org.matrix.msc2716": self.config.experimental.msc2716_enabled,
# Adds support for jump to date endpoints (/timestamp_to_event) as per MSC3030
"org.matrix.msc3030": self.config.experimental.msc3030_enabled,
# Adds support for thread relations, per MSC3440.
"org.matrix.msc3440": self.config.experimental.msc3440_enabled,
},
},
)
def register_servlets(hs: "HomeServer", http_server: HttpServer) -> None:
VersionsRestServlet(hs).register(http_server)
| matrix-org/synapse | synapse/rest/client/versions.py | Python | apache-2.0 | 4,936 | 0.003241 |
#!/usr/bin/python
# #
# Copyright 2012-2019 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# https://github.com/easybuilders/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
# #
"""
This script is a collection of all the testcases.
Usage: "python -m test.framework.suite" or "python test/framework/suite.py"
@author: Toon Willems (Ghent University)
@author: Kenneth Hoste (Ghent University)
"""
import glob
import os
import sys
import tempfile
import unittest
# initialize EasyBuild logging, so we disable it
from easybuild.base import fancylogger
from easybuild.tools.build_log import EasyBuildError
from easybuild.tools.options import set_tmpdir
import test.framework.asyncprocess as a
import test.framework.build_log as bl
import test.framework.config as c
import test.framework.containers as ct
import test.framework.easyblock as b
import test.framework.easyconfig as e
import test.framework.easyconfigparser as ep
import test.framework.easyconfigformat as ef
import test.framework.ebconfigobj as ebco
import test.framework.easyconfigversion as ev
import test.framework.environment as env
import test.framework.docs as d
import test.framework.filetools as f
import test.framework.format_convert as f_c
import test.framework.general as gen
import test.framework.github as g
import test.framework.hooks as h
import test.framework.include as i
import test.framework.lib as lib
import test.framework.license as lic
import test.framework.module_generator as mg
import test.framework.modules as m
import test.framework.modulestool as mt
import test.framework.options as o
import test.framework.parallelbuild as p
import test.framework.package as pkg
import test.framework.repository as r
import test.framework.robot as robot
import test.framework.run as run
import test.framework.style as st
import test.framework.systemtools as s
import test.framework.toolchain as tc
import test.framework.toolchainvariables as tcv
import test.framework.toy_build as t
import test.framework.type_checking as et
import test.framework.tweak as tw
import test.framework.variables as v
import test.framework.yeb as y
# set plain text key ring to be used,
# so a GitHub token stored in it can be obtained without having to provide a password
try:
# with recent versions of keyring, PlaintextKeyring comes from keyrings.alt
import keyring
from keyrings.alt.file import PlaintextKeyring
keyring.set_keyring(PlaintextKeyring())
except ImportError:
try:
# with old versions of keyring, PlaintextKeyring comes from keyring.backends
import keyring
from keyring.backends.file import PlaintextKeyring
keyring.set_keyring(PlaintextKeyring())
except ImportError:
pass
# disable all logging to significantly speed up tests
fancylogger.disableDefaultHandlers()
fancylogger.setLogLevelError()
# make sure temporary files can be created/used
try:
set_tmpdir(raise_error=True)
except EasyBuildError as err:
sys.stderr.write("No execution rights on temporary files, specify another location via $TMPDIR: %s\n" % err)
sys.exit(1)
# initialize logger for all the unit tests
fd, log_fn = tempfile.mkstemp(prefix='easybuild-tests-', suffix='.log')
os.close(fd)
os.remove(log_fn)
fancylogger.logToFile(log_fn)
log = fancylogger.getLogger()
# call suite() for each module and then run them all
# note: make sure the options unit tests run first, to avoid running some of them with a readily initialized config
tests = [gen, bl, o, r, ef, ev, ebco, ep, e, mg, m, mt, f, run, a, robot, b, v, g, tcv, tc, t, c, s, lic, f_c,
tw, p, i, pkg, d, env, et, y, st, h, ct, lib]
SUITE = unittest.TestSuite([x.suite() for x in tests])
res = unittest.TextTestRunner().run(SUITE)
fancylogger.logToFile(log_fn, enable=False)
if not res.wasSuccessful():
sys.stderr.write("ERROR: Not all tests were successful.\n")
print("Log available at %s" % log_fn)
sys.exit(2)
else:
for fn in glob.glob('%s*' % log_fn):
os.remove(fn)
| gppezzi/easybuild-framework | test/framework/suite.py | Python | gpl-2.0 | 4,872 | 0.001437 |
#!/usr/bin/env python3
import os
from setuptools import setup
import nvpy
# Utility function to read the README file.
# Used for the long_description. It's nice, because now 1) we have a top level
# README file and 2) it's easier to type in the README file than to put a raw
# string in below ...
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name="nvpy",
version=nvpy.VERSION,
author="Charl P. Botha",
author_email="cpbotha@vxlabs.com",
description="A cross-platform simplenote-syncing note-taking app inspired by Notational Velocity.",
license="BSD",
keywords="simplenote note-taking tkinter nvalt markdown",
url="https://github.com/cpbotha/nvpy",
packages=['nvpy'],
long_description=read('README.rst'),
install_requires=[
# These are in reality not hard requirements of nvpy. If these packages are not installed,
# the Markdown/reStructuredText rendering feature will not work. But basic functions should work.
'Markdown',
'docutils',
        # This is a hard requirement of nvpy.
'simplenote>=2.1.4',
],
extras_require={
# development and test requirements.
'dev': ['mock', 'yapf', 'pdoc3', 'nose', 'nose-timer', 'mypy'],
},
entry_points={'gui_scripts': ['nvpy = nvpy.nvpy:main']},
# use MANIFEST.in file
# because package_data is ignored during sdist
include_package_data=True,
classifiers=[
# See https://pypi.org/classifiers/
"Development Status :: 5 - Production/Stable",
"Environment :: X11 Applications",
"Environment :: MacOS X",
"Environment :: Win32 (MS Windows)",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Topic :: Utilities",
"License :: OSI Approved :: BSD License",
],
)
| yuuki0xff/nvpy | setup.py | Python | bsd-3-clause | 1,989 | 0.001508 |
#
# ovirt-hosted-engine-ha -- ovirt hosted engine high availability
# Copyright (C) 2013 Red Hat, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
import logging
from ovirt_hosted_engine_ha.broker import submonitor_base
from ovirt_hosted_engine_ha.lib import log_filter
from ovirt_hosted_engine_ha.lib import util as util
from vdsm.client import ServerError
def register():
return "mem-free"
class Submonitor(submonitor_base.SubmonitorBase):
def setup(self, options):
self._log = logging.getLogger("%s.MemFree" % __name__)
self._log.addFilter(log_filter.get_intermittent_filter())
def action(self, options):
cli = util.connect_vdsm_json_rpc(
logger=self._log
)
try:
stats = cli.Host.getStats()
except ServerError as e:
self._log.error(e)
self.update_result(None)
return
mem_free = str(stats['memFree'])
self._log.info("memFree: %s", mem_free,
extra=log_filter.lf_args('status', 60))
self.update_result(mem_free)
| oVirt/ovirt-hosted-engine-ha | ovirt_hosted_engine_ha/broker/submonitors/mem_free.py | Python | lgpl-2.1 | 1,774 | 0 |
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for QueryContextLineageSubgraph
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-aiplatform
# [START aiplatform_generated_aiplatform_v1beta1_MetadataService_QueryContextLineageSubgraph_async]
from google.cloud import aiplatform_v1beta1
async def sample_query_context_lineage_subgraph():
# Create a client
client = aiplatform_v1beta1.MetadataServiceAsyncClient()
# Initialize request argument(s)
request = aiplatform_v1beta1.QueryContextLineageSubgraphRequest(
context="context_value",
)
# Make the request
response = await client.query_context_lineage_subgraph(request=request)
# Handle the response
print(response)
# [END aiplatform_generated_aiplatform_v1beta1_MetadataService_QueryContextLineageSubgraph_async]
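The snippet above only defines a coroutine; a minimal driver is needed to run it. A sketch using the standard library (application-default credentials for Vertex AI are assumed to be configured):
import asyncio

if __name__ == "__main__":
    # Execute the generated async sample on a fresh event loop.
    asyncio.run(sample_query_context_lineage_subgraph())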
| googleapis/python-aiplatform | samples/generated_samples/aiplatform_generated_aiplatform_v1beta1_metadata_service_query_context_lineage_subgraph_async.py | Python | apache-2.0 | 1,624 | 0.001847 |
# -*- coding: utf-8 -*-
# template 18
"""
Various tools at your fingertips.
The available tools are:
* cvt_csv_2_rst.py: convert csv file into rst file
* cvt_csv_2_xml.py: convert csv file into xml file
* cvt_script: parse bash script and convert to meet company standard
* gen_readme.py: generate documentation files, mainly README.rst
* odoo_dependency.py: show odoo depencies and/or Odoo module tree
* odoo_translation.py: manage Odoo translation
* pep8: parse source .py file to meet pep8 and convert across Odoo versions
* please: developer shell
* wget_odoo_repositories.py: get repository names from github.com
"""
import os
import sys
import pkg_resources
import gzip
import shutil
__version__ = '1.0.7.1'
def fake_setup(**kwargs):
globals()['setup_args'] = kwargs
def read_setup():
setup_info = os.path.abspath(
os.path.join(os.path.dirname(__file__), 'setup.info'))
if not os.path.isfile(setup_info):
setup_info = os.path.abspath(
os.path.join(os.path.dirname(__file__), '..', 'setup.py'))
setup_args = {}
if os.path.isfile(setup_info):
with open(setup_info, 'r') as fd:
exec(fd.read().replace('setup(', 'fake_setup('))
setup_args = globals()['setup_args']
else:
print('Not internal configuration file found!')
setup_args['setup'] = setup_info
try:
pkg = pkg_resources.get_distribution(__package__.split('.')[0])
setup_args['name'] = pkg.key
setup_args['version'] = pkg.version
except BaseException:
pass
return setup_args
def get_pypi_paths():
local_venv = '/devel/venv/'
pkgpath = os.path.abspath(
os.path.join(os.path.dirname(__file__), '..'))
bin_path = lib_path = ''
path = pkgpath
while not bin_path and path != '/' and path != os.environ['HOME']:
path = os.path.dirname(path)
if os.path.isdir(path) and os.path.basename(path) == 'lib':
bin_path = os.path.join(os.path.dirname(path), 'bin')
lib_path = path
if not bin_path and local_venv:
for path in sys.path:
if local_venv in path:
bin_path = os.path.join(
path[:path.find(local_venv)],
*[x for x in local_venv.split('/') if x][:-1])
break
return pkgpath, bin_path, lib_path
def copy_pkg_data(setup_args, verbose):
if setup_args.get('package_data'):
pkgpath, bin_path, lib_path = get_pypi_paths()
if bin_path:
# TODO> compatibility mode
bin2_path = os.path.join(os.environ['HOME'], 'devel')
if not os.path.isdir(bin2_path):
bin2_path = ''
man_path = os.path.join(bin_path, 'man', 'man8')
if not os.path.isdir(man_path):
man_path = ''
for pkg in setup_args['package_data'].keys():
for fn in setup_args['package_data'][pkg]:
base = os.path.basename(fn)
if base in ('setup.info', '*'):
continue
full_fn = os.path.abspath(os.path.join(pkgpath, fn))
if base.endswith('.man') and man_path:
with open(full_fn, 'r') as fd:
help_text = fd.read()
tgt_fn = os.path.join(man_path, '%s.8.gz' % base[:-4])
with gzip.open(tgt_fn, 'w') as fd:
if sys.version_info[0] == 3:
fd.write(help_text.encode('utf-8'))
else:
fd.write(help_text)
continue
if lib_path:
tgt_fn = os.path.join(lib_path, base)
if verbose:
print('$ cp %s %s' % (full_fn, tgt_fn))
shutil.copy(full_fn, tgt_fn)
# TODO> compatibility mode
tgt_fn = os.path.join(bin_path, base)
if os.path.isfile(tgt_fn):
os.unlink(tgt_fn)
if not os.path.exists(tgt_fn):
if verbose:
print('$ ln -s %s %s' % (full_fn, tgt_fn))
os.symlink(full_fn, tgt_fn)
if bin2_path:
tgt_fn = os.path.join(bin2_path, base)
if os.path.isfile(tgt_fn):
os.unlink(tgt_fn)
# if not os.path.exists(tgt_fn):
# if verbose:
# print('$ ln -s %s %s' % (full_fn, tgt_fn))
# os.symlink(full_fn, tgt_fn)
# TODO> compatibility mode to remove early
if lib_path and bin2_path:
for base in ('z0librc', 'odoorc', 'travisrc'):
full_fn = os.path.join(bin2_path, base)
tgt_fn = os.path.join(bin_path, base)
if os.path.exists(full_fn) and not os.path.exists(tgt_fn):
if verbose:
print('$ cp %s %s' % (full_fn, tgt_fn))
shutil.copy(full_fn, tgt_fn)
def main(cli_args=None):
if not cli_args:
cli_args = sys.argv[1:]
action = '-H'
verbose = False
for arg in cli_args:
if arg in ('-h', '-H', '--help', '-V', '--version', '--copy-pkg-data'):
action = arg
elif arg == '-v':
verbose = True
setup_args = read_setup()
if action == '-h':
print('%s [-h][-H][--help][-V][--version][-C][--copy-pkg-data]' %
setup_args['name'])
elif action in ('-V', '--version'):
if setup_args['version'] == __version__:
print(setup_args['version'])
else:
print('Version mismatch %s/%s' % (setup_args['version'],
__version__))
elif action in ('-H', '--help'):
for text in __doc__.split('\n'):
print(text)
elif action in ('-C', '--copy-pkg-data'):
copy_pkg_data(setup_args, verbose)
return 0
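Since main() takes an explicit argument list, it can be exercised without relying on sys.argv; a small sketch using only a flag handled in the code above:
if __name__ == '__main__':
    # '-H' (the default action) prints the tool list embedded in the module docstring.
    main(['-H'])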
| zeroincombenze/tools | wok_code/scripts/main.py | Python | agpl-3.0 | 6,236 | 0.00016 |
# GUI frame for the sineTransformations_function.py
import os
from Tkinter import *
import tkFileDialog
import tkMessageBox
import numpy as np
from smst.utils import audio
from . import sineTransformations_function as sT
from smst.utils.files import strip_file
class SineTransformationsFrame:
def __init__(self, parent):
self.parent = parent
self.initUI()
def initUI(self):
choose_label = "inputFile:"
Label(self.parent, text=choose_label).grid(row=0, column=0, sticky=W, padx=5, pady=(10, 2))
# TEXTBOX TO PRINT PATH OF THE SOUND FILE
self.filelocation = Entry(self.parent)
self.filelocation.focus_set()
self.filelocation["width"] = 32
self.filelocation.grid(row=0, column=0, sticky=W, padx=(70, 5), pady=(10, 2))
self.filelocation.delete(0, END)
self.filelocation.insert(0, 'sounds/mridangam.wav')
# BUTTON TO BROWSE SOUND FILE
open_file = Button(self.parent, text="...", command=self.browse_file) # see: def browse_file(self)
open_file.grid(row=0, column=0, sticky=W, padx=(340, 6), pady=(10, 2)) # put it beside the filelocation textbox
# BUTTON TO PREVIEW SOUND FILE
preview = Button(self.parent, text=">", command=lambda: audio.play_wav(self.filelocation.get()), bg="gray30",
fg="white")
preview.grid(row=0, column=0, sticky=W, padx=(385, 6), pady=(10, 2))
## SINE TRANSFORMATIONS ANALYSIS
# ANALYSIS WINDOW TYPE
wtype_label = "window:"
Label(self.parent, text=wtype_label).grid(row=1, column=0, sticky=W, padx=5, pady=(10, 2))
self.w_type = StringVar()
self.w_type.set("hamming") # initial value
window_option = OptionMenu(self.parent, self.w_type, "rectangular", "hanning", "hamming", "blackman",
"blackmanharris")
window_option.grid(row=1, column=0, sticky=W, padx=(65, 5), pady=(10, 2))
# WINDOW SIZE
M_label = "M:"
Label(self.parent, text=M_label).grid(row=1, column=0, sticky=W, padx=(180, 5), pady=(10, 2))
self.M = Entry(self.parent, justify=CENTER)
self.M["width"] = 5
self.M.grid(row=1, column=0, sticky=W, padx=(200, 5), pady=(10, 2))
self.M.delete(0, END)
self.M.insert(0, "801")
# FFT SIZE
N_label = "N:"
Label(self.parent, text=N_label).grid(row=1, column=0, sticky=W, padx=(255, 5), pady=(10, 2))
self.N = Entry(self.parent, justify=CENTER)
self.N["width"] = 5
self.N.grid(row=1, column=0, sticky=W, padx=(275, 5), pady=(10, 2))
self.N.delete(0, END)
self.N.insert(0, "2048")
# THRESHOLD MAGNITUDE
t_label = "t:"
Label(self.parent, text=t_label).grid(row=1, column=0, sticky=W, padx=(330, 5), pady=(10, 2))
self.t = Entry(self.parent, justify=CENTER)
self.t["width"] = 5
self.t.grid(row=1, column=0, sticky=W, padx=(348, 5), pady=(10, 2))
self.t.delete(0, END)
self.t.insert(0, "-90")
# MIN DURATION SINUSOIDAL TRACKS
minSineDur_label = "minSineDur:"
Label(self.parent, text=minSineDur_label).grid(row=2, column=0, sticky=W, padx=(5, 5), pady=(10, 2))
self.minSineDur = Entry(self.parent, justify=CENTER)
self.minSineDur["width"] = 5
self.minSineDur.grid(row=2, column=0, sticky=W, padx=(87, 5), pady=(10, 2))
self.minSineDur.delete(0, END)
self.minSineDur.insert(0, "0.01")
# MAX NUMBER OF SINES
maxnSines_label = "maxnSines:"
Label(self.parent, text=maxnSines_label).grid(row=2, column=0, sticky=W, padx=(145, 5), pady=(10, 2))
self.maxnSines = Entry(self.parent, justify=CENTER)
self.maxnSines["width"] = 5
self.maxnSines.grid(row=2, column=0, sticky=W, padx=(220, 5), pady=(10, 2))
self.maxnSines.delete(0, END)
self.maxnSines.insert(0, "150")
# FREQUENCY DEVIATION ALLOWED
freqDevOffset_label = "freqDevOffset:"
Label(self.parent, text=freqDevOffset_label).grid(row=2, column=0, sticky=W, padx=(280, 5), pady=(10, 2))
self.freqDevOffset = Entry(self.parent, justify=CENTER)
self.freqDevOffset["width"] = 5
self.freqDevOffset.grid(row=2, column=0, sticky=W, padx=(372, 5), pady=(10, 2))
self.freqDevOffset.delete(0, END)
self.freqDevOffset.insert(0, "20")
# SLOPE OF THE FREQUENCY DEVIATION
freqDevSlope_label = "freqDevSlope:"
Label(self.parent, text=freqDevSlope_label).grid(row=3, column=0, sticky=W, padx=(5, 5), pady=(10, 2))
self.freqDevSlope = Entry(self.parent, justify=CENTER)
self.freqDevSlope["width"] = 5
self.freqDevSlope.grid(row=3, column=0, sticky=W, padx=(98, 5), pady=(10, 2))
self.freqDevSlope.delete(0, END)
self.freqDevSlope.insert(0, "0.02")
# BUTTON TO DO THE ANALYSIS OF THE SOUND
self.compute = Button(self.parent, text="Analysis/Synthesis", command=self.analysis, bg="dark red", fg="white")
self.compute.grid(row=4, column=0, padx=5, pady=(10, 5), sticky=W)
# BUTTON TO PLAY ANALYSIS/SYNTHESIS OUTPUT
self.output = Button(self.parent, text=">", command=lambda: audio.play_wav(
'output_sounds/' + strip_file(self.filelocation.get()) + '_sineModel.wav'), bg="gray30",
fg="white")
self.output.grid(row=4, column=0, padx=(145, 5), pady=(10, 5), sticky=W)
###
# SEPARATION LINE
Frame(self.parent, height=1, width=50, bg="black").grid(row=5, pady=5, sticky=W + E)
###
# FREQUENCY SCALING FACTORS
freqScaling_label = "Frequency scaling factors (time, value pairs):"
Label(self.parent, text=freqScaling_label).grid(row=6, column=0, sticky=W, padx=5, pady=(5, 2))
self.freqScaling = Entry(self.parent, justify=CENTER)
self.freqScaling["width"] = 35
self.freqScaling.grid(row=7, column=0, sticky=W + E, padx=5, pady=(0, 2))
self.freqScaling.delete(0, END)
self.freqScaling.insert(0, "[0, 2.0, 1, .3]")
# TIME SCALING FACTORS
timeScaling_label = "Time scaling factors (in time, value pairs):"
Label(self.parent, text=timeScaling_label).grid(row=8, column=0, sticky=W, padx=5, pady=(5, 2))
self.timeScaling = Entry(self.parent, justify=CENTER)
self.timeScaling["width"] = 35
self.timeScaling.grid(row=9, column=0, sticky=W + E, padx=5, pady=(0, 2))
self.timeScaling.delete(0, END)
self.timeScaling.insert(0, "[0, .0, .671, .671, 1.978, 1.978+1.0]")
# BUTTON TO DO THE SYNTHESIS
self.compute = Button(self.parent, text="Apply Transformation", command=self.transformation_synthesis,
bg="dark green", fg="white")
self.compute.grid(row=13, column=0, padx=5, pady=(10, 15), sticky=W)
# BUTTON TO PLAY TRANSFORMATION SYNTHESIS OUTPUT
self.transf_output = Button(self.parent, text=">", command=lambda: audio.play_wav(
'output_sounds/' + strip_file(self.filelocation.get()) + '_sineModelTransformation.wav'),
bg="gray30", fg="white")
self.transf_output.grid(row=13, column=0, padx=(165, 5), pady=(10, 15), sticky=W)
# define options for opening file
self.file_opt = options = {}
options['defaultextension'] = '.wav'
options['filetypes'] = [('All files', '.*'), ('Wav files', '.wav')]
options['initialdir'] = 'sounds/'
options['title'] = 'Open a mono audio file .wav with sample frequency 44100 Hz'
def browse_file(self):
self.filename = tkFileDialog.askopenfilename(**self.file_opt)
# set the text of the self.filelocation
self.filelocation.delete(0, END)
self.filelocation.insert(0, self.filename)
def analysis(self):
try:
inputFile = self.filelocation.get()
window = self.w_type.get()
M = int(self.M.get())
N = int(self.N.get())
t = int(self.t.get())
minSineDur = float(self.minSineDur.get())
maxnSines = int(self.maxnSines.get())
freqDevOffset = int(self.freqDevOffset.get())
freqDevSlope = float(self.freqDevSlope.get())
self.inputFile, self.fs, self.tfreq, self.tmag = sT.analysis(inputFile, window, M, N, t, minSineDur,
maxnSines, freqDevOffset, freqDevSlope)
except ValueError:
tkMessageBox.showerror("Input values error", "Some parameters are incorrect")
def transformation_synthesis(self):
try:
inputFile = self.inputFile
fs = self.fs
tfreq = self.tfreq
tmag = self.tmag
freqScaling = np.array(eval(self.freqScaling.get()))
timeScaling = np.array(eval(self.timeScaling.get()))
sT.transformation_synthesis(inputFile, fs, tfreq, tmag, freqScaling, timeScaling)
except ValueError as errorMessage:
tkMessageBox.showerror("Input values error", errorMessage)
except AttributeError:
tkMessageBox.showerror("Analysis not computed", "First you must analyse the sound!")
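A minimal sketch of hosting this frame in a Tkinter root window, assuming the smst package and the sounds/ directory are laid out as in the repository:
if __name__ == '__main__':
    root = Tk()                          # Tk is already imported above via `from Tkinter import *`
    root.title("Sine Transformations")   # window title is illustrative
    SineTransformationsFrame(root)       # initUI() grids all widgets onto the root
    root.mainloop()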
| bzamecnik/sms-tools | smst/ui/transformations/sineTransformations_GUI_frame.py | Python | agpl-3.0 | 9,380 | 0.004158 |
N = int(input())
ans = [0] * N
for i in range(0, N, 5):
q = [0] * N
for j in range(i, min(N, i + 5)):
q[j] = 10 ** (j - i)
print('? {}'.format(' '.join(map(str, q))), flush=True)
S = str(int(input().strip()) - sum(q) * 7)[::-1]
for j in range(i, min(N, i + 5)):
ans[j] = (int(S[j - i]) % 2) ^ 1
print('! {}'.format(' '.join(map(str, ans))), flush=True)
| knuu/competitive-programming | atcoder/corp/codethanksfes2017_e.py | Python | mit | 389 | 0 |
# A Magento 2 module generator library
# Copyright (C) 2016 Maikel Martens
#
# This file is part of Mage2Gen.
#
# Mage2Gen is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import string
class DefaultFormatter(string.Formatter):
def __init__(self, default=''):
self.default = default
def get_field(self, field_name, args, kwargs):
try:
return super().get_field(field_name, args, kwargs)
except (KeyError, AttributeError):
			# Formatter.vformat() unpacks get_field() as an (obj, used_key) pair,
			# so pair the default value with the field name.
			return self.default, field_name
def upperfirst(word):
return word[0].upper() + word[1:]
def lowerfirst(word):
return word[0].lower() + word[1:]
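A quick illustration of the helpers above; the template string and values are made up, and the missing placeholder relies on get_field() falling back to the formatter's default:
if __name__ == '__main__':
    fmt = DefaultFormatter(default='')
    # '{missing}' is not supplied, so it is replaced by the default (empty string).
    print(fmt.format('{vendor}_{missing}Module', vendor='Mage2Gen'))  # Mage2Gen_Module
    print(upperfirst('module'), lowerfirst('Module'))                 # Module module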
| krukas/Mage2Gen | mage2gen/utils.py | Python | gpl-3.0 | 1,144 | 0.012238 |
# (c) Copyright 2014 Cisco Systems Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""
Script to push the zone configuration to Cisco SAN switches.
"""
import random
import re
from eventlet import greenthread
from oslo_concurrency import processutils
from oslo_log import log as logging
from oslo_utils import excutils
import six
from cinder import exception
from cinder.i18n import _
from cinder import ssh_utils
from cinder import utils
from cinder.zonemanager.drivers.cisco import exception as c_exception
import cinder.zonemanager.drivers.cisco.fc_zone_constants as ZoneConstant
LOG = logging.getLogger(__name__)
class CiscoFCZoneClientCLI(object):
"""Cisco FC zone client cli implementation.
OpenStack Fibre Channel zone client cli connector
to manage FC zoning in Cisco SAN fabrics.
Version history:
1.0 - Initial Cisco FC zone client cli
"""
switch_ip = None
switch_port = '22'
switch_user = 'admin'
switch_pwd = 'none'
def __init__(self, ipaddress, username, password, port, vsan):
"""initializing the client."""
self.switch_ip = ipaddress
self.switch_port = port
self.switch_user = username
self.switch_pwd = password
self.fabric_vsan = vsan
self.sshpool = None
def get_active_zone_set(self):
"""Return the active zone configuration.
Return active zoneset from fabric. When none of the configurations
are active then it will return empty map.
:returns: Map -- active zone set map in the following format
.. code-block:: python
{
'zones':
{'openstack50060b0000c26604201900051ee8e329':
['50060b0000c26604', '201900051ee8e329']
},
'active_zone_config': 'OpenStack_Cfg'
}
"""
zone_set = {}
zone = {}
zone_member = None
zone_name = None
switch_data = None
zone_set_name = None
try:
switch_data = self._get_switch_info(
[ZoneConstant.GET_ACTIVE_ZONE_CFG, self.fabric_vsan,
' | no-more'])
except c_exception.CiscoZoningCliException:
with excutils.save_and_reraise_exception():
LOG.error("Failed getting active zone set "
"from fabric %s", self.switch_ip)
try:
for line in switch_data:
# Split on non-word characters,
line_split = re.split(r'[\s\[\]]+', line)
if ZoneConstant.CFG_ZONESET in line_split:
# zoneset name [name] vsan [vsan]
zone_set_name = \
line_split[line_split.index(ZoneConstant.CFG_ZONESET)
+ 2]
continue
if ZoneConstant.CFG_ZONE in line_split:
# zone name [name] vsan [vsan]
zone_name = \
line_split[line_split.index(ZoneConstant.CFG_ZONE) + 2]
zone[zone_name] = list()
continue
if ZoneConstant.CFG_ZONE_MEMBER in line_split:
# Examples:
# pwwn c0:50:76:05:15:9f:00:12
# * fcid 0x1e01c0 [pwwn 50:05:07:68:02:20:48:04] [V7K_N1P2]
zone_member = \
line_split[
line_split.index(ZoneConstant.CFG_ZONE_MEMBER) + 1]
zone_member_list = zone.get(zone_name)
zone_member_list.append(zone_member)
zone_set[ZoneConstant.CFG_ZONES] = zone
zone_set[ZoneConstant.ACTIVE_ZONE_CONFIG] = zone_set_name
except Exception as ex:
# In case of parsing error here, it should be malformed cli output.
msg = _("Malformed zone configuration: (switch=%(switch)s "
"zone_config=%(zone_config)s)."
) % {'switch': self.switch_ip,
'zone_config': switch_data}
LOG.error(msg)
exc_msg = _("Exception: %s") % six.text_type(ex)
LOG.error(exc_msg)
raise exception.FCZoneDriverException(reason=msg)
return zone_set
def add_zones(self, zones, activate, fabric_vsan, active_zone_set,
zone_status):
"""Add zone configuration.
This method will add the zone configuration passed by user.
:param zones: Zone names mapped to members and VSANs
Zone members are colon separated but case-insensitive
.. code-block:: python
{ zonename1:[zonememeber1,zonemember2,...],
zonename2:[zonemember1, zonemember2,...]...}
e.g:
{
'openstack50060b0000c26604201900051ee8e329':
['50:06:0b:00:00:c2:66:04', '20:19:00:05:1e:e8:e3:29']
}
:param activate: True will activate the zone config.
:param fabric_vsan:
:param active_zone_set: Active zone set dict retrieved from
get_active_zone_set method
:param zone_status: Status of the zone
:raises CiscoZoningCliException:
"""
LOG.debug("Add Zones - Zones passed: %s", zones)
LOG.debug("Active zone set: %s", active_zone_set)
zone_list = active_zone_set[ZoneConstant.CFG_ZONES]
LOG.debug("zone list: %s", zone_list)
LOG.debug("zone status: %s", zone_status)
cfg_name = active_zone_set[ZoneConstant.ACTIVE_ZONE_CONFIG]
zone_cmds = [['conf'],
['zoneset', 'name', cfg_name, 'vsan', fabric_vsan]]
for zone in zones.keys():
zone_cmds.append(['zone', 'name', zone])
for member in zones[zone]:
zone_cmds.append(['member', 'pwwn', member])
zone_cmds.append(['end'])
try:
LOG.debug("Add zones: Config cmd to run: %s", zone_cmds)
self._ssh_execute(zone_cmds, True, 1)
if activate:
self.activate_zoneset(cfg_name, fabric_vsan, zone_status)
self._cfg_save()
except Exception as e:
msg = _("Creating and activating zone set failed: "
"(Zone set=%(zoneset)s error=%(err)s)."
) % {'zoneset': cfg_name, 'err': six.text_type(e)}
LOG.error(msg)
raise c_exception.CiscoZoningCliException(reason=msg)
def update_zones(self, zones, activate, fabric_vsan, operation,
active_zone_set, zone_status):
"""Update the zone configuration.
This method will update the zone configuration passed by user.
:param zones: zone names mapped to members. Zone members
are colon separated but case-insensitive
.. code-block:: python
{ zonename1:[zonememeber1, zonemember2,...],
zonename2:[zonemember1, zonemember2,...]...}
e.g:
{
'openstack50060b0000c26604201900051ee8e329':
['50:06:0b:00:00:c2:66:04',
'20:19:00:05:1e:e8:e3:29']
}
:param activate: True will activate the zone config.
:param operation: zone add or zone remove
:param fabric_vsan: Virtual San #
:param active_zone_set: Active zone set dict retrieved from
get_active_zone_set method
:param zone_status: Status of the zone
:raises CiscoZoningCliException:
"""
LOG.debug("Update Zones - Operation: %(op)s - Zones "
"passed: %(zones)s",
{'op': operation, 'zones': zones})
cfg_name = active_zone_set[ZoneConstant.ACTIVE_ZONE_CONFIG]
zone_cmds = [['conf'],
['zoneset', 'name', cfg_name, 'vsan', fabric_vsan]]
zone_mod_cmd = []
if operation == ZoneConstant.ZONE_ADD:
zone_mod_cmd = ['member', 'pwwn']
elif operation == ZoneConstant.ZONE_REMOVE:
zone_mod_cmd = ['no', 'member', 'pwwn']
for zone, zone_members in zones.items():
zone_cmds.append(['zone', 'name', zone])
for member in zone_members:
zone_cmds.append(zone_mod_cmd + [member])
zone_cmds.append(['end'])
try:
LOG.debug("Update zones: Config cmd to run: %s", zone_cmds)
self._ssh_execute(zone_cmds, True, 1)
if activate:
self.activate_zoneset(cfg_name, fabric_vsan, zone_status)
self._cfg_save()
except Exception as e:
msg = (_("Updating and activating zone set failed: "
"(Zone set=%(zoneset)s error=%(err)s).")
% {'zoneset': cfg_name, 'err': six.text_type(e)})
LOG.error(msg)
raise c_exception.CiscoZoningCliException(reason=msg)
def activate_zoneset(self, cfgname, fabric_vsan, zone_status):
"""Method to Activate the zone config. Param cfgname - ZonesetName."""
LOG.debug("zone status: %s", zone_status)
cmd_list = [['conf'],
['zoneset', 'activate', 'name', cfgname, 'vsan',
self.fabric_vsan]]
if zone_status['mode'] == 'enhanced':
cmd_list.append(['zone', 'commit', 'vsan', fabric_vsan])
cmd_list.append(['end'])
return self._ssh_execute(cmd_list, True, 1)
def get_zoning_status(self):
"""Return the zoning mode and session for a zoneset."""
zone_status = {}
try:
switch_data = self._get_switch_info(
[ZoneConstant.GET_ZONE_STATUS, self.fabric_vsan])
except c_exception.CiscoZoningCliException:
with excutils.save_and_reraise_exception():
LOG.error("Failed getting zone status "
"from fabric %s", self.switch_ip)
try:
for line in switch_data:
# Split on non-word characters,
line_split = re.split(r'[\s\[\]]+', line)
if 'mode:' in line_split:
# mode: <enhanced|basic>
zone_status['mode'] = line_split[line_split.index('mode:')
+ 1]
continue
if 'session:' in line_split:
# session: <none|a value other than none>
zone_status['session'] = \
line_split[line_split.index('session:') + 1]
continue
except Exception as ex:
# In case of parsing error here, it should be malformed cli output.
msg = _("Malformed zone status: (switch=%(switch)s "
"zone_config=%(zone_config)s)."
) % {'switch': self.switch_ip,
                         'zone_config': switch_data}
LOG.error(msg)
exc_msg = _("Exception: %s") % six.text_type(ex)
LOG.error(exc_msg)
raise exception.FCZoneDriverException(reason=msg)
return zone_status
def delete_zones(self, zone_names, activate, fabric_vsan, active_zone_set,
zone_status):
"""Delete zones from fabric.
Method to delete the active zone config zones
params zone_names: zoneNames separated by semicolon
params activate: True/False
"""
LOG.debug("zone_names %s", zone_names)
active_zoneset_name = active_zone_set[ZoneConstant.ACTIVE_ZONE_CONFIG]
cmds = [['conf'],
['zoneset', 'name', active_zoneset_name, 'vsan',
fabric_vsan]]
try:
for zone in set(zone_names.split(';')):
cmds.append(['no', 'zone', 'name', zone])
cmds.append(['end'])
LOG.debug("Delete zones: Config cmd to run: %s", cmds)
self._ssh_execute(cmds, True, 1)
if activate:
self.activate_zoneset(active_zoneset_name, fabric_vsan,
zone_status)
self._cfg_save()
except Exception as e:
msg = _("Deleting zones failed: (command=%(cmd)s error=%(err)s)."
) % {'cmd': cmds, 'err': six.text_type(e)}
LOG.error(msg)
raise c_exception.CiscoZoningCliException(reason=msg)
def get_nameserver_info(self):
"""Get name server data from fabric.
This method will return the connected node port wwn list(local
and remote) for the given switch fabric
show fcns database
"""
cli_output = None
return_list = []
try:
cli_output = self._get_switch_info([ZoneConstant.FCNS_SHOW,
self.fabric_vsan])
except c_exception.CiscoZoningCliException:
with excutils.save_and_reraise_exception():
LOG.error("Failed collecting fcns database "
"info for fabric %s", self.switch_ip)
if (cli_output):
return_list = self._parse_ns_output(cli_output)
LOG.info("Connector returning fcnsinfo-%s", return_list)
return return_list
@utils.retry(processutils.ProcessExecutionError, retries=5)
def _cfg_save(self):
cmd = ['copy', 'running-config', 'startup-config']
self._run_ssh(cmd, True)
def _get_switch_info(self, cmd_list):
stdout, stderr, sw_data = None, None, None
try:
stdout, stderr = self._run_ssh(cmd_list, True)
LOG.debug("CLI output from ssh - output: %s", stdout)
if (stdout):
sw_data = stdout.splitlines()
return sw_data
except processutils.ProcessExecutionError as e:
msg = _("Error while getting data via ssh: (command=%(cmd)s "
"error=%(err)s).") % {'cmd': cmd_list,
'err': six.text_type(e)}
LOG.error(msg)
raise c_exception.CiscoZoningCliException(reason=msg)
def _parse_ns_output(self, switch_data):
"""Parses name server data.
Parses nameserver raw data and adds the device port wwns to the list
:returns: List -- list of device port wwn from ns info
"""
return_list = []
for line in switch_data:
if not(" N " in line):
continue
linesplit = line.split()
if len(linesplit) > 2:
node_port_wwn = linesplit[2]
return_list.append(node_port_wwn)
else:
msg = _("Malformed show fcns database string: %s") % line
LOG.error(msg)
raise exception.InvalidParameterValue(err=msg)
return return_list
def _run_ssh(self, cmd_list, check_exit_code=True):
command = ' '.join(cmd_list)
if not self.sshpool:
self.sshpool = ssh_utils.SSHPool(self.switch_ip,
self.switch_port,
None,
self.switch_user,
self.switch_pwd,
min_size=1,
max_size=5)
try:
with self.sshpool.item() as ssh:
return processutils.ssh_execute(
ssh,
command,
check_exit_code=check_exit_code)
except Exception:
with excutils.save_and_reraise_exception():
LOG.warning("Error running SSH command: %s", command)
def _ssh_execute(self, cmd_list, check_exit_code=True, attempts=1):
"""Execute cli with status update.
Executes CLI commands where status return is expected.
cmd_list is a list of commands, where each command is itself
a list of parameters. We use utils.check_ssh_injection to check each
        command, but then join them with " ; " to form a single command.
"""
# Check that each command is secure
for cmd in cmd_list:
utils.check_ssh_injection(cmd)
# Combine into a single command.
command = ' ; '.join(map(lambda x: ' '.join(x), cmd_list))
if not self.sshpool:
self.sshpool = ssh_utils.SSHPool(self.switch_ip,
self.switch_port,
None,
self.switch_user,
self.switch_pwd,
min_size=1,
max_size=5)
stdin, stdout, stderr = None, None, None
LOG.debug("Executing command via ssh: %s", command)
last_exception = None
try:
with self.sshpool.item() as ssh:
while attempts > 0:
attempts -= 1
try:
stdin, stdout, stderr = ssh.exec_command(command)
channel = stdout.channel
exit_status = channel.recv_exit_status()
LOG.debug("Exit Status from ssh: %s", exit_status)
# exit_status == -1 if no exit code was returned
if exit_status != -1:
LOG.debug('Result was %s', exit_status)
if check_exit_code and exit_status != 0:
raise processutils.ProcessExecutionError(
exit_code=exit_status,
stdout=stdout,
stderr=stderr,
cmd=command)
else:
return True
else:
return True
except Exception as e:
LOG.exception('Error executing SSH command.')
last_exception = e
greenthread.sleep(random.randint(20, 500) / 100.0)
LOG.debug("Handling error case after SSH: %s", last_exception)
try:
raise processutils.ProcessExecutionError(
exit_code=last_exception.exit_code,
stdout=last_exception.stdout,
stderr=last_exception.stderr,
cmd=last_exception.cmd)
except AttributeError:
raise processutils.ProcessExecutionError(
exit_code=-1,
stdout="",
stderr="Error running SSH command",
cmd=command)
except Exception:
with excutils.save_and_reraise_exception():
LOG.exception("Error executing command via ssh.")
finally:
if stdin:
stdin.flush()
stdin.close()
if stdout:
stdout.close()
if stderr:
stderr.close()
def cleanup(self):
self.sshpool = None
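For reference, a standalone sketch of how _ssh_execute flattens its cmd_list into the single command string sent over SSH; the zoneset name and VSAN are illustrative:
if __name__ == '__main__':
    example_cmd_list = [['conf'],
                        ['zoneset', 'name', 'OpenStack_Cfg', 'vsan', '1'],
                        ['end']]
    # Same join as used in _ssh_execute above.
    command = ' ; '.join(map(lambda x: ' '.join(x), example_cmd_list))
    print(command)  # conf ; zoneset name OpenStack_Cfg vsan 1 ; end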
| openstack/cinder | cinder/zonemanager/drivers/cisco/cisco_fc_zone_client_cli.py | Python | apache-2.0 | 20,088 | 0 |
# Generated by Django 2.0 on 2017-12-06 09:43
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('profile', '0001_squashed_0005_auto_20170408_1400'),
]
operations = [
migrations.AlterField(
model_name='Profile',
name='theme',
field=models.CharField(blank=True, choices=[('style', 'Lexpage'), ('style_nowel', 'Nowel'), ('style_st_patrick', 'Saint-Patrick'), ('style_halloween', 'Halloween')], help_text='Laissez vide pour adopter automatiquement le thème du moment.', max_length=16, null=True, verbose_name='Thème'),
),
]
| AlexandreDecan/Lexpage | app/profile/migrations/0002_auto_20171206_0943.py | Python | gpl-3.0 | 651 | 0.001541 |
# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from oslo_log import log
from oslo_utils import encodeutils
import six
from keystone.i18n import _, _LW
CONF = cfg.CONF
LOG = log.getLogger(__name__)
# Tests use this to make exception message format errors fatal
_FATAL_EXCEPTION_FORMAT_ERRORS = False
class Error(Exception):
"""Base error class.
Child classes should define an HTTP status code, title, and a
message_format.
"""
code = None
title = None
message_format = None
def __init__(self, message=None, **kwargs):
try:
message = self._build_message(message, **kwargs)
except KeyError:
# if you see this warning in your logs, please raise a bug report
if _FATAL_EXCEPTION_FORMAT_ERRORS:
raise
else:
LOG.warning(_LW('missing exception kwargs (programmer error)'))
message = self.message_format
super(Error, self).__init__(message)
def _build_message(self, message, **kwargs):
"""Builds and returns an exception message.
:raises: KeyError given insufficient kwargs
"""
if not message:
try:
message = self.message_format % kwargs
except UnicodeDecodeError:
try:
kwargs = dict([(k, encodeutils.safe_decode(v)) for k, v in
six.iteritems(kwargs)])
except UnicodeDecodeError:
# NOTE(jamielennox): This is the complete failure case
# at least by showing the template we have some idea
# of where the error is coming from
message = self.message_format
else:
message = self.message_format % kwargs
return message
class ValidationError(Error):
message_format = _("Expecting to find %(attribute)s in %(target)s -"
" the server could not comply with the request"
" since it is either malformed or otherwise"
" incorrect. The client is assumed to be in error.")
code = 400
title = 'Bad Request'
class SchemaValidationError(ValidationError):
# NOTE(lbragstad): For whole OpenStack message consistency, this error
# message has been written in a format consistent with WSME.
message_format = _("%(detail)s")
class ValidationTimeStampError(Error):
message_format = _("Timestamp not in expected format."
" The server could not comply with the request"
" since it is either malformed or otherwise"
" incorrect. The client is assumed to be in error.")
code = 400
title = 'Bad Request'
class StringLengthExceeded(ValidationError):
message_format = _("String length exceeded.The length of"
" string '%(string)s' exceeded the limit"
" of column %(type)s(CHAR(%(length)d)).")
class ValidationSizeError(Error):
message_format = _("Request attribute %(attribute)s must be"
" less than or equal to %(size)i. The server"
" could not comply with the request because"
" the attribute size is invalid (too large)."
" The client is assumed to be in error.")
code = 400
title = 'Bad Request'
class CircularRegionHierarchyError(Error):
message_format = _("The specified parent region %(parent_region_id)s "
"would create a circular region hierarchy.")
code = 400
title = 'Bad Request'
class PasswordVerificationError(Error):
message_format = _("The password length must be less than or equal "
"to %(size)i. The server could not comply with the "
"request because the password is invalid.")
code = 403
title = 'Forbidden'
class RegionDeletionError(Error):
message_format = _("Unable to delete region %(region_id)s because it or "
"its child regions have associated endpoints.")
code = 403
title = 'Forbidden'
class PKITokenExpected(Error):
message_format = _('The certificates you requested are not available. '
'It is likely that this server does not use PKI tokens '
'otherwise this is the result of misconfiguration.')
code = 403
title = 'Cannot retrieve certificates'
class SecurityError(Error):
"""Avoids exposing details of security failures, unless in debug mode."""
amendment = _('(Disable debug mode to suppress these details.)')
def _build_message(self, message, **kwargs):
"""Only returns detailed messages in debug mode."""
if CONF.debug:
return _('%(message)s %(amendment)s') % {
'message': message or self.message_format % kwargs,
'amendment': self.amendment}
else:
return self.message_format % kwargs
class Unauthorized(SecurityError):
message_format = _("The request you have made requires authentication.")
code = 401
title = 'Unauthorized'
class AuthPluginException(Unauthorized):
message_format = _("Authentication plugin error.")
def __init__(self, *args, **kwargs):
super(AuthPluginException, self).__init__(*args, **kwargs)
self.authentication = {}
class MissingGroups(Unauthorized):
message_format = _("Unable to find valid groups while using "
"mapping %(mapping_id)s")
class AuthMethodNotSupported(AuthPluginException):
message_format = _("Attempted to authenticate with an unsupported method.")
def __init__(self, *args, **kwargs):
super(AuthMethodNotSupported, self).__init__(*args, **kwargs)
self.authentication = {'methods': CONF.auth.methods}
class AdditionalAuthRequired(AuthPluginException):
message_format = _("Additional authentications steps required.")
def __init__(self, auth_response=None, **kwargs):
super(AdditionalAuthRequired, self).__init__(message=None, **kwargs)
self.authentication = auth_response
class Forbidden(SecurityError):
message_format = _("You are not authorized to perform the"
" requested action.")
code = 403
title = 'Forbidden'
class ForbiddenAction(Forbidden):
message_format = _("You are not authorized to perform the"
" requested action: %(action)s")
class ImmutableAttributeError(Forbidden):
message_format = _("Could not change immutable attribute(s) "
"'%(attributes)s' in target %(target)s")
class CrossBackendNotAllowed(Forbidden):
message_format = _("Group membership across backend boundaries is not "
"allowed, group in question is %(group_id)s, "
"user is %(user_id)s")
class InvalidPolicyAssociation(Forbidden):
message_format = _("Invalid mix of entities for policy association - "
"only Endpoint, Service or Region+Service allowed. "
"Request was - Endpoint: %(endpoint_id)s, "
"Service: %(service_id)s, Region: %(region_id)s")
class NotFound(Error):
message_format = _("Could not find: %(target)s")
code = 404
title = 'Not Found'
class EndpointNotFound(NotFound):
message_format = _("Could not find endpoint: %(endpoint_id)s")
class MetadataNotFound(NotFound):
"""(dolph): metadata is not a user-facing concept,
so this exception should not be exposed
"""
message_format = _("An unhandled exception has occurred:"
" Could not find metadata.")
class PolicyNotFound(NotFound):
message_format = _("Could not find policy: %(policy_id)s")
class PolicyAssociationNotFound(NotFound):
message_format = _("Could not find policy association")
class RoleNotFound(NotFound):
message_format = _("Could not find role: %(role_id)s")
class RoleAssignmentNotFound(NotFound):
message_format = _("Could not find role assignment with role: "
"%(role_id)s, user or group: %(actor_id)s, "
"project or domain: %(target_id)s")
class RegionNotFound(NotFound):
message_format = _("Could not find region: %(region_id)s")
class ServiceNotFound(NotFound):
message_format = _("Could not find service: %(service_id)s")
class DomainNotFound(NotFound):
message_format = _("Could not find domain: %(domain_id)s")
class ProjectNotFound(NotFound):
message_format = _("Could not find project: %(project_id)s")
class InvalidParentProject(NotFound):
message_format = _("Cannot create project with parent: %(project_id)s")
class TokenNotFound(NotFound):
message_format = _("Could not find token: %(token_id)s")
class UserNotFound(NotFound):
message_format = _("Could not find user: %(user_id)s")
class GroupNotFound(NotFound):
message_format = _("Could not find group: %(group_id)s")
class MappingNotFound(NotFound):
message_format = _("Could not find mapping: %(mapping_id)s")
class TrustNotFound(NotFound):
message_format = _("Could not find trust: %(trust_id)s")
class TrustUseLimitReached(Forbidden):
message_format = _("No remaining uses for trust: %(trust_id)s")
class CredentialNotFound(NotFound):
message_format = _("Could not find credential: %(credential_id)s")
class VersionNotFound(NotFound):
message_format = _("Could not find version: %(version)s")
class EndpointGroupNotFound(NotFound):
message_format = _("Could not find Endpoint Group: %(endpoint_group_id)s")
class IdentityProviderNotFound(NotFound):
message_format = _("Could not find Identity Provider: %(idp_id)s")
class ServiceProviderNotFound(NotFound):
message_format = _("Could not find Service Provider: %(sp_id)s")
class FederatedProtocolNotFound(NotFound):
message_format = _("Could not find federated protocol %(protocol_id)s for"
" Identity Provider: %(idp_id)s")
class PublicIDNotFound(NotFound):
# This is used internally and mapped to either User/GroupNotFound or,
# Assertion before the exception leaves Keystone.
message_format = "%(id)s"
class Conflict(Error):
message_format = _("Conflict occurred attempting to store %(type)s -"
" %(details)s")
code = 409
title = 'Conflict'
class UnexpectedError(SecurityError):
"""Avoids exposing details of failures, unless in debug mode."""
_message_format = _("An unexpected error prevented the server "
"from fulfilling your request.")
debug_message_format = _("An unexpected error prevented the server "
"from fulfilling your request: %(exception)s")
@property
def message_format(self):
"""Return the generic message format string unless debug is enabled."""
if CONF.debug:
return self.debug_message_format
return self._message_format
def _build_message(self, message, **kwargs):
if CONF.debug and 'exception' not in kwargs:
# Ensure that exception has a value to be extra defensive for
# substitutions and make sure the exception doesn't raise an
# exception.
kwargs['exception'] = ''
return super(UnexpectedError, self)._build_message(message, **kwargs)
code = 500
title = 'Internal Server Error'
class PolicyParsingError(UnexpectedError):
message_format = _("Unable to parse policy file %(policy_file)s.")
class TrustConsumeMaximumAttempt(UnexpectedError):
debug_message_format = _("Unable to consume trust %(trust_id)s, unable to "
"acquire lock.")
class CertificateFilesUnavailable(UnexpectedError):
debug_message_format = _("Expected signing certificates are not available "
"on the server. Please check Keystone "
"configuration.")
class MalformedEndpoint(UnexpectedError):
debug_message_format = _("Malformed endpoint URL (%(endpoint)s),"
" see ERROR log for details.")
class MappedGroupNotFound(UnexpectedError):
debug_message_format = _("Group %(group_id)s returned by mapping "
"%(mapping_id)s was not found in the backend.")
class MetadataFileError(UnexpectedError):
message_format = _("Error while reading metadata file, %(reason)s")
class AssignmentTypeCalculationError(UnexpectedError):
message_format = _(
'Unexpected combination of grant attributes - '
'User: %(user_id)s, Group: %(group_id)s, Project: %(project_id)s, '
'Domain: %(domain_id)s')
class NotImplemented(Error):
message_format = _("The action you have requested has not"
" been implemented.")
code = 501
title = 'Not Implemented'
class Gone(Error):
message_format = _("The service you have requested is no"
" longer available on this server.")
code = 410
title = 'Gone'
class ConfigFileNotFound(UnexpectedError):
debug_message_format = _("The Keystone configuration file %(config_file)s "
"could not be found.")
class MultipleSQLDriversInConfig(UnexpectedError):
message_format = _('The Keystone domain configuration file '
'%(config_file)s defines an additional SQL driver - '
'only one is permitted.')
class MigrationNotProvided(Exception):
def __init__(self, mod_name, path):
super(MigrationNotProvided, self).__init__(_(
"%(mod_name)s doesn't provide database migrations. The migration"
" repository path at %(path)s doesn't exist or isn't a directory."
) % {'mod_name': mod_name, 'path': path})
class UnsupportedTokenVersionException(Exception):
"""Token version is unrecognizable or unsupported."""
pass
class SAMLSigningError(UnexpectedError):
debug_message_format = _('Unable to sign SAML assertion. It is likely '
'that this server does not have xmlsec1 '
'installed, or this is the result of '
'misconfiguration. Reason %(reason)s')
title = 'Error signing SAML assertion'
class OAuthHeadersMissingError(UnexpectedError):
debug_message_format = _('No Authorization headers found, cannot proceed '
'with OAuth related calls, if running under '
'HTTPd or Apache, ensure WSGIPassAuthorization '
'is set to On.')
title = 'Error retrieving OAuth headers'
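A small sketch of how these exception classes interpolate keyword arguments into message_format (requires an importable, configured keystone; the project id is made up):
if __name__ == '__main__':
    try:
        raise ProjectNotFound(project_id='abc123')
    except NotFound as exc:
        # Error.__init__() renders message_format % kwargs.
        print(exc)  # Could not find project: abc123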
| UTSA-ICS/keystone-kerberos | keystone/exception.py | Python | apache-2.0 | 15,384 | 0 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import fields, osv
from tools.translate import _
class event_confirm_registration(osv.osv_memory):
"""
Confirm Event Registration
"""
_name = "event.confirm.registration"
_description = "Confirmation for Event Registration"
_columns = {
'msg': fields.text('Message', readonly=True),
}
_defaults = {
'msg': 'The event limit is reached. What do you want to do?'
}
def default_get(self, cr, uid, fields, context=None):
"""
This function gets default values
"""
if context is None:
context = {}
registration_pool = self.pool.get('event.registration')
registration_ids = context.get('registration_ids', [])
res = super(event_confirm_registration, self).default_get(cr, uid, fields, context=context)
msg = ""
overlimit_event_ids = []
for registration in registration_pool.browse(cr, uid, registration_ids, context=context):
register_max = registration.event_id.register_max
if registration.event_id.id not in overlimit_event_ids:
overlimit_event_ids.append(registration.event_id.id)
msg += _("Warning: The Event '%s' has reached its Maximum Limit (%s).") \
%(registration.event_id.name, register_max)
if 'msg' in fields:
res.update({'msg': msg})
return res
def confirm(self, cr, uid, ids, context=None):
if context is None:
context = {}
registration_pool = self.pool.get('event.registration')
registration_ids = context.get('registration_ids', [])
registration_pool.do_open(cr, uid, registration_ids, context=context)
return {'type': 'ir.actions.act_window_close'}
event_confirm_registration()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
crmccreary/openerp_server
|
openerp/addons/event/wizard/event_confirm_registration.py
|
Python
|
agpl-3.0
| 2,865 | 0.002792 |
from setuptools import setup, find_packages
import os
import codecs
here = os.path.abspath(os.path.dirname(__file__))
def read(*parts):
# intentionally *not* adding an encoding option to open
return codecs.open(os.path.join(here, *parts), 'r').read()
install_requires = [
"click==6.6",
"jinja2==2.8"
]
setup(
name='logrotated',
version="0.0.3",
url='https://github.com/nir0s/logrotated',
author='nir0s',
author_email='nir36g@gmail.com',
license='LICENSE',
platforms='All',
description='A logrotate human friendly interface.',
long_description=read('README.rst'),
packages=find_packages(exclude=[]),
package_data={'logrotated': ['resources/logrotate']},
entry_points={
'console_scripts': [
'rotatethis = logrotated.logrotated:main',
]
},
install_requires=install_requires
)
|
nir0s/logrotated
|
setup.py
|
Python
|
apache-2.0
| 879 | 0 |
from snovault import upgrade_step
@upgrade_step('publication', '', '2')
def publication(value, system):
# http://redmine.encodedcc.org/issues/2591
value['identifiers'] = []
if 'references' in value:
for reference in value['references']:
value['identifiers'].append(reference)
del value['references']
# http://redmine.encodedcc.org/issues/2725
# /labs/encode-consortium/
value['lab'] = "cb0ef1f6-3bd3-4000-8636-1c5b9f7000dc"
# /awards/ENCODE/
value['award'] = "b5736134-3326-448b-a91a-894aafb77876"
if 'dbxrefs' in value:
unique_dbxrefs = set(value['dbxrefs'])
value['dbxrefs'] = list(unique_dbxrefs)
@upgrade_step('publication', '2', '3')
def publication_2_3(value, system):
# http://redmine.encodedcc.org/issues/3063
if 'identifiers' in value:
value['identifiers'] = list(set(value['identifiers']))
if 'datasets' in value:
value['datasets'] = list(set(value['datasets']))
if 'categories' in value:
value['categories'] = list(set(value['categories']))
if 'published_by' in value:
value['published_by'] = list(set(value['published_by']))
# Upgrade 3 to 4 in item.py.
@upgrade_step('publication', '4', '5')
def publication_4_5(value, system):
# https://encodedcc.atlassian.net/browse/ENCD-3646
if value['status'] == 'planned':
value['status'] = 'in preparation'
elif value['status'] == 'replaced':
value['status'] = 'deleted'
elif value['status'] in ['in press', 'in revision']:
value['status'] = 'submitted'
@upgrade_step('publication', '5', '6')
def publication_5_6(value, system):
# https://encodedcc.atlassian.net/browse/ENCD-3708
if value['status'] == 'published':
value['status'] = 'released'
elif value['status'] == 'submitted':
value['status'] = 'in progress'
elif value['status'] == 'in preparation':
value['status'] = 'in progress'
else:
pass
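# Illustrative sketch, not part of the upstream module: the upgrade steps above
# are plain functions that mutate the `value` dict in place, so (assuming the
# @upgrade_step decorator returns the function unchanged) they can be exercised
# directly with a hand-built document. The sample dict below is invented.
if __name__ == '__main__':
    sample = {'status': 'published'}
    publication_5_6(sample, system=None)
    assert sample['status'] == 'released'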
|
T2DREAM/t2dream-portal
|
src/encoded/upgrade/publication.py
|
Python
|
mit
| 1,991 | 0 |
from django.shortcuts import render
from datetime import date, datetime, timedelta
from .models import Event, SponsoredContent
from pytz import timezone
def index(request):
now = datetime.now(timezone('Australia/Sydney')).date()
if now.isoweekday() in [5, 6, 7]:
weekend_start = now
else:
weekend_start = now + timedelta((5 - now.isoweekday()) % 7)
events = Event.objects.filter(start__lte = weekend_start, end__gte = weekend_start, status = Event.PUBLISHED_STATUS).order_by('-start')
sponsoredContent = SponsoredContent.objects.filter(start__lte = now, end__gte = now, status = SponsoredContent.PUBLISHED_STATUS).first()
return render(request, 'index.html', {'events': events, 'sponsoredContent': sponsoredContent},)
|
coreymcdermott/artbot
|
artbot_website/views.py
|
Python
|
mit
| 793 | 0.022699 |
#!/usr/bin/env python
"""
Python Tetris is a clunky pygame Tetris clone. Feel free to make it better!!
Copyright (C) 2008 Nick Crafford <nickcrafford@earthlink.net>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""
import random
import pygame
from pygame.locals import *
class Tetromino(object):
def __init__(self,first_x,first_y,mask_color,color, t):
self.first_x = first_x
self.first_y = first_y
self.color = color
self.mask_color = mask_color
self.positions = []
self.max_x = 0
self.min_x = 0
self.max_y = 0
self.currentPosition = 0
self.oldPosition = 0
self.active = True
self.id = random.random()
self.volume = 1.0
self.mfile = '../sound/cluck.wav'
self.freq = 44100
self.bitsize = -32
self.channels = 1
self.buffer = 4096
#Tetromino Switch Statement
if t == 'I':
self.I()
elif t == 'O':
self.O()
elif t == 'T':
self.T()
elif t == 'S':
self.S()
elif t == 'Z':
self.Z()
elif t == 'L':
self.L()
elif t == 'J':
self.J()
#Initialize Sound
if pygame.mixer:
pygame.mixer.init(self.freq, self.bitsize, self.channels, self.buffer)
pygame.mixer.music.set_volume(self.volume)
self.cluck = pygame.mixer.Sound(self.mfile)
def move(self, grid, x_direction, y_direction):
self.max_x = 0
self.min_x = 0
self.max_y = 0
max_x_pos = 0
min_x_pos = 50
max_y_pos = 0
if self.active:
#Render Current Position in color
if grid.accept(self.id,self.positions[self.currentPosition],x_direction,y_direction):
#Set all to mask color
pos = self.positions[self.currentPosition]
for idx in range(len(pos)):
grid.set(self.mask_color,pos[idx][0],pos[idx][1],0)
for posIdx in range(len(self.positions)):
pos = self.positions[posIdx]
for idx in range(len(pos)):
pos[idx] = (pos[idx][0]+x_direction,pos[idx][1]+y_direction)
x = pos[idx][0]
y = pos[idx][1]
if posIdx == self.currentPosition:
grid.set(self.color,x,y,self.id)
if y > max_y_pos:
max_y_pos = y
if x > max_x_pos:
max_x_pos = x
if x < min_x_pos:
min_x_pos = x
self.max_x = max_x_pos*grid.cell_width + grid.cell_width
self.min_x = min_x_pos*grid.cell_width
self.max_y = max_y_pos*grid.cell_height + grid.cell_height
else:
self.cluck.play()
self.active = False
def rotate(self,grid):
self.max_y = 0
if self.active:
self.oldPosition = self.currentPosition
pos = self.positions[self.oldPosition]
for idx in range(len(pos)):
grid.set(self.mask_color,pos[idx][0],pos[idx][1],0)
if self.currentPosition < len(self.positions)-1:
self.currentPosition += 1
else:
self.currentPosition = 0
self.move(grid,0,0)
def I(self):
self.color = (49,199,239)
self.positions.append([(self.first_x, self.first_y), (self.first_x+1, self.first_y),
(self.first_x+2, self.first_y), (self.first_x+3, self.first_y)])
self.positions.append([(self.first_x+2, self.first_y-2), (self.first_x+2, self.first_y-1),
(self.first_x+2, self.first_y), (self.first_x+2, self.first_y+1)])
def O(self):
self.color = (247,211,8)
self.positions.append([(self.first_x, self.first_y), (self.first_x+1, self.first_y-1),
(self.first_x+1, self.first_y), (self.first_x, self.first_y-1)])
def T(self):
self.color = (173,77,156)
self.positions.append([(self.first_x, self.first_y), (self.first_x+1, self.first_y),
(self.first_x+2, self.first_y), (self.first_x+1, self.first_y-1)])
self.positions.append([(self.first_x+1, self.first_y), (self.first_x+2, self.first_y),
(self.first_x+1, self.first_y+1), (self.first_x+1, self.first_y-1)])
self.positions.append([(self.first_x, self.first_y), (self.first_x+1, self.first_y),
(self.first_x+2, self.first_y), (self.first_x+1, self.first_y+1)])
self.positions.append([(self.first_x+1, self.first_y), (self.first_x, self.first_y),
(self.first_x+1, self.first_y+1), (self.first_x+1, self.first_y-1)])
def S(self):
self.color = (66,182,66)
self.positions.append([(self.first_x, self.first_y), (self.first_x+1, self.first_y),
(self.first_x+1, self.first_y+1), (self.first_x+2, self.first_y+1)])
self.positions.append([(self.first_x+2, self.first_y), (self.first_x+2, self.first_y+1),
(self.first_x+1, self.first_y+1), (self.first_x+1, self.first_y+2)])
def Z(self):
self.color = (239,32,41)
self.positions.append([(self.first_x, self.first_y+1), (self.first_x+1, self.first_y+1),
(self.first_x+1, self.first_y), (self.first_x+2, self.first_y)])
self.positions.append([(self.first_x+1, self.first_y), (self.first_x+1, self.first_y+1),
(self.first_x+2, self.first_y+1), (self.first_x+2, self.first_y+2)])
def L(self):
self.color = (90,101,173)
self.positions.append([(self.first_x, self.first_y), (self.first_x, self.first_y+1),
(self.first_x+1, self.first_y+1), (self.first_x+2, self.first_y+1)])
self.positions.append([(self.first_x+1, self.first_y), (self.first_x+1, self.first_y+1),
(self.first_x, self.first_y+2), (self.first_x+1, self.first_y+2)])
self.positions.append([(self.first_x, self.first_y+1), (self.first_x+1, self.first_y+1),
(self.first_x+2, self.first_y+1), (self.first_x+2, self.first_y+2)])
self.positions.append([(self.first_x+2, self.first_y), (self.first_x+1, self.first_y),
(self.first_x+1, self.first_y+1), (self.first_x+1, self.first_y+2)])
def J(self):
self.color = (239,121,33)
self.positions.append([(self.first_x, self.first_y+1), (self.first_x+1, self.first_y+1),
(self.first_x+2, self.first_y+1), (self.first_x+2, self.first_y)])
self.positions.append([(self.first_x, self.first_y), (self.first_x+1, self.first_y),
(self.first_x+1, self.first_y+1), (self.first_x+1, self.first_y+2)])
self.positions.append([(self.first_x, self.first_y+1), (self.first_x, self.first_y+2),
(self.first_x+1, self.first_y+1), (self.first_x+2, self.first_y+1)])
self.positions.append([(self.first_x+1, self.first_y), (self.first_x+1, self.first_y+1),
(self.first_x+1, self.first_y+2), (self.first_x+2, self.first_y+2)])
|
nickcrafford/python-pygame-tetris
|
src/Tetromino.py
|
Python
|
gpl-3.0
| 8,712 | 0.015725 |
#!/usr/bin/python
# logging.basicConfig
# Messages on screen or file like object - StreamHandlers
# logging.Formatter
# man date/https://docs.python.org/2/library/time.html#time.strftime
import logging
logging.basicConfig(filename="disk.log",filemode='a',level=logging.DEBUG,format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',datefmt='%c')
# modes
# r - read mode - reading a file.
# w - write mode - write to a file. If the file does not exist it is created;
#                  if it exists, it is truncated to zero length.
# a - append mode - appends contents to the file.
disk_size = input("please enter the disk size:")
if disk_size < 40:
logging.info("Your disk looks healthy at {}".format(disk_size))
elif disk_size < 60:
logging.warning("Your disk is getting filled up {}".format(disk_size))
elif disk_size < 90:
    logging.error("your disk is stomach full. It is going to burst out {}".format(disk_size))
elif disk_size < 100:
logging.critical("your application is sleeping {}".format(disk_size))
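# Illustrative note, not part of the original script: with the basicConfig call
# above (filemode='a', datefmt='%c'), a run with disk_size = 75 appends a line
# to disk.log that looks roughly like:
#   Mon Jan  1 10:00:00 2018 - root - ERROR - your disk is stomach full. ... 75
# The logger name defaults to 'root' because the module-level logging functions
# are used; the timestamp format comes from the '%c' datefmt.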
|
tuxfux-hlp-notes/python-batches
|
archieves/batch-56/logging/second.py
|
Python
|
gpl-3.0
| 1,002 | 0.005988 |
from collections import namedtuple
from prophyc.generators import base, word_wrap
INDENT_STR = u" "
MAX_LINE_WIDTH = 100
DocStr = namedtuple("DocStr", "block, inline")
def _form_doc(model_node, max_inl_docstring_len, indent_level):
block_doc, inline_doc = "", ""
if model_node.docstring:
if len(model_node.docstring) <= max_inl_docstring_len and "\n" not in model_node.docstring:
inline_doc = u" // {}".format(model_node.docstring)
elif model_node.docstring:
block_doc = u"\n" + "".join(
_gen_multi_line_doc(model_node.docstring, indent_level=indent_level, block_header=model_node.name))
return DocStr(block_doc, inline_doc)
schema_line_breaker = word_wrap.BreakLinesByWidth(MAX_LINE_WIDTH, " ", "/* ", " * ", " ", " */")
@schema_line_breaker
def _gen_multi_line_doc(block_comment_text, indent_level=0, block_header=""):
assert "\n" not in block_header, "Will not work with line breaks in header bar."
if block_header:
if len(block_comment_text) >= 250:
schema_line_breaker.make_a_bar("-" if indent_level else "=", block_header)
yield block_header
for paragraph in block_comment_text.split("\n"):
yield paragraph
def _columnizer(model_node, column_splitter, max_line_width=100):
members_table = [column_splitter(m) for m in model_node.members]
widths = [max(len(str(r)) for r in g) for g in zip(*members_table)]
max_inline_comment_width = max_line_width - sum(widths)
for member, columns in zip(model_node.members, members_table):
doc = _form_doc(member, max_inline_comment_width, indent_level=1)
if doc.block:
yield doc.block
yield u"\n" + INDENT_STR
for is_not_last, (cell_width, cell_str) in enumerate(zip(widths, columns), 1 - len(columns)):
yield cell_str
padding = u" " * (max(0, cell_width - len(cell_str)))
if is_not_last:
yield padding
elif doc.inline:
yield padding + doc.inline
if model_node.members:
yield "\n"
def generate_schema_container(model_node, designator, column_splitter):
if model_node.docstring:
block_docstring = u"".join(_gen_multi_line_doc(model_node.docstring, indent_level=0,
block_header=model_node.name))
if block_docstring:
block_docstring += u"\n"
else:
block_docstring = u""
members = u"".join(_columnizer(model_node, column_splitter, max_line_width=100))
return u"{}{} {} {{{}}};".format(block_docstring, designator, model_node.name, members)
class SchemaTranslator(base.TranslatorBase):
block_template = u'''{content}'''
@staticmethod
def translate_include(include):
doc = _form_doc(include, 50, indent_level=0)
return u"{d.block}#include \"{0.name}\"{d.inline}".format(include, d=doc)
@staticmethod
def translate_constant(constant):
doc = _form_doc(constant, max_inl_docstring_len=50, indent_level=0)
return u"{d.block}\n{0.name} = {0.value};{d.inline}".format(constant, d=doc)
@staticmethod
def translate_enum(enumerator):
def column_selector(member):
value = u" = {};".format(member.value)
return member.name, value
return generate_schema_container(enumerator, "enum", column_selector)
@staticmethod
def translate_struct(struct):
def column_selector(member):
type_ = member.value
if member.optional:
type_ += u"*"
if member.is_fixed:
name = u"{m.name}[{m.size}];"
elif member.is_limited:
name = u"{m.name}<{m.size}>;"
elif member.is_dynamic:
name = u"{m.name}<@{m.bound}>;"
elif member.greedy:
name = u"{m.name}<...>;"
else:
name = u"{m.name};"
return type_, u" ", name.format(m=member)
return generate_schema_container(struct, u"struct", column_selector)
@staticmethod
def translate_union(union):
def column_selector(member):
discriminator = u"{}: ".format(member.discriminator)
field_type = member.value
field_name = u" {};".format(member.name)
return discriminator, field_type, field_name
return generate_schema_container(union, u"union", column_selector)
@classmethod
def _make_lines_splitter(cls, previous_node_type, current_node_type):
if not previous_node_type:
return u""
if previous_node_type == "Include" and current_node_type != "Include":
return u"\n\n"
if previous_node_type in ("Struct", "Union") or current_node_type in ("Enum", "Struct", "Union"):
return u"\n\n\n"
if previous_node_type != current_node_type:
return u"\n\n"
return u"\n"
class SchemaGenerator(base.GeneratorBase):
top_level_translators = {
'.prophy': SchemaTranslator,
}
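# Rough usage sketch, not part of the module: the translators above only need
# nodes exposing .name, .value and .docstring, so a namedtuple stand-in is
# enough to see the emitted schema text. FakeConstant below is invented for
# illustration only.
if __name__ == '__main__':
    from collections import namedtuple
    FakeConstant = namedtuple('FakeConstant', 'name value docstring')
    print(SchemaTranslator.translate_constant(FakeConstant('MAX_SIZE', 128, '')))
    # prints a leading blank line followed by: MAX_SIZE = 128;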
|
aurzenligl/prophy
|
prophyc/generators/prophy.py
|
Python
|
mit
| 5,118 | 0.00254 |
from distutils.core import setup
import os
import glob
setup(
name = 'pyspecfit',
url = 'http://justincely.github.io',
version = '0.0.1',
description = 'interact with IRAF task specfit I/O products',
author = 'Justin Ely',
author_email = 'ely@stsci.edu',
keywords = ['astronomy'],
classifiers = ['Programming Language :: Python',
'Development Status :: 1 - Planning',
'Intended Audience :: Science/Research',
'Topic :: Scientific/Engineering :: Astronomy',
'Topic :: Scientific/Engineering :: Physics',
'Topic :: Software Development :: Libraries :: Python Modules'],
packages = ['pyspecfit']
)
|
justincely/pyspecfit
|
setup.py
|
Python
|
bsd-3-clause
| 731 | 0.025992 |
# coding: utf-8
import bisect
import contextlib
import csv
from datetime import datetime
import functools
import json
import logging
import random
import threading
import time
import unittest
import uuid
import redis
QUIT = False
SAMPLE_COUNT = 100
config_connection = None
# Listing 5-1
# <start id="recent_log"/>
# Set up a mapping that should help turn most logging severity levels into something consistent.
SEVERITY = {
logging.DEBUG: 'debug',
logging.INFO: 'info',
logging.WARNING: 'warning',
logging.ERROR: 'error',
logging.CRITICAL: 'critical',
}
SEVERITY.update((name, name) for name in SEVERITY.values())
def log_recent(conn, name, message, severity=logging.INFO, pipe=None):
    # Try to turn the logging level into a simple string.
severity = str(SEVERITY.get(severity, severity)).lower()
    # Create the key that the messages will be written to.
destination = 'recent:%s:%s'%(name, severity)
    # Add the current time to the message so we know when it was sent.
message = time.asctime() + ' ' + message
    # Use a pipeline so that only a single round trip is needed.
pipe = pipe or conn.pipeline()
    # Add the message to the beginning of the log list.
pipe.lpush(destination, message)
    # Trim the log list so it only holds the 100 most recent messages.
pipe.ltrim(destination, 0, 99)
    # Execute the two commands.
pipe.execute()
# <end id="recent_log"/>
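# Usage sketch (not part of the book listing): a call like
#   log_recent(conn, 'main', 'User 235 logged in')
# prepends the timestamped message to the Redis list 'recent:main:info' and
# trims that list so it keeps only the 100 most recent entries.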
# Listing 5-2
# <start id="common_log"/>
def log_common(conn, name, message, severity=logging.INFO, timeout=5):
    # Handle the logging level.
severity = str(SEVERITY.get(severity, severity)).lower()
    # The key where the common log messages are stored.
destination = 'common:%s:%s'%(name, severity)
    # Because the log is rotated every hour, a key is used to track the start of the current hour.
start_key = destination + ':start'
pipe = conn.pipeline()
end = time.time() + timeout
while time.time() < end:
try:
            # Watch the key that records the current hour, so the rotation happens correctly.
pipe.watch(start_key)
            # Get the current time.
now = datetime.utcnow().timetuple()
            # Find the start of the current hour.
hour_start = datetime(*now[:4]).isoformat()
existing = pipe.get(start_key)
            # Set up the transaction.
pipe.multi()
            # If the current list of common logs is for a previous hour...
if existing and existing < hour_start:
                # ...then archive the old common log information.
pipe.rename(destination, destination + ':last')
pipe.rename(start_key, destination + ':pstart')
                # Update the start of the current hour.
pipe.set(start_key, hour_start)
            # Increment the counter that records how often this log message appears.
pipe.zincrby(destination, message)
            # log_recent() records the message and also calls execute() on the pipeline.
log_recent(pipe, name, message, severity, pipe)
return
except redis.exceptions.WatchError:
            # If we got a watch error because another client was archiving, just retry.
continue
# <end id="common_log"/>
# Listing 5-3
# <start id="update_counter"/>
# Counter precisions, in seconds: 1 second, 5 seconds, 1 minute, 5 minutes, 1 hour, 5 hours, and 1 day - adjust these as needed.
PRECISION = [1, 5, 60, 300, 3600, 18000, 86400] #A
def update_counter(conn, name, count=1, now=None):
    # Get the current time so we know which time slice to increment.
now = now or time.time()
    # Create a transactional pipeline so that later cleanup can work correctly.
pipe = conn.pipeline()
    # Create a counter for every precision that we record.
for prec in PRECISION:
        # Get the start of the current time slice.
pnow = int(now / prec) * prec
        # Create the name of the hash that stores the count data.
hash = '%s:%s'%(prec, name)
        # Record a reference to the counter in a sorted set, with a score of 0,
        # so that cleanup can be performed later.
pipe.zadd('known:', hash, 0)
        # Update the counter for the given name and precision.
pipe.hincrby('count:' + hash, pnow, count)
pipe.execute()
# <end id="update_counter"/>
# Listing 5-4
# <start id="get_counter"/>
def get_counter(conn, name, precision):
    # Get the name of the key where the counter data is stored.
hash = '%s:%s'%(precision, name)
    # Fetch the counter data from Redis.
data = conn.hgetall('count:' + hash)
    # Convert the counter data into the expected format.
to_return = []
for key, value in data.iteritems():
to_return.append((int(key), int(value)))
    # Sort the data so that older samples come first.
to_return.sort()
return to_return
# <end id="get_counter"/>
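# Illustrative note (not part of the book listing): for a call like
# update_counter(conn, 'hits', count=1, now=1400000000), each precision prec
# gets the field int(now / prec) * prec incremented in the hash
# 'count:<prec>:hits' (e.g. 'count:5:hits'), and get_counter(conn, 'hits', 5)
# later reads that hash back as a sorted list of (time_slice, count) pairs.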
# <start id="clean_counters"/>
def clean_counters(conn):
pipe = conn.pipeline(True)
    # Keep a record of the number of passes so we can evenly handle counters that are updated at different rates.
passes = 0
    # Keep cleaning out counters until we are told to quit.
while not QUIT:
        # Record the time the pass starts, so we can calculate how long it takes.
start = time.time()
        # Incrementally iterate over all of the known counters.
index = 0
while index < conn.zcard('known:'):
            # Get the next counter to check.
hash = conn.zrange('known:', index, index)
index += 1
if not hash:
break
hash = hash[0]
            # Get the precision of the counter.
prec = int(hash.partition(':')[0])
            # Because the cleanup loop runs roughly every 60 seconds, decide whether the
            # counter's update frequency means it really needs to be cleaned on this pass.
bprec = int(prec // 60) or 1
            # If this counter does not need to be cleaned on this pass,
            # move on to the next counter.
            # (For example, if the cleanup loop has only run three times and the
            # counter is only updated every 5 minutes, it does not need cleaning yet.)
if passes % bprec:
continue
hkey = 'count:' + hash
            # Based on the precision and the number of samples to keep,
            # work out the cutoff time before which samples can be discarded.
cutoff = time.time() - SAMPLE_COUNT * prec
            # Get the start times of the samples, converting the strings to integers.
samples = map(int, conn.hkeys(hkey))
            # Work out how many samples need to be removed.
samples.sort()
remove = bisect.bisect_right(samples, cutoff)
            # Remove the samples as necessary.
if remove:
conn.hdel(hkey, *samples[:remove])
                # The hash may now be empty.
if remove == len(samples):
try:
                        # Watch the counter hash before trying to modify it.
pipe.watch(hkey)
                        # Verify that the counter hash is empty, and if so,
                        # remove it from the sorted set of known counters.
if not pipe.hlen(hkey):
pipe.multi()
pipe.zrem('known:', hash)
pipe.execute()
                            # Because we deleted a counter, the next loop
                            # iteration can reuse the same index as this one.
index -= 1
else:
                            # The counter hash is not empty; keep it in the
                            # sorted set of known counters.
pipe.unwatch()
                    # Someone else added new data to this counter hash; it is no longer
                    # empty, so leave it in the sorted set of known counters.
except redis.exceptions.WatchError:
pass
        # Update the pass count and the pass duration so that the cleanup
        # frequency stays in step with how often the counters are updated.
passes += 1
duration = min(int(time.time() - start) + 1, 60)
        # If the pass took less than 60 seconds, sleep for the remainder of the
        # minute; otherwise sleep for at least one second to offer a brief rest.
time.sleep(max(60 - duration, 1))
# <end id="clean_counters"/>
# Listing 5-6
# <start id="update_stats"/>
def update_stats(conn, context, type, value, timeout=5):
    # Set up the destination key for the statistics.
destination = 'stats:%s:%s'%(context, type)
    # Like common_log(), handle both the current hour's data and
    # the previous hour's data.
start_key = destination + ':start'
pipe = conn.pipeline(True)
end = time.time() + timeout
while time.time() < end:
try:
pipe.watch(start_key)
now = datetime.utcnow().timetuple()
hour_start = datetime(*now[:4]).isoformat()
existing = pipe.get(start_key)
pipe.multi()
if existing and existing < hour_start:
pipe.rename(destination, destination + ':last')
pipe.rename(start_key, destination + ':pstart')
pipe.set(start_key, hour_start)
tkey1 = str(uuid.uuid4())
tkey2 = str(uuid.uuid4())
            # Add the value to the temporary keys.
pipe.zadd(tkey1, 'min', value)
pipe.zadd(tkey2, 'max', value)
            # Union the temporary keys with the destination stats key, using
            # the appropriate MIN and MAX aggregation functions.
pipe.zunionstore(destination,
[destination, tkey1], aggregate='min')
pipe.zunionstore(destination,
[destination, tkey2], aggregate='max')
            # Clean up the temporary keys.
pipe.delete(tkey1, tkey2)
            # Update the count, the sum of the values, and the sum of the squares of the values.
pipe.zincrby(destination, 'count')
pipe.zincrby(destination, 'sum', value)
pipe.zincrby(destination, 'sumsq', value*value)
            # Return the base counter info so the caller can act on it if needed.
return pipe.execute()[-3:]
except redis.exceptions.WatchError:
            # If a new hour has started and the old data has already been archived, retry.
continue
# <end id="update_stats"/>
# Listing 5-7
# <start id="get_stats"/>
def get_stats(conn, context, type):
    # The key that the statistics will be fetched from.
key = 'stats:%s:%s'%(context, type)
    # Fetch the basic statistics and package them as a dictionary.
data = dict(conn.zrange(key, 0, -1, withscores=True))
    # Calculate the average.
data['average'] = data['sum'] / data['count']
    # Prepare the first part of the standard deviation calculation.
numerator = data['sumsq'] - data['sum'] ** 2 / data['count']
    # Finish the standard deviation calculation.
data['stddev'] = (numerator / (data['count'] - 1 or 1)) ** .5
return data
# <end id="get_stats"/>
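# Worked example (not part of the book listing): if the 'count', 'sum' and
# 'sumsq' members of the stats sorted set hold 5, 50 and 520, get_stats()
# computes average = 50 / 5 = 10, numerator = 520 - 50**2 / 5 = 20, and
# stddev = (20 / (5 - 1)) ** .5, roughly 2.236.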
# Listing 5-8
# <start id="access_time_context_manager"/>
# Turn this Python generator into a context manager.
@contextlib.contextmanager
def access_time(conn, context):
    # Record the time before the wrapped block runs.
start = time.time()
    # Run the wrapped code block.
yield
    # Calculate how long the block took to execute.
delta = time.time() - start
    # Update the statistics for this context.
stats = update_stats(conn, context, 'AccessTime', delta)
    # Calculate the average access time for the page.
average = stats[1] / stats[0]
pipe = conn.pipeline(True)
    # Add the page's average access time to the sorted set of slowest access times.
pipe.zadd('slowest:AccessTime', context, average)
    # The AccessTime sorted set only keeps the 100 slowest entries.
pipe.zremrangebyrank('slowest:AccessTime', 0, -101)
pipe.execute()
# <end id="access_time_context_manager"/>
# <start id="access_time_use"/>
# This view takes a Redis connection and a callback that generates the content.
def process_view(conn, callback):
    # This is how the access-time context manager wraps a block of code.
with access_time(conn, request.path):
        # This statement runs when the yield inside the context manager executes.
return callback()
# <end id="access_time_use"/>
# Listing 5-9
# <start id="_1314_14473_9188"/>
def ip_to_score(ip_address):
score = 0
for v in ip_address.split('.'):
score = score * 256 + int(v, 10)
return score
# <end id="_1314_14473_9188"/>
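# Quick check (not part of the book listing): the octets are folded base-256,
# so ip_to_score('10.0.0.1') == 10*256**3 + 0*256**2 + 0*256 + 1 == 167772161,
# which means numeric IP ranges keep their ordering when used as sorted-set scores.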
# Listing 5-10
# <start id="_1314_14473_9191"/>
# This function expects to be given the location of the GeoLiteCity-Blocks.csv file.
def import_ips_to_redis(conn, filename):
csv_file = csv.reader(open(filename, 'rb'))
for count, row in enumerate(csv_file):
        # Convert the IP address to a score as necessary.
start_ip = row[0] if row else ''
if 'i' in start_ip.lower():
continue
if '.' in start_ip:
start_ip = ip_to_score(start_ip)
elif start_ip.isdigit():
start_ip = int(start_ip, 10)
else:
            # Skip the header row and any malformed entries.
continue
        # Construct the unique city ID.
city_id = row[2] + '_' + str(count)
        # Add the city ID and the IP address score to the sorted set.
conn.zadd('ip2cityid:', city_id, start_ip)
# <end id="_1314_14473_9191"/>
# Listing 5-11
# <start id="_1314_14473_9194"/>
# This function expects to be given the location of the GeoLiteCity-Location.csv file.
def import_cities_to_redis(conn, filename):
for row in csv.reader(open(filename, 'rb')):
if len(row) < 4 or not row[0].isdigit():
continue
row = [i.decode('latin-1') for i in row]
        # Prepare the information that will be added to the hash.
city_id = row[0]
country = row[1]
region = row[2]
city = row[3]
        # Add the city information to Redis.
conn.hset('cityid2city:', city_id,
json.dumps([city, region, country]))
# <end id="_1314_14473_9194"/>
# Listing 5-12
# <start id="_1314_14473_9197"/>
def find_city_by_ip(conn, ip_address):
    # Convert the IP address to a score for the ZREVRANGEBYSCORE call.
if isinstance(ip_address, str): #A
ip_address = ip_to_score(ip_address) #A
    # Look up the unique city ID.
city_id = conn.zrevrangebyscore( #B
'ip2cityid:', ip_address, 0, start=0, num=1) #B
if not city_id:
return None
    # Convert the unique city ID back to the common city ID.
city_id = city_id[0].partition('_')[0] #C
    # Fetch the city information from the hash.
return json.loads(conn.hget('cityid2city:', city_id)) #D
# <end id="_1314_14473_9197"/>
# Listing 5-13
# <start id="is_under_maintenance"/>
LAST_CHECKED = None
IS_UNDER_MAINTENANCE = False
def is_under_maintenance(conn):
    # Declare the two variables as globals so we can write to them later.
global LAST_CHECKED, IS_UNDER_MAINTENANCE #A
    # Has it been more than one second since the last check?
if LAST_CHECKED < time.time() - 1: #B
        # Update the time of the last check.
LAST_CHECKED = time.time() #C
        # Check whether the system is under maintenance.
IS_UNDER_MAINTENANCE = bool( #D
conn.get('is-under-maintenance')) #D
    # Return a boolean indicating whether the system is under maintenance.
return IS_UNDER_MAINTENANCE #E
# <end id="is_under_maintenance"/>
# Listing 5-14
# <start id="set_config"/>
def set_config(conn, type, component, config):
conn.set(
'config:%s:%s'%(type, component),
json.dumps(config))
# <end id="set_config"/>
#END
# Listing 5-15
# <start id="get_config"/>
CONFIGS = {}
CHECKED = {}
def get_config(conn, type, component, wait=1):
key = 'config:%s:%s'%(type, component)
    # Check to see if the configuration for this component should be updated.
if CHECKED.get(key) < time.time() - wait:
        # The configuration needs updating; record the time of this check.
CHECKED[key] = time.time()
        # Fetch the component configuration stored in Redis.
config = json.loads(conn.get(key) or '{}')
        # Convert potential Unicode keyword arguments into string keyword arguments.
config = dict((str(k), config[k]) for k in config)
        # Get the configuration the component is currently using.
old_config = CONFIGS.get(key)
        # If the two configurations differ...
if config != old_config:
            # ...update the component's configuration.
CONFIGS[key] = config
return CONFIGS.get(key)
# <end id="get_config"/>
# Listing 5-16
# <start id="redis_connection"/>
REDIS_CONNECTIONS = {}
# Pass the name of the application component to the decorator.
def redis_connection(component, wait=1): #A
    # Cache the configuration key, since the function would otherwise rebuild it on every call.
key = 'config:redis:' + component #B
    # The wrapper takes a function and wraps it with another function.
def wrapper(function): #C
        # Copy some useful metadata from the wrapped function onto the configuration handler.
@functools.wraps(function) #D
        # Create the function that manages the connection information.
def call(*args, **kwargs): #E
            # Fetch the old configuration, if any.
old_config = CONFIGS.get(key, object()) #F
            # Fetch the new configuration, if any.
_config = get_config( #G
config_connection, 'redis', component, wait) #G
config = {}
            # Process the configuration so it can be used to create a Redis connection.
for k, v in _config.iteritems(): #L
config[k.encode('utf-8')] = v #L
            # If the new and old configurations differ, create a new connection.
if config != old_config: #H
REDIS_CONNECTIONS[key] = redis.Redis(**config) #H
            # Pass the Redis connection along with the other arguments to the wrapped function, call it, and return its result.
return function( #I
REDIS_CONNECTIONS.get(key), *args, **kwargs) #I
        # Return the wrapped function.
return call #J
    # Return the wrapper that wraps our Redis-using functions.
return wrapper #K
# <end id="redis_connection"/>
# Listing 5-17
'''
# <start id="recent_log_decorator"/>
@redis_connection('logs')  # The redis_connection() decorator is very easy to use.
def log_recent(conn, app, message):  # The function definition is unchanged from what was shown earlier.
    'the old log_recent() code'
log_recent('main', 'User 235 logged in')  # We no longer need to pass the log server connection to log_recent() by hand.
# <end id="recent_log_decorator"/>
'''
#--------------- Below this line are helpers to test the code ----------------
class request:
pass
# a faster version with pipelines for actual testing
def import_ips_to_redis(conn, filename):
csv_file = csv.reader(open(filename, 'rb'))
pipe = conn.pipeline(False)
for count, row in enumerate(csv_file):
start_ip = row[0] if row else ''
if 'i' in start_ip.lower():
continue
if '.' in start_ip:
start_ip = ip_to_score(start_ip)
elif start_ip.isdigit():
start_ip = int(start_ip, 10)
else:
continue
city_id = row[2] + '_' + str(count)
pipe.zadd('ip2cityid:', city_id, start_ip)
if not (count+1) % 1000:
pipe.execute()
pipe.execute()
def import_cities_to_redis(conn, filename):
pipe = conn.pipeline(False)
for count, row in enumerate(csv.reader(open(filename, 'rb'))):
if len(row) < 4 or not row[0].isdigit():
continue
row = [i.decode('latin-1') for i in row]
city_id = row[0]
country = row[1]
region = row[2]
city = row[3]
pipe.hset('cityid2city:', city_id,
json.dumps([city, region, country]))
if not (count+1) % 1000:
pipe.execute()
pipe.execute()
class TestCh05(unittest.TestCase):
def setUp(self):
global config_connection
import redis
self.conn = config_connection = redis.Redis(db=15)
self.conn.flushdb()
def tearDown(self):
self.conn.flushdb()
del self.conn
global config_connection, QUIT, SAMPLE_COUNT
config_connection = None
QUIT = False
SAMPLE_COUNT = 100
print
print
def test_log_recent(self):
import pprint
conn = self.conn
print "Let's write a few logs to the recent log"
for msg in xrange(5):
log_recent(conn, 'test', 'this is message %s'%msg)
recent = conn.lrange('recent:test:info', 0, -1)
print "The current recent message log has this many messages:", len(recent)
print "Those messages include:"
pprint.pprint(recent[:10])
self.assertTrue(len(recent) >= 5)
def test_log_common(self):
import pprint
conn = self.conn
print "Let's write some items to the common log"
for count in xrange(1, 6):
for i in xrange(count):
log_common(conn, 'test', "message-%s"%count)
common = conn.zrevrange('common:test:info', 0, -1, withscores=True)
print "The current number of common messages is:", len(common)
print "Those common messages are:"
pprint.pprint(common)
self.assertTrue(len(common) >= 5)
def test_counters(self):
import pprint
global QUIT, SAMPLE_COUNT
conn = self.conn
print "Let's update some counters for now and a little in the future"
now = time.time()
for delta in xrange(10):
update_counter(conn, 'test', count=random.randrange(1,5), now=now+delta)
counter = get_counter(conn, 'test', 1)
print "We have some per-second counters:", len(counter)
self.assertTrue(len(counter) >= 10)
counter = get_counter(conn, 'test', 5)
print "We have some per-5-second counters:", len(counter)
print "These counters include:"
pprint.pprint(counter[:10])
self.assertTrue(len(counter) >= 2)
print
tt = time.time
def new_tt():
return tt() + 2*86400
time.time = new_tt
print "Let's clean out some counters by setting our sample count to 0"
SAMPLE_COUNT = 0
t = threading.Thread(target=clean_counters, args=(conn,))
t.setDaemon(1) # to make sure it dies if we ctrl+C quit
t.start()
time.sleep(1)
QUIT = True
time.time = tt
counter = get_counter(conn, 'test', 86400)
print "Did we clean out all of the counters?", not counter
self.assertFalse(counter)
def test_stats(self):
import pprint
conn = self.conn
print "Let's add some data for our statistics!"
for i in xrange(5):
r = update_stats(conn, 'temp', 'example', random.randrange(5, 15))
print "We have some aggregate statistics:", r
rr = get_stats(conn, 'temp', 'example')
print "Which we can also fetch manually:"
pprint.pprint(rr)
self.assertTrue(rr['count'] >= 5)
def test_access_time(self):
import pprint
conn = self.conn
print "Let's calculate some access times..."
for i in xrange(10):
with access_time(conn, "req-%s"%i):
time.sleep(.5 + random.random())
print "The slowest access times are:"
atimes = conn.zrevrange('slowest:AccessTime', 0, -1, withscores=True)
pprint.pprint(atimes[:10])
self.assertTrue(len(atimes) >= 10)
print
def cb():
time.sleep(1 + random.random())
print "Let's use the callback version..."
for i in xrange(5):
request.path = 'cbreq-%s'%i
process_view(conn, cb)
print "The slowest access times are:"
atimes = conn.zrevrange('slowest:AccessTime', 0, -1, withscores=True)
pprint.pprint(atimes[:10])
self.assertTrue(len(atimes) >= 10)
def test_ip_lookup(self):
conn = self.conn
try:
open('GeoLiteCity-Blocks.csv', 'rb')
open('GeoLiteCity-Location.csv', 'rb')
except:
print "********"
print "You do not have the GeoLiteCity database available, aborting test"
print "Please have the following two files in the current path:"
print "GeoLiteCity-Blocks.csv"
print "GeoLiteCity-Location.csv"
print "********"
return
print "Importing IP addresses to Redis... (this may take a while)"
import_ips_to_redis(conn, 'GeoLiteCity-Blocks.csv')
ranges = conn.zcard('ip2cityid:')
print "Loaded ranges into Redis:", ranges
self.assertTrue(ranges > 1000)
print
print "Importing Location lookups to Redis... (this may take a while)"
import_cities_to_redis(conn, 'GeoLiteCity-Location.csv')
cities = conn.hlen('cityid2city:')
print "Loaded city lookups into Redis:", cities
self.assertTrue(cities > 1000)
print
print "Let's lookup some locations!"
rr = random.randrange
for i in xrange(5):
print find_city_by_ip(conn, '%s.%s.%s.%s'%(rr(1,255), rr(256), rr(256), rr(256)))
def test_is_under_maintenance(self):
print "Are we under maintenance (we shouldn't be)?", is_under_maintenance(self.conn)
self.conn.set('is-under-maintenance', 'yes')
print "We cached this, so it should be the same:", is_under_maintenance(self.conn)
time.sleep(1)
print "But after a sleep, it should change:", is_under_maintenance(self.conn)
print "Cleaning up..."
self.conn.delete('is-under-maintenance')
time.sleep(1)
print "Should be False again:", is_under_maintenance(self.conn)
def test_config(self):
print "Let's set a config and then get a connection from that config..."
set_config(self.conn, 'redis', 'test', {'db':15})
@redis_connection('test')
def test(conn2):
return bool(conn2.info())
print "We can run commands from the configured connection:", test()
if __name__ == '__main__':
unittest.main()
|
huangz1990/riacn-code
|
ch05_listing_source.py
|
Python
|
mit
| 29,256 | 0.006797 |
# -*- coding: utf-8 -*-
"""
Created on Sun May 14 22:13:58 2017
"""
#python3
"""
>>> exc_coro = demo_exc_handling()
>>> next(exc_coro)
-> coroutine started
>>> exc_coro.send(11)
-> coroutine received: 11
>>> exc_coro.send(22)
-> coroutine received: 22
>>> exc_coro.close()
>>> from inspect import getgeneratorstate
>>> getgeneratorstate(exc_coro)
'GEN_CLOSED'
>>> exc_coro = demo_exc_handling()
>>> next(exc_coro)
-> coroutine started
>>> exc_coro.send(11)
-> coroutine received: 11
>>> exc_coro.throw(DemoException)
*** DemoException handled. Continuing...
>>> getgeneratorstate(exc_coro)
'GEN_SUSPENDED'
>>> exc_coro = demo_exc_handling()
>>> next(exc_coro)
-> coroutine started
>>> exc_coro.send(11)
-> coroutine received: 11
>>> exc_coro.throw(ZeroDivisionError)
Traceback (most recent call last):
...
ZeroDivisionError
>>> getgeneratorstate(exc_coro)
'GEN_CLOSED'
"""
from inspect import getgeneratorstate
class DemoException(Exception):
"""异常类型。"""
def demo_exc_handling():
print('-> coroutine started')
while True:
try:
x = yield
except DemoException:
print('*** DemoException handled. Continuing...')
else:
print('-> coroutine received: {!r}'.format(x))
raise RuntimeError('This line should never run.')
if __name__ == "__main__":
exc_coro = demo_exc_handling()
next(exc_coro)
exc_coro.send(11)
exc_coro.send(22)
exc_coro.close()
print(getgeneratorstate(exc_coro))
|
wuqize/FluentPython
|
chapter16/coro_exc_demo.py
|
Python
|
lgpl-3.0
| 1,499 | 0.00403 |
# -*- coding: utf-8 -*-
ADMIN_MAPPING = {
'admin_user_suspend': {
'resource': 'admin/users/{id}/suspend',
'docs': ('http://docs.discourse.org/#tag/'
'Admin%2Fpaths%2F~1admin~1users~1%7Bid%7D~1suspend%2Fput'),
'methods': ['PUT'],
},
'admin_user_unsuspend': {
'resource': 'admin/users/{id}/unsuspend',
'docs': ('http://docs.discourse.org/#tag/'
'Admin%2Fpaths%2F~1admin~1users~1%7Bid%7D~1unsuspend%2Fput'),
'methods': ['PUT'],
},
'admin_user_block': {
'resource': 'admin/users/{id}/block',
'docs': ('http://docs.discourse.org/#tag/'
'Admin%2Fpaths%2F~1admin~1users~1%7Bid%7D~1block%2Fput'),
'methods': ['PUT'],
},
'admin_user_unblock': {
'resource': 'admin/users/{id}/unblock',
'docs': ('http://docs.discourse.org/#tag/'
'Admin%2Fpaths%2F~1admin~1users~1%7Bid%7D~1unblock%2Fput'),
'methods': ['PUT'],
},
'admin_user_activate': {
'resource': 'admin/users/{id}/activate',
'docs': ('http://docs.discourse.org/#tag/'
'Admin%2Fpaths%2F~1admin~1users~1%7Bid%7D~1activate%2Fput'),
'methods': ['PUT'],
},
'admin_user_anonymize': {
'resource': 'admin/users/{id}/anonymize',
'docs': ('http://docs.discourse.org/#tag/'
'Admin%2Fpaths%2F~1admin~1users~1%7Bid%7D~1anonymize%2Fput'),
'methods': ['PUT'],
},
'admin_api_key_generate': {
'resource': 'admin/users/{id}/generate_api_key',
'docs': ('http://docs.discourse.org/#tag/'
'Admin%2Fpaths%2F~1admin~1users~1%7Bid%7D~1generate_api_key%2Fpost'),
'methods': ['POST'],
},
'admin_group_assign': {
'resource': 'admin/users/{id}/groups',
'docs': ('http://docs.discourse.org/#tag/'
'Admin%2Fpaths%2F~1admin~1users~1%7Bid%7D~1groups%2Fpost'),
'methods': ['POST'],
},
'admin_group_remove': {
'resource': 'admin/users/{id}/groups/{group_id}',
'docs': ('http://docs.discourse.org/#tag/'
'Admin%2Fpaths%2F~1admin~1users~1%7Bid%7D~1groups~1%7Bgroup_id%7D%2Fdelete'),
'methods': ['DELETE'],
},
'admin_group_create': {
'resource': 'admin/groups',
'docs': ('http://docs.discourse.org/#tag/'
'Admin%2Fpaths%2F~1admin~1groups%2Fpost'),
'methods': ['POST'],
},
'admin_group_delete': {
'resource': 'admin/groups/{group_id}.json',
'docs': ('http://docs.discourse.org/#tag/'
'Admin%2Fpaths%2F~1admin~1groups~1%7Bgroup_id%7D.json%2Fdelete'),
'methods': ['DELETE'],
},
'admin_group_members_list': {
'resource': 'groups/{group_name}/members.json',
'docs': ('http://docs.discourse.org/#tag/'
'Admin%2Fpaths%2F~1groups~1%7Bgroup_name%7D~1members.json%2Fget'),
'methods': ['GET'],
},
'admin_group_members_add': {
'resource': 'groups/{group_id}/members.json',
'docs': ('http://docs.discourse.org/#tag/'
'Admin%2Fpaths%2F~1groups~1%7Bgroup_id%7D~1members.json%2Fput'),
'methods': ['PUT'],
},
'admin_group_members_delete': {
'resource': 'groups/{group_id}/members.json',
'docs': ('http://docs.discourse.org/#tag/'
'Admin%2Fpaths%2F~1groups~1%7Bgroup_id%7D~1members.json%2Fdelete'),
'methods': ['DELETE'],
},
'admin_site_settings_show': {
'resource': 'admin/site_settings.json',
'docs': ('http://docs.discourse.org/#tag/'
'Admin%2Fpaths%2F~1admin~1site_settings.json%2Fget'),
'methods': ['GET'],
},
}
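# Illustrative sketch, not part of the package: each mapping entry pairs a
# resource template with its allowed HTTP methods, so a request URL can be
# built by filling in the placeholders (the id below is an example value):
#   ADMIN_MAPPING['admin_user_suspend']['resource'].format(id=42)
#   # -> 'admin/users/42/suspend'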
|
0xc0ffeec0de/tapioca-discourse
|
tapioca_discourse/resource_mapping/admin.py
|
Python
|
mit
| 3,741 | 0.001604 |
#!/usr/bin/env python
#
# The MIT License (MIT)
#
# Copyright (c) 2015 Greg Aitkenhead
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
bead-calculator
============
A simple Python script to help peyote stitch beadworkers start their projects.
BeadCalculator checks to make sure the number of beads used in a project will
work out mathematically and lets the beadworker know what design elements
will be possible for the number of starting beads entered.
##To Use:
1. Measure the object that you'll be beading by stringing beads and wrapping
thread around the object.
2. Enter the number of beads from the initial measurement around the object.
3. BeadCalculator tells you if that number of beads will work, and if it does
not, BeadCalculator suggests an alternative number or numbers to start with.
BeadCalculator also tells the beadworker how many beads to string, how many to
add (when the first two lines of beads are added to the project), and what long
and short side design elements will be available.
"""
import cgi
import logging
import webapp2
import jinja2
from jinja2 import Environment, FileSystemLoader
ENV = Environment(loader=FileSystemLoader('templates'))
class MainHandler(webapp2.RequestHandler):
"""Renders the root of the web-app using base.html as the template."""
def get(self):
"""Create main web page."""
template = ENV.get_template('base.html')
self.response.write(template.render())
class CalcBeadResults(webapp2.RequestHandler):
"""
Run all logic and create templates depending on value of beads_entered.
    Value 'beads_entered' comes from the textarea value of main-form in base.html.
"""
def get(self):
"""Gets number of beads entered from base.html form input."""
bead_input = cgi.escape(self.request.get('beads_entered'))
def sanity_check(bead_input):
"""Before running full code, check to see that the number entered
(beads), is greater than 12 and that it is divisible by 6 or 9.
If 'beads' is less than 12, print error message.
"""
if int(bead_input) < 12:
beads_user_chose = str(bead_input)
more_beads_message = "Please re-try using more than 12 beads."
template = ENV.get_template('try-again.html')
self.response.write(
template.render(
beads_user_chose=beads_user_chose,
more_beads_message=more_beads_message)
)
# Run sanity_check.
sanity_check(bead_input)
def long_short_values(bead_input):
"""
Returns short and long side numbers of design elements depending on
whether the beads_entered is mod 6, 9, or 12. If number of beads
entered is not mod 6 or 9, long_short_values finds the higher and
lower values matching the above criteria and then suggests those
numbers to the user. Also shows the new list values so that the
user can see which option offers the most design choices.
"""
# Lists of possible results for design elements (values)
# where key represents the modulo interger.
check_list = {
6: (3, 5),
9: (4, 7),
12: (5, 9)
}
pass_list = [v for k, v in check_list.iteritems()
if int(bead_input) % k == 0]
if len(pass_list) != 0 and int(bead_input) >= 12:
# Suggest starting bead number and number of beads to add.
# These formulas are based on the specific 'three drop' peyote
# stitch pattern used (as opposed to the simpler 'two drop.')
suggested = int(bead_input)
beads_to_add = suggested/3
starting_number = beads_to_add*2
pass_list = ", ".join(repr(e) for e in sorted(pass_list))
starting_number = str(starting_number)
beads_to_add = str(beads_to_add)
beads_user_chose = str(bead_input)
# If pass_list contains values, print design elements and
# start/add numbers. See /templates/pass-test.html, which
# extends base.html.
template = ENV.get_template('pass-test.html')
self.response.write(
template.render(
beads_user_chose=beads_user_chose,
pass_list=pass_list,
starting_number=starting_number,
beads_to_add=beads_to_add
)
)
if len(pass_list) == 0:
# If list contains no values, find next usable number.
higher_list = pass_list
high_bead = int(bead_input)
while len(higher_list) == 0:
# Check that new number matches modulo criteria.
high_bead += 1
higher_list = [v for k, v in check_list.iteritems()
if int(high_bead) % k == 0]
if len(higher_list) != 0 and int(bead_input) >= 12:
# If pass_list does not contain values,
# suggest usable design element numbers
# for both next bead higher and next bead lower.
high_bead = str(high_bead)
higher_list = ", ".join(repr(e) for e in
sorted(higher_list))
# Also, find the next usable number lower than beads.
lower_list = pass_list
low_bead = int(bead_input)
# Make sure number of beads is >12 to avoid low numbers.
while len(lower_list) == 0 and low_bead > 12:
# Check to see if the new number matches modulo criteria.
low_bead -= 1
lower_list = [v for k, v in check_list.iteritems()
if int(low_bead) % k == 0]
if len(lower_list) != 0:
# Suggest design elements for lower bead options.
low_bead = str(low_bead)
lower_list = ", ".join(
repr(e) for e in sorted(lower_list)
)
beads_user_chose = str(bead_input)
template = ENV.get_template('no-pass.html')
self.response.write(
template.render(
beads_user_chose=beads_user_chose,
high_bead=high_bead,
higher_list=higher_list,
low_bead=low_bead,
lower_list=lower_list
)
)
# Run long_short_values.
long_short_values(bead_input)
APP = webapp2.WSGIApplication([
('/', MainHandler),
('/bead_results', CalcBeadResults)
], debug=False)
# Extra handlers like 404, 500, etc.
def handle_500(request, response, exception):
"""Create custom error responses."""
logging.exception(exception)
response.write('Oops! This is a 500 error. ')
response.write('This program can only process numbers. ')
response.write('Please use the back arrow and try again.')
response.set_status(500)
APP.error_handlers[500] = handle_500
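# Rough sketch, not part of the app: the modulo check at the heart of
# long_short_values() can be reproduced outside the webapp2 handler. The helper
# and the bead count below are only for illustration.
def _design_options(beads):
    """Return the (short, long) design elements available for a bead count."""
    check_list = {6: (3, 5), 9: (4, 7), 12: (5, 9)}
    return sorted(v for k, v in check_list.items() if beads % k == 0)
# e.g. _design_options(54) -> [(3, 5), (4, 7)]; 54/3 = 18 beads are added,
# and 18*2 = 36 beads are strung to start.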
|
HarryLoofah/gae-bead-calculator
|
main.py
|
Python
|
mit
| 8,656 | 0.000116 |
# Configuration file for ipython.
# pylint: disable=E0602
c = get_config()
# -----------------------------------------------------------------------------
# InteractiveShellApp configuration
# -----------------------------------------------------------------------------
# A Mixin for applications that start InteractiveShell instances.
#
# Provides configurables for loading extensions and executing files as part of
# configuring a Shell environment.
#
# The following methods should be called by the :meth:`initialize` method of the
# subclass:
#
# - :meth:`init_path`
# - :meth:`init_shell` (to be implemented by the subclass)
# - :meth:`init_gui_pylab`
# - :meth:`init_extensions`
# - :meth:`init_code`
# Execute the given command string.
# c.InteractiveShellApp.code_to_run = ''
# lines of code to run at IPython startup.
# c.InteractiveShellApp.exec_lines = []
# Enable GUI event loop integration ('qt', 'wx', 'gtk', 'glut', 'pyglet',
# 'osx').
# c.InteractiveShellApp.gui = None
# Pre-load matplotlib and numpy for interactive use, selecting a particular
# matplotlib backend and loop integration.
# c.InteractiveShellApp.pylab = None
# Configure matplotlib for interactive use with the default matplotlib backend.
# c.InteractiveShellApp.matplotlib = None
# If true, IPython will populate the user namespace with numpy, pylab, etc. and
# an 'import *' is done from numpy and pylab, when using pylab mode.
#
# When False, pylab mode should not import any names into the user namespace.
# c.InteractiveShellApp.pylab_import_all = True
# A list of dotted module names of IPython extensions to load.
# c.InteractiveShellApp.extensions = []
# Run the module as a script.
# c.InteractiveShellApp.module_to_run = None
# dotted module name of an IPython extension to load.
# c.InteractiveShellApp.extra_extension = ''
# List of files to run at IPython startup.
# c.InteractiveShellApp.exec_files = []
# A file to be run
# c.InteractiveShellApp.file_to_run = ''
#------------------------------------------------------------------------------
# TerminalIPythonApp configuration
#------------------------------------------------------------------------------
# TerminalIPythonApp will inherit config from: BaseIPythonApplication,
# Application, InteractiveShellApp
# Pre-load matplotlib and numpy for interactive use, selecting a particular
# matplotlib backend and loop integration.
# c.TerminalIPythonApp.pylab = None
# Create a massive crash report when IPython encounters what may be an internal
# error. The default is to append a short message to the usual traceback
# c.TerminalIPythonApp.verbose_crash = False
# Run the module as a script.
# c.TerminalIPythonApp.module_to_run = ''
# The date format used by logging formatters for %(asctime)s
# c.TerminalIPythonApp.log_datefmt = '%Y-%m-%d %H:%M:%S'
# Whether to overwrite existing config files when copying
# c.TerminalIPythonApp.overwrite = False
# Execute the given command string.
# c.TerminalIPythonApp.code_to_run = ''
# Set the log level by value or name.
# c.TerminalIPythonApp.log_level = 30
# lines of code to run at IPython startup.
# c.TerminalIPythonApp.exec_lines = []
# Suppress warning messages about legacy config files
# c.TerminalIPythonApp.ignore_old_config = False
# Path to an extra config file to load.
#
# If specified, load this config file in addition to any other IPython config.
# c.TerminalIPythonApp.extra_config_file = u''
# dotted module name of an IPython extension to load.
# c.TerminalIPythonApp.extra_extension = ''
# A file to be run
# c.TerminalIPythonApp.file_to_run = ''
# The IPython profile to use.
# c.TerminalIPythonApp.profile = u'default'
# Configure matplotlib for interactive use with the default matplotlib backend.
# c.TerminalIPythonApp.matplotlib = None
# If a command or file is given via the command-line, e.g. 'ipython foo.py',
# start an interactive shell after executing the file or command.
# c.TerminalIPythonApp.force_interact = False
# If true, IPython will populate the user namespace with numpy, pylab, etc. and
# an 'import *' is done from numpy and pylab, when using pylab mode.
#
# When False, pylab mode should not import any names into the user namespace.
# c.TerminalIPythonApp.pylab_import_all = True
# The name of the IPython directory. This directory is used for logging
# configuration (through profiles), history storage, etc. The default is usually
# ~/.ipython. This option can also be specified through the environment
# variable IPYTHONDIR.
# Whether to display a banner upon starting IPython.
c.TerminalIPythonApp.display_banner = False
# Whether to install the default config files into the profile dir. If a new
# profile is being created, and IPython contains config files for that profile,
# then they will be staged into the new directory. Otherwise, default config
# files will be automatically generated.
# c.TerminalIPythonApp.copy_config_files = False
# List of files to run at IPython startup.
# c.TerminalIPythonApp.exec_files = []
# Enable GUI event loop integration ('qt', 'wx', 'gtk', 'glut', 'pyglet',
# 'osx').
# c.TerminalIPythonApp.gui = None
# A list of dotted module names of IPython extensions to load.
# c.TerminalIPythonApp.extensions = []
# Start IPython quickly by skipping the loading of config files.
# c.TerminalIPythonApp.quick = False
# The Logging format template
# c.TerminalIPythonApp.log_format = '[%(name)s]%(highlevel)s %(message)s'
#------------------------------------------------------------------------------
# TerminalInteractiveShell configuration
#------------------------------------------------------------------------------
# TerminalInteractiveShell will inherit config from: InteractiveShell
# auto editing of files with syntax errors.
# c.TerminalInteractiveShell.autoedit_syntax = True
# Use colors for displaying information about objects. Because this information
# is passed through a pager (like 'less'), and some pagers get confused with
# color codes, this capability can be turned off.
# c.TerminalInteractiveShell.color_info = True
# A list of ast.NodeTransformer subclass instances, which will be applied to
# user input before code is run.
# c.TerminalInteractiveShell.ast_transformers = []
#
# c.TerminalInteractiveShell.history_length = 10000
# Don't call post-execute functions that have failed in the past.
# c.TerminalInteractiveShell.disable_failing_post_execute = False
# Show rewritten input, e.g. for autocall.
# c.TerminalInteractiveShell.show_rewritten_input = True
# Set the color scheme (NoColor, Linux, or LightBG).
c.TerminalInteractiveShell.colors = 'Linux'
# Autoindent IPython code entered interactively.
# c.TerminalInteractiveShell.autoindent = True
#
# c.TerminalInteractiveShell.separate_in = '\n'
#
# c.TerminalInteractiveShell.separate_out = ''
# Make IPython automatically call any callable object even if you didn't type
# explicit parentheses. For example, 'str 43' becomes 'str(43)' automatically.
# The value can be '0' to disable the feature, '1' for 'smart' autocall, where
# it is not applied if there are no more arguments on the line, and '2' for
# 'full' autocall, where all callable objects are automatically called (even if
# no arguments are present).
c.TerminalInteractiveShell.autocall = 1
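# For example (an illustrative, hypothetical session): with autocall set to 1
# as above, typing `len "hello"` at the prompt is rewritten to `len("hello")`
# before execution, while a bare `len` with no arguments is left untouched.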
# Number of lines of your screen, used to control printing of very long strings.
# Strings longer than this number of lines will be sent through a pager instead
# of directly printed. The default value for this is 0, which means IPython
# will auto-detect your screen size every time it needs to print certain
# potentially long strings (this doesn't change the behavior of the 'print'
# keyword, it's only triggered internally). If for some reason this isn't
# working well (it needs curses support), specify it yourself. Otherwise don't
# change the default.
# c.TerminalInteractiveShell.screen_length = 0
# Set the editor used by IPython (default to $EDITOR/vi/notepad).
c.TerminalInteractiveShell.editor = '/usr/local/bin/vim'
# Shortcut style to use at the prompt
c.TerminalInteractiveShell.editing_mode = 'vi'
# The part of the banner to be printed before the profile
# c.TerminalInteractiveShell.banner1 = 'Python 2.7.1 (r271:86832, Aug 2 2012, 13:51:44) \nType "copyright", "credits" or "license" for more information.\n\nIPython 1.0.0 -- An enhanced Interactive Python.\n? -> Introduction and overview of IPython\'s features.\n%quickref -> Quick reference.\nhelp -> Python\'s own help system.\nobject? -> Details about \'object\', use \'object??\' for extra details.\n'
#
# c.TerminalInteractiveShell.readline_parse_and_bind = ['tab: complete', '"\\C-l": clear-screen', 'set show-all-if-ambiguous on', '"\\C-o": tab-insert', '"\\C-r": reverse-search-history', '"\\C-s": forward-search-history', '"\\C-p": history-search-backward', '"\\C-n": history-search-forward', '"\\e[A": history-search-backward', '"\\e[B": history-search-forward', '"\\C-k": kill-line', '"\\C-u": unix-line-discard']
# The part of the banner to be printed after the profile
# c.TerminalInteractiveShell.banner2 = ''
#
# c.TerminalInteractiveShell.separate_out2 = ''
#
# c.TerminalInteractiveShell.wildcards_case_sensitive = True
#
# c.TerminalInteractiveShell.debug = False
# Set to confirm when you try to exit IPython with an EOF (Control-D in Unix,
# Control-Z/Enter in Windows). By typing 'exit' or 'quit', you can force a
# direct exit without any confirmation.
c.TerminalInteractiveShell.confirm_exit = False
#
# c.TerminalInteractiveShell.ipython_dir = ''
#
# c.TerminalInteractiveShell.readline_remove_delims = '-/~'
# Start logging to the default log file.
# c.TerminalInteractiveShell.logstart = False
# The name of the logfile to use.
# c.TerminalInteractiveShell.logfile = ''
# The shell program to be used for paging.
# c.TerminalInteractiveShell.pager = 'less'
# Enable magic commands to be called without the leading %.
# c.TerminalInteractiveShell.automagic = True
# Save multi-line entries as one entry in readline history
# c.TerminalInteractiveShell.multiline_history = True
#
# c.TerminalInteractiveShell.readline_use = True
# Enable deep (recursive) reloading by default. IPython can use the deep_reload
# module which reloads changes in modules recursively (it replaces the reload()
# function, so you don't need to change anything to use it). deep_reload()
# forces a full reload of modules whose code may have changed, which the default
# reload() function does not. When deep_reload is off, IPython will use the
# normal reload(), but deep_reload will still be available as dreload().
# c.TerminalInteractiveShell.deep_reload = False
# Start logging to the given file in append mode.
# c.TerminalInteractiveShell.logappend = ''
#
# c.TerminalInteractiveShell.xmode = 'Context'
#
# c.TerminalInteractiveShell.quiet = False
# Enable auto setting the terminal title.
# c.TerminalInteractiveShell.term_title = False
#
# c.TerminalInteractiveShell.object_info_string_level = 0
# Set the size of the output cache. The default is 1000, you can change it
# permanently in your config file. Setting it to 0 completely disables the
# caching system, and the minimum value accepted is 20 (if you provide a value
# less than 20, it is reset to 0 and a warning is issued). This limit is
# defined because otherwise you'll spend more time re-flushing a too small cache
# than working
# c.TerminalInteractiveShell.cache_size = 1000
# 'all', 'last', 'last_expr' or 'none', specifying which nodes should be run
# interactively (displaying output from expressions).
# c.TerminalInteractiveShell.ast_node_interactivity = 'last_expr'
# Automatically call the pdb debugger after every exception.
# c.TerminalInteractiveShell.pdb = False
#------------------------------------------------------------------------------
# PromptManager configuration
#------------------------------------------------------------------------------
# This is the primary interface for producing IPython's prompts.
# Output prompt. '\#' will be transformed to the prompt number
# c.PromptManager.out_template = 'Out[\\#]: '
# If True (default), each prompt will be right-aligned with the preceding one.
# c.PromptManager.justify = True
# Input prompt. '\#' will be transformed to the prompt number
# c.PromptManager.in_template = '{my_prompt}'
# Continuation prompt.
# c.PromptManager.in2_template = '... '
#
# c.PromptManager.color_scheme = 'Linux'
#------------------------------------------------------------------------------
# HistoryManager configuration
#------------------------------------------------------------------------------
# A class to organize all history-related functionality in one place.
# HistoryManager will inherit config from: HistoryAccessor
#
# c.HistoryManager.db_log_output = False
#
# c.HistoryManager.db_cache_size = 0
# Path to file to use for SQLite history database.
#
# By default, IPython will put the history database in the IPython profile
# directory. If you would rather share one history among profiles, you can set
# this value in each, so that they are consistent.
#
# Due to an issue with fcntl, SQLite is known to misbehave on some NFS mounts.
# If you see IPython hanging, try setting this to something on a local disk,
# e.g::
#
# ipython --HistoryManager.hist_file=/tmp/ipython_hist.sqlite
# c.HistoryManager.hist_file = u''
# Options for configuring the SQLite connection
#
# These options are passed as keyword args to sqlite3.connect when establishing
# database connections.
# c.HistoryManager.connection_options = {}
# enable the SQLite history
#
# set enabled=False to disable the SQLite history, in which case there will be
# no stored history, no SQLite connection, and no background saving thread.
# This may be necessary in some threaded environments where IPython is embedded.
# c.HistoryManager.enabled = True
#------------------------------------------------------------------------------
# ProfileDir configuration
#------------------------------------------------------------------------------
# An object to manage the profile directory and its resources.
#
# The profile directory is used by all IPython applications, to manage
# configuration, logging and security.
#
# This object knows how to find, create and manage these directories. This
# should be used by any code that wants to handle profiles.
# Set the profile location directly. This overrides the logic used by the
# `profile` option.
# c.ProfileDir.location = u''
#------------------------------------------------------------------------------
# PlainTextFormatter configuration
#------------------------------------------------------------------------------
# The default pretty-printer.
#
# This uses :mod:`IPython.lib.pretty` to compute the format data of the object.
# If the object cannot be pretty printed, :func:`repr` is used. See the
# documentation of :mod:`IPython.lib.pretty` for details on how to write pretty
# printers. Here is a simple example::
#
# def dtype_pprinter(obj, p, cycle):
# if cycle:
# return p.text('dtype(...)')
# if hasattr(obj, 'fields'):
# if obj.fields is None:
# p.text(repr(obj))
# else:
# p.begin_group(7, 'dtype([')
# for i, field in enumerate(obj.descr):
# if i > 0:
# p.text(',')
# p.breakable()
# p.pretty(field)
# p.end_group(7, '])')
# PlainTextFormatter will inherit config from: BaseFormatter
#
# c.PlainTextFormatter.type_printers = {}
#
# c.PlainTextFormatter.newline = '\n'
#
# c.PlainTextFormatter.float_precision = ''
#
# c.PlainTextFormatter.verbose = False
#
# c.PlainTextFormatter.deferred_printers = {}
#
# c.PlainTextFormatter.pprint = True
#
# c.PlainTextFormatter.max_width = 79
#
# c.PlainTextFormatter.singleton_printers = {}
#------------------------------------------------------------------------------
# IPCompleter configuration
#------------------------------------------------------------------------------
# Extension of the completer class with IPython-specific features
# IPCompleter will inherit config from: Completer
# Instruct the completer to omit private method names
#
# Specifically, when completing on ``object.<tab>``.
#
# When 2 [default]: all names that start with '_' will be excluded.
#
# When 1: all 'magic' names (``__foo__``) will be excluded.
#
# When 0: nothing will be excluded.
# c.IPCompleter.omit__names = 2
# Whether to merge completion results into a single list
#
# If False, only the completion results from the first non-empty completer will
# be returned.
# c.IPCompleter.merge_completions = True
# Instruct the completer to use __all__ for the completion
#
# Specifically, when completing on ``object.<tab>``.
#
# When True: only those names in obj.__all__ will be included.
#
# When False [default]: the __all__ attribute is ignored
# c.IPCompleter.limit_to__all__ = False
# Activate greedy completion
#
# This will enable completion on elements of lists, results of function calls,
# etc., but can be unsafe because the code is actually evaluated on TAB.
# c.IPCompleter.greedy = False
#------------------------------------------------------------------------------
# ScriptMagics configuration
#------------------------------------------------------------------------------
# Magics for talking to scripts
#
# This defines a base `%%script` cell magic for running a cell with a program in
# a subprocess, and registers a few top-level magics that call %%script with
# common interpreters.
# Extra script cell magics to define
#
# This generates simple wrappers of `%%script foo` as `%%foo`.
#
# If you want to add script magics that aren't on your path, specify them in
# script_path
# c.ScriptMagics.script_magics = []
# Dict mapping short 'ruby' names to full paths, such as '/opt/secret/bin/ruby'
#
# Only necessary for items in script_magics where the default path will not find
# the right interpreter.
# c.ScriptMagics.script_path = {}
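#------------------------------------------------------------------------------
# Hedged usage note (not part of the original profile): assuming the directory
# name profile_jalanb maps to a profile called 'jalanb', a config file like this
# one is picked up by starting IPython with that profile, e.g.
#
#   ipython --profile=jalanb
#
# and any option set above can still be overridden for a single session on the
# command line, e.g.
#
#   ipython --profile=jalanb --TerminalInteractiveShell.editor=nano
#------------------------------------------------------------------------------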
| jalanb/jab | ipython/profile_jalanb/ipython_config.py | Python | mit | 18,065 | 0.001439 |
# cerbero - a multi-platform build system for Open Source software
# Copyright (C) 2012 Andoni Morales Alastruey <ylatuya@gmail.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Library General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
from cerbero.bootstrap import BootstrapperBase
from cerbero.bootstrap.bootstrapper import register_bootstrapper
from cerbero.enums import Platform, Architecture, Distro, DistroVersion
from cerbero.errors import ConfigurationError
from cerbero.utils import shell
import subprocess
class UnixBootstrapper (BootstrapperBase):
tool = ''
command = ''
yes_arg = ''
checks = []
packages = []
distro_packages = {}
def __init__(self, config, offline, assume_yes):
BootstrapperBase.__init__(self, config, offline)
self.assume_yes = assume_yes
def start(self):
for c in self.checks:
c()
if self.config.distro_packages_install:
packages = self.packages
if self.config.distro_version in self.distro_packages:
packages += self.distro_packages[self.config.distro_version]
extra_packages = self.config.extra_bootstrap_packages.get(
self.config.platform, None)
if extra_packages:
self.packages += extra_packages.get(self.config.distro, [])
tool = self.tool
if self.assume_yes:
            tool += ' ' + self.yes_arg
        tool += ' ' + self.command
shell.call(tool % ' '.join(self.packages))
class DebianBootstrapper (UnixBootstrapper):
tool = 'sudo apt-get'
command = 'install %s'
yes_arg = '-y'
packages = ['autotools-dev', 'automake', 'autoconf', 'libtool', 'g++',
'autopoint', 'make', 'cmake', 'bison', 'flex', 'yasm',
'pkg-config', 'gtk-doc-tools', 'libxv-dev', 'libx11-dev',
'libpulse-dev', 'python3-dev', 'texinfo', 'gettext',
'build-essential', 'pkg-config', 'doxygen', 'curl',
'libxext-dev', 'libxi-dev', 'x11proto-record-dev',
'libxrender-dev', 'libgl1-mesa-dev', 'libxfixes-dev',
'libxdamage-dev', 'libxcomposite-dev', 'libasound2-dev',
'libxml-simple-perl', 'dpkg-dev', 'debhelper',
'build-essential', 'devscripts', 'fakeroot', 'transfig',
'gperf', 'libdbus-glib-1-dev', 'wget', 'glib-networking',
'libxtst-dev', 'libxrandr-dev', 'libglu1-mesa-dev',
'libegl1-mesa-dev', 'git', 'subversion', 'xutils-dev',
'intltool', 'ccache', 'python3-setuptools', 'libssl-dev']
def __init__(self, config, offline, assume_yes):
UnixBootstrapper.__init__(self, config, offline, assume_yes)
if self.config.target_platform == Platform.WINDOWS:
if self.config.arch == Architecture.X86_64:
self.packages.append('libc6:i386')
self.checks.append(self.create_debian_arch_check('i386'))
if self.config.target_platform == Platform.LINUX:
self.packages.append('chrpath')
self.packages.append('libfuse-dev')
if self.config.distro_version in [DistroVersion.DEBIAN_SQUEEZE,
DistroVersion.UBUNTU_MAVERICK, DistroVersion.UBUNTU_LUCID]:
self.packages.remove('glib-networking')
if self.config.distro_version in [DistroVersion.UBUNTU_LUCID]:
self.packages.remove('autopoint')
def create_debian_arch_check(self, arch):
def check_arch():
native_arch = shell.check_call('dpkg --print-architecture')
if native_arch == arch:
return
foreign_archs = shell.check_call('dpkg --print-foreign-architectures')
if arch in foreign_archs.split():
return
raise ConfigurationError(('Architecture %s is missing from your setup. ' + \
'You can add it with: "dpkg --add-architecture %s",' + \
' then run "apt-get update."') \
% (arch, arch))
return check_arch
class RedHatBootstrapper (UnixBootstrapper):
tool = 'dnf'
command = 'install %s'
yes_arg = '-y'
packages = ['gcc', 'gcc-c++', 'automake', 'autoconf', 'libtool',
'gettext-devel', 'make', 'cmake', 'bison', 'flex', 'yasm',
'pkgconfig', 'gtk-doc', 'curl', 'doxygen', 'texinfo',
'texinfo-tex', 'texlive-dvips', 'docbook-style-xsl',
'transfig', 'intltool', 'rpm-build', 'redhat-rpm-config',
'python3-devel', 'libXrender-devel', 'pulseaudio-libs-devel',
'libXv-devel', 'mesa-libGL-devel', 'libXcomposite-devel',
'alsa-lib-devel', 'perl-ExtUtils-MakeMaker', 'libXi-devel',
'perl-XML-Simple', 'gperf', 'gdk-pixbuf2-devel', 'wget',
'docbook-utils-pdf', 'glib-networking', 'help2man',
'dbus-devel', 'glib2-devel', 'libXrandr-devel',
'libXtst-devel', 'git', 'subversion', 'xorg-x11-util-macros',
'mesa-libEGL-devel', 'ccache', 'openssl-devel']
def __init__(self, config, offline, assume_yes):
UnixBootstrapper.__init__(self, config, offline, assume_yes)
if self.config.distro_version < DistroVersion.FEDORA_23:
self.tool = 'yum'
elif self.config.distro_version in [DistroVersion.REDHAT_6, DistroVersion.REDHAT_7]:
self.tool = 'yum'
if self.config.target_platform == Platform.WINDOWS:
if self.config.arch == Architecture.X86_64:
self.packages.append('glibc.i686')
if self.config.distro_version in [DistroVersion.FEDORA_24, DistroVersion.FEDORA_25]:
self.packages.append('libncurses-compat-libs.i686')
if self.config.target_platform == Platform.LINUX:
self.packages.append('chrpath')
self.packages.append('fuse-devel')
        # Use sudo to gain root access on everything except RHEL 6, where su is used
if self.config.distro_version == DistroVersion.REDHAT_6:
self.tool = 'su -c "' + self.tool + '"'
else:
self.tool = 'sudo ' + self.tool
class OpenSuseBootstrapper (UnixBootstrapper):
tool = 'sudo zypper'
command = 'install %s'
yes_arg = '-y'
packages = ['gcc', 'automake', 'autoconf', 'gcc-c++', 'libtool',
'gettext-tools', 'make', 'cmake', 'bison', 'flex', 'yasm',
'gtk-doc', 'curl', 'doxygen', 'texinfo',
'texlive', 'docbook-xsl-stylesheets',
'transfig', 'intltool', 'patterns-openSUSE-devel_rpm_build',
'python3-devel', 'xorg-x11-libXrender-devel', 'libpulse-devel',
'xorg-x11-libXv-devel', 'Mesa-libGL-devel', 'libXcomposite-devel',
'libX11-devel', 'alsa-devel', 'libXi-devel', 'Mesa-devel',
'Mesa-libGLESv3-devel',
'perl-XML-Simple', 'gperf', 'gdk-pixbuf-devel', 'wget',
'docbook-utils', 'glib-networking', 'git', 'subversion', 'ccache',
'openssl-devel']
class ArchBootstrapper (UnixBootstrapper):
tool = 'sudo pacman'
command = ' -S %s --needed'
yes_arg = ' --noconfirm'
packages = ['intltool', 'cmake', 'doxygen', 'gtk-doc',
'libtool', 'bison', 'flex', 'automake', 'autoconf', 'make',
'curl', 'gettext', 'alsa-lib', 'yasm', 'gperf',
'docbook-xsl', 'transfig', 'libxrender',
'libxv', 'mesa', 'python3', 'wget', 'glib-networking', 'git',
'subversion', 'xorg-util-macros', 'ccache', 'openssl']
def __init__(self, config, offline, assume_yes):
UnixBootstrapper.__init__(self, config, offline, assume_yes)
has_multilib = True
try:
subprocess.check_output(["pacman", "-Sp", "gcc-multilib"])
except subprocess.CalledProcessError:
has_multilib = False
if self.config.arch == Architecture.X86_64 and has_multilib:
self.packages.append('gcc-multilib')
else:
self.packages.append('gcc')
class GentooBootstrapper (UnixBootstrapper):
tool = 'sudo emerge'
command = '-u %s'
yes_arg = '' # Does not seem interactive
packages = ['dev-util/intltool', 'sys-fs/fuse', 'dev-util/cmake',
'app-doc/doxygen', 'dev-util/gtk-doc', 'sys-devel/libtool',
'sys-devel/bison', 'sys-devel/flex', 'sys-devel/automake',
'sys-devel/autoconf', 'sys-devel/make', 'net-misc/curl',
'sys-devel/gettext', 'media-libs/alsa-lib', 'media-sound/pulseaudio',
'dev-lang/yasm', 'dev-util/gperf', 'app-text/docbook-xsl-stylesheets',
'media-gfx/transfig', 'x11-libs/libXrender', 'x11-libs/libXv',
'media-libs/mesa', 'net-misc/wget', 'net-libs/glib-networking',
'dev-libs/openssl']
class NoneBootstrapper (BootstrapperBase):
def start(self):
pass
def register_all():
register_bootstrapper(Distro.DEBIAN, DebianBootstrapper)
register_bootstrapper(Distro.REDHAT, RedHatBootstrapper)
register_bootstrapper(Distro.SUSE, OpenSuseBootstrapper)
register_bootstrapper(Distro.ARCH, ArchBootstrapper, DistroVersion.ARCH_ROLLING)
register_bootstrapper(Distro.GENTOO, GentooBootstrapper, DistroVersion.GENTOO_VERSION)
register_bootstrapper(Distro.NONE, NoneBootstrapper)
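# Hedged sketch (not part of cerbero): mirrors how UnixBootstrapper.start()
# assembles the package-manager command line, using DebianBootstrapper's class
# attributes; the package list here is illustrative only.
if __name__ == '__main__':
    tool = DebianBootstrapper.tool + ' ' + DebianBootstrapper.yes_arg + ' ' + DebianBootstrapper.command
    packages = ['automake', 'autoconf', 'libtool']
    # Prints: sudo apt-get -y install automake autoconf libtool
    print(tool % ' '.join(packages))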
| atsushieno/cerbero | cerbero/bootstrap/linux.py | Python | lgpl-2.1 | 10,061 | 0.00497 |
{
'name': 'Discount prepaid order',
'version': '1.0',
'category': 'Custom',
'description': """
    Order discount applied when the order is prepaid and its margin falls between specific values
""",
'author': 'Nadia Ferreyra',
'website': '',
'depends': ['base',
'sale',
'product',
'sale_promotions_extend',
'commercial_rules',
'flask_middleware_connector',
'sale_custom'
],
'data': ['data/product_data.xml',
'data/parameters.xml',
'views/sale_order_view.xml',
'views/account_view.xml'
],
'installable': True
}
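# Hedged note (not part of the original manifest): Odoo reads this dict when it
# discovers the module; every entry in 'depends' must be installed first, and
# each file listed in 'data' is loaded (views, records, parameters) when the
# module is installed or upgraded.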
| Comunitea/CMNT_004_15 | project-addons/prepaid_order_discount/__manifest__.py | Python | agpl-3.0 | 697 | 0.002869 |
# coding: utf-8
"""FETCH parser tests."""
from __future__ import print_function
import unittest
import six
from modoboa_webmail.lib.fetch_parser import FetchResponseParser
from . import data
def dump_bodystructure(fp, bs, depth=0):
"""Dump a parsed BODYSTRUCTURE."""
indentation = " " * (depth * 4)
for mp in bs:
if isinstance(mp, list):
if isinstance(mp[0], list):
print("{}multipart/{}".format(indentation, mp[1]), file=fp)
dump_bodystructure(fp, mp, depth + 1)
else:
dump_bodystructure(fp, mp, depth)
elif isinstance(mp, dict):
if isinstance(mp["struct"][0], list):
print("{}multipart/{}".format(
indentation, mp["struct"][1]), file=fp)
dump_bodystructure(fp, mp["struct"][0], depth + 1)
else:
print("{}{}/{}".format(
indentation, *mp["struct"][:2]), file=fp)
fp.seek(0)
result = fp.read()
return result
class FetchParserTestCase(unittest.TestCase):
"""Test FETCH parser."""
def setUp(self):
"""Setup test env."""
self.parser = FetchResponseParser()
def _test_bodystructure_output(self, bs, expected):
"""."""
r = self.parser.parse(bs)
fp = six.StringIO()
output = dump_bodystructure(fp, r[list(r.keys())[0]]["BODYSTRUCTURE"])
fp.close()
self.assertEqual(output, expected)
return r
def test_parse_bodystructure(self):
"""Test the parsing of several responses containing BS."""
self._test_bodystructure_output(
data.BODYSTRUCTURE_SAMPLE_1, """multipart/alternative
text/plain
text/html
""")
self._test_bodystructure_output(
data.BODYSTRUCTURE_SAMPLE_2, """multipart/mixed
text/plain
message/rfc822
""")
self._test_bodystructure_output(
data.BODYSTRUCTURE_SAMPLE_3, """multipart/mixed
multipart/alternative
text/plain
text/html
application/pdf
""")
self._test_bodystructure_output(
data.BODYSTRUCTURE_SAMPLE_4, """multipart/mixed
multipart/alternative
text/plain
text/html
application/octet-stream
""")
self._test_bodystructure_output(
data.BODYSTRUCTURE_SAMPLE_5, """multipart/alternative
text/plain
text/html
""")
self._test_bodystructure_output(
data.BODYSTRUCTURE_SAMPLE_6, """multipart/mixed
multipart/related
multipart/alternative
text/plain
text/html
image/png
image/jpeg
application/pdf
multipart/alternative
text/plain
text/html
""")
self._test_bodystructure_output(
data.BODYSTRUCTURE_SAMPLE_7, """multipart/mixed
multipart/mixed
text/plain
application/octet-stream
""")
self._test_bodystructure_output(
data.BODYSTRUCTURE_SAMPLE_8, "text/html\n")
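# Hedged illustration (not one of the data.BODYSTRUCTURE_SAMPLE_* fixtures used
# above): FetchResponseParser consumes raw IMAP FETCH responses whose
# BODYSTRUCTURE item follows RFC 3501, where a single text/plain part looks
# roughly like
#   ("text" "plain" ("charset" "utf-8") NIL NIL "quoted-printable" 252 8)
# and multipart messages nest such part lists inside an enclosing list that
# ends with the subtype, e.g. (...parts... "alternative").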
| modoboa/modoboa-webmail | modoboa_webmail/tests/test_fetch_parser.py | Python | mit | 3,016 | 0 |
from django.core.urlresolvers import reverse
from django.db import models
from django.utils.translation import ugettext, ugettext_lazy as _
from django.contrib.auth.models import User
from django.contrib.contenttypes.fields import GenericRelation
from django.shortcuts import get_object_or_404
from django_countries import countries as COUNTRIES
from localflavor.us.us_states import STATE_CHOICES
from localflavor.ca.ca_provinces import PROVINCE_CHOICES
from tendenci.apps.forms_builder.forms.settings import FIELD_MAX_LENGTH, LABEL_MAX_LENGTH
from tendenci.apps.forms_builder.forms.managers import FormManager
from tendenci.apps.perms.models import TendenciBaseModel
from tendenci.apps.perms.object_perms import ObjectPermission
from tendenci.apps.user_groups.models import Group, GroupMembership
from tendenci.apps.site_settings.utils import get_setting
from tendenci.apps.base.fields import EmailVerificationField
from tendenci.apps.base.utils import checklist_update
from tendenci.apps.redirects.models import Redirect
from tendenci.libs.abstracts.models import OrderingBaseModel
#STATUS_DRAFT = 1
#STATUS_PUBLISHED = 2
STATUS_CHOICES = (
('draft', _("Draft")),
('published', _("Published")),
)
FIELD_CHOICES = (
("CharField", _("Text")),
("CharField/django.forms.Textarea", _("Paragraph Text")),
("BooleanField", _("Checkbox")),
("ChoiceField/django.forms.RadioSelect", _("Single-select - Radio Button")),
("ChoiceField", _("Single-select - From a List")),
("MultipleChoiceField/django.forms.CheckboxSelectMultiple", _("Multi-select - Checkboxes")),
("MultipleChoiceField", _("Multi-select - From a List")),
("EmailVerificationField", _("Email")),
("CountryField", _("Countries")),
("StateProvinceField", _("States/Provinces")),
("FileField", _("File upload")),
("DateField/django.forms.extras.SelectDateWidget", _("Date - Select")),
("DateField/django.forms.DateInput", _("Date - Text Input")),
("DateTimeField", _("Date/time")),
("CharField/tendenci.apps.forms_builder.forms.widgets.Description", _("Description")),
("CharField/tendenci.apps.forms_builder.forms.widgets.Header", _("Section Heading")),
)
FIELD_FUNCTIONS = (
("GroupSubscription", _("Subscribe to Group")),
("GroupSubscriptionAuto", _("Subscribe to Group")),
("EmailFirstName", _("First Name")),
("EmailLastName", _("Last Name")),
("EmailFullName", _("Full Name")),
("EmailPhoneNumber", _("Phone Number")),
("Recipients", _("Email to Recipients")),
)
BILLING_PERIOD_CHOICES = (
('month', _('Month(s)')),
('year', _('Year(s)')),
('week', _('Week(s)')),
('day', _('Day(s)')),
)
DUE_SORE_CHOICES = (
('start', _('start')),
('end', _('end')),
)
class Form(TendenciBaseModel):
"""
A user-built form.
"""
FIRST = 1
MIDDLE = 2
LAST = 3
FIELD_POSITION_CHOICES = (
(FIRST, _("First")),
(MIDDLE, _("Middle")),
(LAST, _("Last")),
)
INTRO_DEFAULT_NAME = _("Intro")
FIELDS_DEFAULT_NAME = _("Fields")
PRICING_DEFAULT_NAME = _("Pricings")
title = models.CharField(_("Title"), max_length=100)
slug = models.SlugField(max_length=100, unique=True)
intro = models.TextField(_("Intro"), max_length=2000, blank=True)
response = models.TextField(_("Confirmation Text"), max_length=2000, blank=True)
email_text = models.TextField(_("Email Text to Submitter"), default='', blank=True,
help_text=_("If Send email is checked, this is the text that will be sent in an email to the person submitting the form."), max_length=2000)
subject_template = models.CharField(_("Template for email subject "),
help_text=_("""Options include [title] for form title, and
name of form fields inside brackets [ ]. E.x. [first name] or
[email address]"""),
default="[title] - [first name] [last name] - [phone]",
max_length=200,
blank=True, null=True)
send_email = models.BooleanField(_("Send email"), default=False,
help_text=_("If checked, the person submitting the form will be sent an email."))
email_from = models.EmailField(_("Reply-To address"), blank=True,
help_text=_("The address the replies to the email will be sent to"))
email_copies = models.CharField(_("Send copies to"), blank=True,
help_text=_("One or more email addresses, separated by commas"),
max_length=2000)
completion_url = models.CharField(_("Completion URL"), max_length=1000, blank=True, null=True,
help_text=_("Redirect to this page after form completion. Absolute URLS should begin with http. Relative URLs should begin with a forward slash (/)."))
template = models.CharField(_('Template'), max_length=50, blank=True)
# payments
custom_payment = models.BooleanField(_("Is Custom Payment"), default=False,
help_text=_("If checked, please add pricing options below. Leave the price blank if users can enter their own amount."))
recurring_payment = models.BooleanField(_("Is Recurring Payment"), default=False,
help_text=_("If checked, please add pricing options below. Leave the price blank if users can enter their own amount. Please also add an email field as a required field with type 'email'"))
payment_methods = models.ManyToManyField("payments.PaymentMethod", blank=True)
perms = GenericRelation(ObjectPermission,
object_id_field="object_id", content_type_field="content_type")
# positions for displaying the fields
intro_position = models.IntegerField(_("Intro Position"), choices=FIELD_POSITION_CHOICES, default=FIRST)
fields_position = models.IntegerField(_("Fields Position"), choices=FIELD_POSITION_CHOICES, default=MIDDLE)
pricing_position = models.IntegerField(_("Pricing Position"), choices=FIELD_POSITION_CHOICES, default=LAST)
# variable name of form main sections
intro_name = models.CharField(_("Intro Name"), max_length=50,
default=INTRO_DEFAULT_NAME, blank=True)
fields_name = models.CharField(_("Fields Name"), max_length=50,
default=FIELDS_DEFAULT_NAME, blank=True)
pricing_name = models.CharField(_("Pricing Name"), max_length=50,
default=PRICING_DEFAULT_NAME, blank=True)
objects = FormManager()
class Meta:
verbose_name = _("Form")
verbose_name_plural = _("Forms")
permissions = (("view_form", _("Can view form")),)
app_label = 'forms'
def __unicode__(self):
return self.title
def save(self, *args, **kwargs):
# If this is the current contact form, update checklist
if str(self.pk) == get_setting('site', 'global', 'contact_form'):
checklist_update('update-contact')
super(Form, self).save(*args, **kwargs)
@models.permalink
def get_absolute_url(self):
return ("form_detail", (), {"slug": self.slug})
def get_payment_type(self):
if self.recurring_payment and self.custom_payment:
return _("Custom Recurring Payment")
if self.recurring_payment:
return _("Recurring Payment")
if self.custom_payment:
return _("Custom Payment")
def admin_link_view(self):
url = self.get_absolute_url()
return "<a href='%s'>%s</a>" % (url, ugettext("View on site"))
admin_link_view.allow_tags = True
admin_link_view.short_description = ""
def admin_link_export(self):
url = reverse("admin:forms_form_export", args=(self.id,))
return "<a href='%s'>%s</a>" % (url, ugettext("Export entries"))
admin_link_export.allow_tags = True
admin_link_export.short_description = ""
def has_files(self):
for field in self.fields.all():
if field.field_type == 'FileField':
return True
return False
class FieldManager(models.Manager):
"""
    Only show visible fields when displaying the actual form.
"""
def visible(self):
        return self.filter(visible=True)
    def auto_fields(self):
        """
        Get all Auto-fields. (As of writing, this is only GroupSubscriptionAuto)
        """
        return self.filter(visible=False, field_function="GroupSubscriptionAuto")
class Field(OrderingBaseModel):
"""
A field for a user-built form.
'field_function' has the following options:
"GroupSubscription"
- Subscribes form entries to the group specified
- Required to be a BooleanField
"EmailFirstName", "EmailLastName", "EmailPhoneNumber", "EmailFullName"
- Markers for specific fields that need to be referenced in emails
- Required to be a CharField
- Includes their respective values to the email's subject
"""
form = models.ForeignKey("Form", related_name="fields")
label = models.CharField(_("Label"), max_length=LABEL_MAX_LENGTH)
field_type = models.CharField(_("Type"), choices=FIELD_CHOICES,
max_length=64)
field_function = models.CharField(_("Special Functionality"),
choices=FIELD_FUNCTIONS, max_length=64, null=True, blank=True)
required = models.BooleanField(_("Required"), default=True)
visible = models.BooleanField(_("Visible"), default=True)
choices = models.CharField(_("Choices"), max_length=1000, blank=True,
help_text=_("Comma separated options where applicable"))
default = models.CharField(_("Default"), max_length=1000, blank=True,
help_text=_("Default value of the field"))
objects = FieldManager()
class Meta:
verbose_name = _("Field")
verbose_name_plural = _("Fields")
#order_with_respect_to = "form"
app_label = 'forms'
def __unicode__(self):
return self.label
def get_field_class(self):
if "/" in self.field_type:
field_class, field_widget = self.field_type.split("/")
else:
field_class, field_widget = self.field_type, None
return field_class
def get_field_widget(self):
if "/" in self.field_type:
field_class, field_widget = self.field_type.split("/")
else:
field_class, field_widget = self.field_type, None
return field_widget
def get_choices(self):
if self.field_type == 'CountryField':
exclude_list = ['GB', 'US', 'CA']
countries = ((name,name) for key,name in COUNTRIES if key not in exclude_list)
initial_choices = ((_('United States'), _('United States')),
(_('Canada'), _('Canada')),
(_('United Kingdom'), _('United Kingdom')),
('','-----------'))
choices = initial_choices + tuple(countries)
elif self.field_type == 'StateProvinceField':
choices = (('','-----------'),) + STATE_CHOICES + PROVINCE_CHOICES
choices = sorted(choices)
elif self.field_function == 'Recipients':
choices = [(label+':'+val, label) for label, val in (i.split(":") for i in self.choices.split(","))]
else:
choices = [(val, val) for val in self.choices.split(",")]
return choices
def execute_function(self, entry, value, user=None):
if self.field_function in ["GroupSubscription", "GroupSubscriptionAuto"]:
if value:
for val in self.choices.split(','):
group, created = Group.objects.get_or_create(name=val.strip())
if user and group.allow_self_add:
try:
group_membership = GroupMembership.objects.get(group=group, member=user)
except GroupMembership.DoesNotExist:
group_membership = GroupMembership(group=group, member=user)
group_membership.creator_id = user.id
group_membership.creator_username = user.username
group_membership.role = 'subscriber'
group_membership.owner_id = user.id
group_membership.owner_username = user.username
group_membership.save()
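# Hedged illustration (not part of the original module): Field.field_type packs
# the form-field class and an optional widget path into a single string, which
# get_field_class()/get_field_widget() above split apart, e.g.
#   "CharField/django.forms.Textarea" -> class "CharField", widget "django.forms.Textarea"
#   "CharField"                       -> class "CharField", widget None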
class FormEntry(models.Model):
"""
An entry submitted via a user-built form.
"""
form = models.ForeignKey("Form", related_name="entries")
entry_time = models.DateTimeField(_("Date/time"))
entry_path = models.CharField(max_length=200, blank=True, default="")
payment_method = models.ForeignKey('payments.PaymentMethod', null=True)
pricing = models.ForeignKey('Pricing', null=True)
creator = models.ForeignKey(User, related_name="formentry_creator", null=True, on_delete=models.SET_NULL)
create_dt = models.DateTimeField(auto_now_add=True)
update_dt = models.DateTimeField(auto_now=True)
class Meta:
verbose_name = _("Form entry")
verbose_name_plural = _("Form entries")
app_label = 'forms'
def __unicode__(self):
return unicode(self.id)
@models.permalink
def get_absolute_url(self):
return ("form_entry_detail", (), {"id": self.pk})
def entry_fields(self):
return self.fields.all().order_by('field__position')
def get_name_email(self):
"""Try to figure out the name and email from this entry
Assume: 1) email field type is EmailField
2) use the labels to identify the name.
We might need a better solution because this will not work
if the form is not in English, or labels for names are not
'first name', 'last name' or 'name'.
Update: You can now use the special functions that start with
"Email" to mark fields you need for this method
instead of relying on the label as a marker.
"""
field_entries = self.fields.all()
first_name = ""
last_name = ""
name = ""
email = ""
for entry in field_entries:
field = entry.field
if field.field_type.lower() == 'emailfield':
email = entry.value
if field.field_type.lower() == 'emailverificationfield':
email = entry.value
if field.label.lower() in ['name']:
name = entry.value
if field.label.lower() in ['first name']:
first_name = entry.value
if field.label.lower() in ['last name']:
last_name = entry.value
if not name:
if first_name or last_name:
name = '%s %s' % (first_name, last_name)
if not name:
# pick the name from email
if email:
if '@' in email:
name, domain = email.split('@')
else:
name = email
return (name, email)
def get_value_of(self, field_function):
"""
Returns the value of the a field entry based
on the field_function specified
"""
for entry in self.fields.order_by('field__position'):
if entry.field.field_function == field_function:
return entry.value
return ''
def get_type_of(self, field_type):
"""
Returns the value of the a field entry based
on the field_type specified
"""
for entry in self.fields.all():
if entry.field.field_type.lower() == field_type:
return entry.value
return ''
def get_first_name(self):
return self.get_value_of("EmailFirstName")
def get_last_name(self):
return self.get_value_of("EmailLastName")
def get_full_name(self):
return self.get_value_of("EmailFullName")
def get_phone_number(self):
return self.get_value_of("EmailPhoneNumber")
def get_function_email_recipients(self):
email_list = set()
for entry in self.fields.order_by('field__position'):
if entry.field.field_function == 'Recipients' and entry.value:
if entry.field.field_type == 'BooleanField':
for email in entry.field.choices.split(","):
email_list.add(email.strip())
else:
for email in entry.value.split(","):
email = email.split(":")
if len(email) > 1:
email_list.add(email[1].strip())
return email_list
def get_email_address(self):
return self.get_type_of("emailverificationfield")
# Called by payments_pop_by_invoice_user in Payment model.
def get_payment_description(self, inv):
"""
The description will be sent to payment gateway and displayed on invoice.
If not supplied, the default description will be generated.
This will pass the First Name and Last Name from the "Billing Information" screen, the value in the "Site Display Name"
setting in Site Settings, and the name of the form that was submitted.
"""
description = '%s Invoice %d, form: "%s", Form Entry Id (%d), billed to: %s %s.' % (
get_setting('site', 'global', 'sitedisplayname'),
inv.id,
self.form.title,
inv.object_id,
inv.bill_to_first_name,
inv.bill_to_last_name,
)
return description
def set_group_subscribers(self):
for entry in self.fields.filter(field__field_function__in=["GroupSubscription", "GroupSubscriptionAuto"]):
entry.field.execute_function(self, entry.value, user=self.creator)
class FieldEntry(models.Model):
"""
A single field value for a form entry submitted via a user-built form.
"""
entry = models.ForeignKey("FormEntry", related_name="fields")
field = models.ForeignKey("Field", related_name="field")
value = models.CharField(max_length=FIELD_MAX_LENGTH)
class Meta:
verbose_name = _("Form field entry")
verbose_name_plural = _("Form field entries")
app_label = 'forms'
def __unicode__(self):
return ('%s: %s' % (self.field.label, self.value))
def include_in_email(self):
widget = self.field.get_field_widget()
field_class = self.field.get_field_class()
if widget == 'tendenci.apps.forms_builder.forms.widgets.Description':
return False
if widget == 'tendenci.apps.forms_builder.forms.widgets.Header':
return False
if field_class == 'FileField':
return False
return True
class Pricing(models.Model):
"""
Pricing options for custom payment forms.
"""
form = models.ForeignKey('Form')
label = models.CharField(max_length=100)
description = models.TextField(_("Pricing Description"), blank=True)
price = models.DecimalField(
max_digits=10,
decimal_places=2,
blank=True,
null=True,
help_text=_("Leaving this field blank allows visitors to set their own price")
)
# Recurring payment fields
taxable = models.BooleanField(default=False)
tax_rate = models.DecimalField(blank=True, max_digits=5, decimal_places=4, default=0,
help_text=_('Example: 0.0825 for 8.25%.'))
billing_period = models.CharField(max_length=50, choices=BILLING_PERIOD_CHOICES,
default='month')
billing_frequency = models.IntegerField(default=1)
num_days = models.IntegerField(default=0)
due_sore = models.CharField(_("Billing cycle start or end date"), max_length=20,
choices=DUE_SORE_CHOICES, default='start')
has_trial_period = models.BooleanField(default=False)
trial_period_days = models.IntegerField(default=0)
trial_amount = models.DecimalField(max_digits=15, decimal_places=2, blank=True, null=True, default=0.0)
class Meta:
ordering = ["pk"]
app_label = 'forms'
def __unicode__(self):
currency_symbol = get_setting("site", "global", "currencysymbol")
if not currency_symbol:
currency_symbol = '$'
return "%s - %s%s" % (self.label, currency_symbol, self.price, )
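# Hedged illustration (not part of the original module): the 'Recipients' branch
# of Field.get_choices() expects Field.choices formatted as comma-separated
# "Label:address" pairs, e.g.
#   "Support:support@example.com,Sales:sales@example.com"
# which produces
#   [('Support:support@example.com', 'Support'),
#    ('Sales:sales@example.com', 'Sales')]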
| alirizakeles/tendenci | tendenci/apps/forms_builder/forms/models.py | Python | gpl-3.0 | 20,327 | 0.002903 |
from django.db import models
# Create your models here.
class ImageModel(models.Model):
image = models.ImageField(upload_to = 'pic_folder/')
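# Hedged usage sketch (not part of the original app): ImageField requires Pillow
# and a configured MEDIA_ROOT/MEDIA_URL; a minimal upload form could look like
#
#   from django import forms
#
#   class ImageForm(forms.ModelForm):
#       class Meta:
#           model = ImageModel
#           fields = ['image']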
| MilesDuronCIMAT/django_image | upload_image/models.py | Python | mit | 145 | 0.027586 |
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
# Copyright (c) 2014 Mozilla Corporation
import hashlib
class message(object):
def __init__(self):
'''
takes an incoming nagios message and assigns a static ID
so we always update the same doc for current status.
'''
# this plugin
# sets a static document ID
# for a particular event to allow you to have an event that just updates
# current status
self.registration = ['nagios_hostname']
self.priority = 5
def onMessage(self, message, metadata):
docid = hashlib.md5('nagiosstatus' + message['details']['nagios_hostname']).hexdigest()
metadata['id'] = docid
return (message, metadata)
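# Hedged usage sketch (not part of the original plugin, Python 2 to match the
# plugin's md5-on-str usage): onMessage derives a deterministic document id from
# the hostname, so repeated status events for the same host update one document.
if __name__ == '__main__':
    plugin = message()
    event = {'details': {'nagios_hostname': 'web1.example.com'}}
    event, metadata = plugin.onMessage(event, {})
    print(metadata['id'])  # the same hostname always yields the same id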
| mpurzynski/MozDef | mq/plugins/nagioshostname.py | Python | mpl-2.0 | 902 | 0.002217 |
# Copyright David Abrahams 2004. Distributed under the Boost
# Software License, Version 1.0. (See accompanying
# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
'''
>>> from staticmethod_ext import *
>>> class X1(X):
... pass
>>> x = X(16)
>>> x1 = X1(17)
>>> x1.count()
2
>>> x.count()
2
>>> X1.count()
2
>>> X.count()
2
>>> x1.magic()
7654321
>>> x.magic()
7654321
>>> X1.magic()
7654321
>>> X.magic()
7654321
'''
def run(args = None):
import sys
import doctest
if args is not None:
sys.argv = args
return doctest.testmod(sys.modules.get(__name__))
if __name__ == '__main__':
print "running..."
import sys
status = run()[0]
if (status == 0): print "Done."
sys.exit(status)
| alexa-infra/negine | thirdparty/boost-python/libs/python/test/staticmethod.py | Python | mit | 826 | 0.007264 |