text | repo_name | path | language | license | size | score
---|---|---|---|---|---|---
stringlengths 6–947k | stringlengths 5–100 | stringlengths 4–231 | stringclasses 1 value | stringclasses 15 values | int64 6–947k | float64 0–0.34
from numpy import *
from colorsys import *
import Tkinter as tk
import ttk
import PIL.Image, PIL.ImageTk
#-----------------------------------------------------------------------------------------
# HSV picker
# Three horizontal scales for Hue, Sat, Val
class HSV_Picker:
panel_size = 290, 32
hue, sat, val = 0, 0, 0
hue_img, sat_img, val_img = None, None, None
def __init__( self, parent, color_broadcast=None ):
self.parent = parent
self.frame = tk.Frame(self.parent)
self.colorbc = color_broadcast
# Get initial color
self.receive_color()
# Create initial images
self.create_img()
# setup frames
self.hue_panel = tk.Label(self.frame, image=self.hue_img, bd=0,
width=self.panel_size[0], height=self.panel_size[1] )
self.sat_panel = tk.Label(self.frame, image=self.sat_img, bd=0,
width=self.panel_size[0], height=self.panel_size[1] )
self.val_panel = tk.Label(self.frame, image=self.val_img, bd=0,
width=self.panel_size[0], height=self.panel_size[1] )
# bind event handlers
self.hue_panel.bind('<Button-1>', self.on_hue_click)
self.hue_panel.bind('<B1-Motion>', self.on_hue_click)
self.sat_panel.bind('<Button-1>', self.on_sat_click)
self.sat_panel.bind('<B1-Motion>', self.on_sat_click)
self.val_panel.bind('<Button-1>', self.on_val_click)
self.val_panel.bind('<B1-Motion>', self.on_val_click)
self.parent.bind('<<NotebookTabChanged>>', self.on_tab_changed)
self.place()
def place(self,**args): # place frames on grid
self.frame.grid(args)
tk.Label(self.frame, text='Hue').grid(column=0, row=0, padx=8, pady=(6,0), sticky=tk.W)
self.hue_panel.grid(column=0, row=1, padx=8, pady=(0,6), sticky=tk.W+tk.E)
tk.Label(self.frame, text='Saturation').grid(column=0, row=2, padx=8, pady=0, sticky=tk.W)
self.sat_panel.grid(column=0, row=3, padx=8, pady=(0,6), sticky=tk.W+tk.E)
tk.Label(self.frame, text='Value (Brightness)').grid(column=0, row=4, padx=8, pady=0, sticky=tk.W)
self.val_panel.grid(column=0, row=5, padx=8, pady=(0,6), sticky=tk.W+tk.E)
##self.hue_panel.grid(column=0, row=0, padx=8, pady=8, sticky=tk.W+tk.E)
##self.sat_panel.grid(column=0, row=1, padx=8, pady=8, sticky=tk.W+tk.E)
##self.val_panel.grid(column=0, row=2, padx=8, pady=8, sticky=tk.W+tk.E)
def create_img(self):
self.create_hue_img()
self.create_sat_img()
self.create_val_img()
def create_hue_img(self):
w,h = self.panel_size
if (self.hue_img==None): # First call, create color scale
hue_scale = empty((h,w,3), dtype=uint8)
hue_scale[:] = 255*array([hsv_to_rgb(x,0.9,0.9) for x in 1.*arange(0,w)/w])
self.hue_scale = hue_scale
# Mark current value
hue_scale = self.hue_scale.copy()
hue_scale[:, int(self.hue*(w-1)), :] = 0
# Create image object for gui
hue_img = PIL.Image.frombuffer('RGB', (w,h), hue_scale, 'raw', 'RGB', 0, 1)
if (self.hue_img==None):
self.hue_img = PIL.ImageTk.PhotoImage( hue_img )
else:
self.hue_img.paste( hue_img ) # PASTE! Do not replace. Image frame remembers original object
def create_sat_img(self):
w,h = self.panel_size
sat_scale = empty((h,w,3), dtype=uint8)
sat_scale[:] = 255*array([hsv_to_rgb(self.hue, x, 1) for x in 1.*arange(0,w)/w])
#Mark current value
sat_scale[:, int(self.sat*(w-1)), :] = 0
# Create image object for gui
sat_img = PIL.Image.frombuffer('RGB', (w,h), sat_scale, 'raw', 'RGB', 0, 1)
if (self.sat_img==None):
self.sat_img = PIL.ImageTk.PhotoImage( sat_img )
else:
self.sat_img.paste( sat_img ) # PASTE! Do not replace. Image frame remembers original object
def create_val_img(self):
w,h = self.panel_size
val_scale = empty((h,w,3), dtype=uint8)
val_scale[:] = 255*array([hsv_to_rgb(self.hue, self.sat, x) for x in 1.*arange(0,w)/w])
# Mark current value
val_scale[:, int(self.val*(w-1)), :] = 255 if self.val<0.5 else 0
# Create image object for gui
val_img = PIL.Image.frombuffer('RGB', (w,h), val_scale, 'raw', 'RGB', 0, 1)
if (self.val_img==None):
self.val_img = PIL.ImageTk.PhotoImage( val_img )
else:
self.val_img.paste( val_img ) # PASTE! Do not replace. Image frame remembers original object
def on_hue_click(self, event):
x = clip( event.x, 0, self.panel_size[0] )
print 'x=', x
self.hue = float(x)/self.panel_size[0]
print "hue=", self.hue
self.create_hue_img()
self.create_sat_img()
self.create_val_img()
self.broadcast_color()
def on_sat_click(self, event):
x = clip( event.x, 0, self.panel_size[0] )
print 'x=', x
self.sat = float(x)/self.panel_size[0]
print "sat=", self.sat
self.create_sat_img()
self.create_val_img()
self.broadcast_color()
def on_val_click(self, event):
x = clip( event.x, 0, self.panel_size[0] )
print 'x=', x
self.val = float(x)/self.panel_size[0]
print "val=", self.val
self.create_sat_img()
self.create_val_img()
self.broadcast_color()
def on_tab_changed(self, event):
print 'HSV tab'
self.receive_color()
self.create_img()
self.broadcast_color()
def broadcast_color(self):
if self.colorbc:
rgb = hsv_to_rgb(self.hue, self.sat, self.val)
var = ( ('H',self.hue), ('S',self.sat), ('V',self.val) )
self.colorbc.set( rgb, var )
def receive_color(self):
if self.colorbc:
r,g,b = self.colorbc.get_rgb()
else: r,g,b = 0,0,0
self.hue, self.sat, self.val = rgb_to_hsv(r,g,b)
#-----------------------------------------------------------------------------------------
# H(SV) picker
# Two widgets: sat-val plane, vertical hue scale
class H_SV_Picker:
hue_panel_size = 32, 256
sv_panel_size = 256, 256
hue, sat, val = 0, 0, 0
hue_img = None
sv_img = None
def __init__(self, parent, color_broadcast=None):
self.parent = parent
self.frame = tk.Frame(self.parent)
self.colorbc = color_broadcast
# Get initial color
self.receive_color()
# Create initial images
self.create_img()
# setup frames
self.sv_panel = tk.Label(self.frame, image=self.sv_img, bd=0,
width=self.sv_panel_size[0], height=self.sv_panel_size[1])
self.hue_panel = tk.Label(self.frame, image=self.hue_img, bd=0,
width=self.hue_panel_size[0], height=self.hue_panel_size[1] )
# bind event handlers
self.sv_panel.bind('<Button-1>', self.on_sv_click)
self.sv_panel.bind('<B1-Motion>', self.on_sv_click)
self.hue_panel.bind('<Button-1>', self.on_hue_click)
self.hue_panel.bind('<B1-Motion>', self.on_hue_click)
self.parent.bind('<<NotebookTabChanged>>', self.on_tab_changed)
self.place()
def place(self, **args): # place frames on grid
self.frame.grid(args)
tk.Label(self.frame, text='Saturation / Value (Brightness)').grid(column=0, row=0, padx=(8,4), pady=(4,0), sticky=tk.W)
self.sv_panel.grid(column=0, row=1, padx=(8,4), pady=(2,8), sticky=tk.W+tk.E+tk.N+tk.S)
tk.Label(self.frame, text='Hue').grid(column=1, row=0, padx=(4,8), pady=(4,0))
self.hue_panel.grid(column=1, row=1, padx=(4,8), pady=(2,8), sticky=tk.N+tk.S)
def create_hue_img(self):
w,h = self.hue_panel_size
if (self.hue_img==None): # First call, create static hue-scale
hue_scale = 255*array([hsv_to_rgb(1.-y,0.9,0.9) for y in 1.*arange(0,h)/h])
self.hue_scale = hue_scale.astype(uint8).copy() # Make sure to keep a copy!
hue_scale = self.hue_scale.copy()
hue_scale[ int((1-self.hue)*(h-1)), :]=0 # Mark current hue value
hue_img = PIL.Image.frombuffer('RGB', (1,h), hue_scale, 'raw', 'RGB', 0, 1)
hue_img = hue_img.resize( self.hue_panel_size )
if (self.hue_img==None):
self.hue_img = PIL.ImageTk.PhotoImage( hue_img )
else:
self.hue_img.paste( hue_img ) # PASTE! Do not replace. Image frame remembers original object
def create_sv_img(self, hue=None):
if (hue==None): hue=self.hue
rgb = array(hsv_to_rgb(self.hue,1,1))
# Create sat axis for given hue
        # Loop equivalent: for si in range(256): sat[si, :] = (rgb - 1)*si/256. + 1
sat = (full((256,3), rgb )-1) * tile( arange(256)[:,newaxis], (1,3))/256. + 1
# Create sat-val plane from sat axis
        # Loop equivalent: for vi in range(256): sv[vi, :, :] = sat * (255 - vi)/256.
sv = transpose( tile(sat[:,newaxis], (256,1) ), (1,0,2) )* repeat(arange(256)[::-1],256*3).reshape(256,256,3)/256.
# Mark click position
s,v = int(self.sat*255), int(255-self.val*255)
s0=max(s-10,0); s1=min(s+10,255); v0=max(v-10,0); v1=min(v+10,255)
c = 1. if v>100 else 0.
sv[v,s0:s1,:] = c; sv[v0:v1,s,:] = c
# Create image object from data
sv_img = PIL.Image.frombuffer('RGB', (256,256), (sv*255).astype(uint8), 'raw', 'RGB', 0, 1)
sv_img = sv_img.resize( self.sv_panel_size )
if (self.sv_img==None):
self.sv_img = PIL.ImageTk.PhotoImage( sv_img ) # Make sure to keep a copy!
else:
self.sv_img.paste( sv_img ) # PASTE! Do not replace. Image frame remembers original object
def create_img(self):
self.create_hue_img()
self.create_sv_img()
def on_hue_click(self, event):
y = clip( event.y, 0, self.hue_panel_size[1] )
print 'y=', y
self.hue = 1.-float(y)/self.hue_panel_size[1]
print "hue=", self.hue
self.create_hue_img()
self.create_sv_img()
self.broadcast_color()
def on_sv_click(self, event):
x = clip( event.x, 0, self.sv_panel_size[0] )
y = clip( event.y, 0, self.sv_panel_size[1] )
self.sat = float(x)/self.sv_panel_size[0]
self.val = 1.-float(y)/self.sv_panel_size[1]
print "sat=", self.sat, "val=", self.val
self.create_sv_img()
self.broadcast_color()
def on_tab_changed(self, event):
print 'H(SV) tab'
self.receive_color()
self.create_img()
self.broadcast_color()
def broadcast_color(self):
if self.colorbc:
rgb = hsv_to_rgb(self.hue, self.sat, self.val)
var = ( ('H',self.hue), ('S',self.sat), ('V',self.val) )
self.colorbc.set( rgb, var )
def receive_color(self):
if self.colorbc:
r,g,b = self.colorbc.get_rgb()
else: r,g,b = 0,0,0
self.hue, self.sat, self.val = rgb_to_hsv(r,g,b)
#-----------------------------------------------------------------------------------------
# (HS)V picker
# Two widgets: hue-sat plane, vertical val scale
class HS_V_Picker:
hs_panel_size = 256, 256
val_panel_size = 32, 256
hue, sat, val = 0, 0, 0
hs_img = None
val_img = None
def __init__(self, parent, color_broadcast=None):
self.parent = parent
self.frame = tk.Frame(self.parent)
self.colorbc = color_broadcast
# Get initial color
self.receive_color()
# Create initial images
self.create_img()
# setup frames
self.hs_panel = tk.Label(self.frame, image=self.hs_img, bd=0,
width=self.hs_panel_size[0], height=self.hs_panel_size[1])
self.val_panel = tk.Label(self.frame, image=self.val_img, bd=0,
width=self.val_panel_size[0], height=self.val_panel_size[1] )
# bind event handlers
self.hs_panel.bind('<Button-1>', self.on_hs_click)
self.hs_panel.bind('<B1-Motion>', self.on_hs_click)
self.val_panel.bind('<Button-1>', self.on_val_click)
self.val_panel.bind('<B1-Motion>', self.on_val_click)
self.parent.bind('<<NotebookTabChanged>>', self.on_tab_changed)
self.place()
def place(self, **args): # place frames on grid
self.frame.grid(args)
tk.Label(self.frame, text='Hue / Saturation').grid(column=0, row=0, padx=(8,4), pady=(4,0), sticky=tk.W)
self.hs_panel.grid(column=0, row=1, padx=(8,4), pady=(2,8), sticky=tk.W+tk.E+tk.N+tk.S)
tk.Label(self.frame, text='Value').grid(column=1, row=0, padx=(4,8), pady=(4,0))
self.val_panel.grid(column=1, row=1, padx=(4,8), pady=(2,8), sticky=tk.N+tk.S)
def create_val_img(self):
w,h = self.val_panel_size
if (self.val_img==None): # First call, create static val-scale
val_scale = repeat(1.0*arange(h)[::-1]/h, w*3).reshape((h,w,3))
self.val_scale = (255*val_scale).astype(uint8)
val_scale = self.val_scale.copy()
rgb = hsv_to_rgb(self.hue, self.sat, 0.9)
#val_scale *= rgb
val_scale = (val_scale.astype(float) * rgb).astype(uint8)
# Mark current value
val_scale[ int((1-self.val)*(h-1)), :, :] = 255 if self.val<0.5 else 0
# Create image object for gui
val_img = PIL.Image.frombuffer('RGB', (w,h), val_scale, 'raw', 'RGB', 0, 1)
if (self.val_img==None):
self.val_img = PIL.ImageTk.PhotoImage( val_img )
else:
self.val_img.paste( val_img ) # PASTE! Do not replace. Image frame remembers original object
def create_hs_img(self):
w,h = self.hs_panel_size
if (self.hs_img==None): # First call, create static hue-sat-scale
hue_scale = array([hsv_to_rgb(x,1,1) for x in 1.*arange(0,w)/w])
sat_weight = repeat(1.0*arange(h)[::-1]/h, w*3).reshape((h,w,3))
hs_scale = tile( hue_scale, (h,1) ).reshape(h,w,3)
hs_scale = (hs_scale-1)*sat_weight+1
self.hs_scale = (255*hs_scale).astype(uint8)
hs_scale = self.hs_scale.copy()
# Mark click position
x,y = int(self.hue*255), int(255-self.sat*255)
x0=max(x-10,0); x1=min(x+10,255); y0=max(y-10,0); y1=min(y+10,255)
c = 1. if y>100 else 0.
hs_scale[y,x0:x1,:] = c; hs_scale[y0:y1,x,:] = c
# Create image object from data
hs_img = PIL.Image.frombuffer('RGB', (256,256), hs_scale, 'raw', 'RGB', 0, 1)
if (self.hs_img==None):
self.hs_img = PIL.ImageTk.PhotoImage( hs_img ) # Make sure to keep a copy!
else:
self.hs_img.paste( hs_img ) # PASTE! Do not replace. Image frame remembers original object
def create_img(self):
self.create_hs_img()
self.create_val_img()
def on_val_click(self, event):
y = clip( event.y, 0, self.val_panel_size[1] )
print 'y=', y
self.val = 1.-float(y)/self.val_panel_size[1]
print "val=", self.val
self.create_val_img()
self.broadcast_color()
def on_hs_click(self, event):
w,h = self.hs_panel_size
x = clip( event.x, 0, w )
y = clip( event.y, 0, h )
self.hue = float(x)/w
self.sat = 1.-float(y)/h
print "hue=", self.hue, "sat=", self.sat
self.create_hs_img()
self.create_val_img()
self.broadcast_color()
def on_tab_changed(self, event):
print '(HS)V tab'
self.receive_color()
self.create_img()
self.broadcast_color()
def broadcast_color(self):
if self.colorbc:
rgb = hsv_to_rgb(self.hue, self.sat, self.val)
var = ( ('H',self.hue), ('S',self.sat), ('V',self.val) )
self.colorbc.set( rgb, var )
def receive_color(self):
if self.colorbc:
r,g,b = self.colorbc.get_rgb()
else: r,g,b = 0,0,0
self.hue, self.sat, self.val = rgb_to_hsv(r,g,b)
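#-----------------------------------------------------------------------------------------
# Minimal usage sketch (added for illustration, not part of the original module).
# The pickers only need a color-broadcast object that exposes get_rgb() -> (r, g, b)
# in the 0..1 range and set(rgb, var), as used by receive_color()/broadcast_color()
# above; DemoColorBroadcast below is a hypothetical stand-in.
if __name__ == '__main__':
    class DemoColorBroadcast(object):
        def __init__(self):
            self.rgb = (1.0, 0.5, 0.0)
        def get_rgb(self):
            return self.rgb
        def set(self, rgb, var):
            self.rgb = rgb
            print 'broadcast rgb =', rgb
    root = tk.Tk()
    root.title('HSV picker demo')
    notebook = ttk.Notebook(root)
    notebook.grid(column=0, row=0, sticky=tk.N+tk.S+tk.E+tk.W)
    bc = DemoColorBroadcast()
    # Each picker grids itself into its parent frame (here: one notebook tab each).
    for name, picker_cls in (('HSV', HSV_Picker), ('H(SV)', H_SV_Picker), ('(HS)V', HS_V_Picker)):
        tab = ttk.Frame(notebook)
        notebook.add(tab, text=name)
        picker_cls(tab, color_broadcast=bc)
    root.mainloop()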
| fablab-bayreuth/fablight | Fablight-Gui/hsv_picker.py | Python | mit | 16,281 | 0.021559 |
from graphics.widgets.single_line_output_f import SingleLineOutputF
from utils.test_sess_overall_results import TestSessOverallResults
import constants.output_constants as const
import tkinter as tk
class TestSessOverallResultsOutputF(tk.Frame):
"""
    - Used to display overall results for a test session.
"""
def __init__(self,
parent,
disabled=False):
"""
:param parent: Parent.
:param disabled: - Default: False;
- If True all the widgets will be disabled.
"""
tk.Frame.__init__(self,
parent,
relief=const.TSOR_FRAME_RELIEF,
padx=const.TSOR_FRAME_PADX,
pady=const.TSOR_FRAME_PADY,
bd=const.TSOR_FRAME_BD)
self._slo_identifiers_classes = []
self._create_widgets()
self._place_widgets()
if disabled:
self.disable()
#########################################################################
# Widget handling
def _create_widgets(self):
self._lbl_title = tk.Label(
self,
font=const.TSOR_TITLE_FONT,
text=const.TSOR_TITLE_TEXT,
padx=const.TSOR_TITLE_PADX,
pady=const.TSOR_TITLE_PADY,
)
self._f_results = tk.Frame(
self,
relief=const.TSOR_SUBFRAME_RELIEF,
padx=const.TSOR_SUBFRAME_PADX,
pady=const.TSOR_SUBFRAME_PADY,
bd=const.TSOR_SUBFRAME_BD
)
self._slo_subtitle = SingleLineOutputF(
parent=self,
description_width=28,
font=const.TSOR_SUBTITLE_FONT,
description=const.TSOR_SUBTITLE_EVAL_METHOD_TEXT,
output_text=const.TSOR_SUBTITLE_RESULT_TEXT
)
self._slo_precision = SingleLineOutputF(
parent=self._f_results,
description_width=26,
font=const.TSOR_FONT,
description=const.TSOR_PRECISION_TEXT,
output_text=const.TSOR_PRECISION_INITIAL_TEXT
)
self._slo_recall = SingleLineOutputF(
parent=self._f_results,
description_width=26,
font=const.TSOR_FONT,
description=const.TSOR_RECALL_TEXT,
output_text=const.TSOR_RECALL_INITIAL_TEXT
)
self._slo_f_measure = SingleLineOutputF(
parent=self._f_results,
description_width=26,
font=const.TSOR_FONT,
description=const.TSOR_F_MEASURE_TEXT,
output_text=const.TSOR_F_MEASURE_INITIAL_TEXT
)
def _place_widgets(self):
self._lbl_title.pack(side='top',
fill='both',
expand=True)
self._slo_subtitle.pack(side='top',
fill='both',
expand=True)
self._slo_precision.pack(side='top',
fill='both',
expand=True)
self._slo_recall.pack(side='top',
fill='both',
expand=True)
self._slo_f_measure.pack(side='top',
fill='both',
expand=True)
self._f_results.pack(side='top',
fill='both',
expand=True)
#########################################################################
# Public methods
def update_results(
self,
overall_results: TestSessOverallResults):
"""
- Updates the results.
:param overall_results: Overall test session results.
"""
if overall_results.is_valid():
self._slo_precision.update_output(
output_text='%.2f' % overall_results.precision)
self._slo_recall.update_output(
output_text='%.2f' % overall_results.recall)
self._slo_f_measure.update_output(
output_text='%.2f' % overall_results.f_measure)
else:
raise ValueError('Overall results are not valid:\n\n'
+ str(overall_results))
def enable(self):
""" Enables all the widgets."""
self._lbl_title.config(state='normal')
self._slo_precision.enable()
self._slo_f_measure.enable()
self._slo_subtitle.enable()
self._slo_recall.enable()
for item in self._slo_identifiers_classes:
item.enable()
def disable(self):
""" Disables all the widgets."""
self._slo_recall.disable()
self._slo_subtitle.disable()
self._slo_f_measure.disable()
self._slo_precision.disable()
self._lbl_title.config(state='disabled')
#########################################################################
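#########################################################################
# Minimal usage sketch (added for illustration, not part of the original
# module). `overall_results` is assumed to be a TestSessOverallResults
# instance exposing is_valid(), precision, recall and f_measure, as
# required by update_results() above; its construction is elided here.
#
#   import tkinter as tk
#   root = tk.Tk()
#   results_frame = TestSessOverallResultsOutputF(parent=root)
#   results_frame.pack(side='top', fill='both', expand=True)
#   results_frame.update_results(overall_results=...)  # a valid results object
#   root.mainloop()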
| dani-i/bachelor-project | graphics/output/test_sess/test_sess_overall_results_output_f.py | Python | apache-2.0 | 5,036 | 0.000397 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class RestartDeployedCodePackageDescription(Model):
"""Defines description for restarting a deloyed code package on Service Fabric
node.
.
:param service_manifest_name:
:type service_manifest_name: str
:param service_package_activation_id:
:type service_package_activation_id: str
:param code_package_name:
:type code_package_name: str
:param code_package_instance_id:
:type code_package_instance_id: str
"""
_validation = {
'service_manifest_name': {'required': True},
'code_package_name': {'required': True},
'code_package_instance_id': {'required': True},
}
_attribute_map = {
'service_manifest_name': {'key': 'ServiceManifestName', 'type': 'str'},
'service_package_activation_id': {'key': 'ServicePackageActivationId', 'type': 'str'},
'code_package_name': {'key': 'CodePackageName', 'type': 'str'},
'code_package_instance_id': {'key': 'CodePackageInstanceId', 'type': 'str'},
}
def __init__(self, service_manifest_name, code_package_name, code_package_instance_id, service_package_activation_id=None):
self.service_manifest_name = service_manifest_name
self.service_package_activation_id = service_package_activation_id
self.code_package_name = code_package_name
self.code_package_instance_id = code_package_instance_id
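# Example (added for illustration; the values below are hypothetical):
#
#   description = RestartDeployedCodePackageDescription(
#       service_manifest_name='MyServiceManifest',
#       code_package_name='Code',
#       code_package_instance_id='132',
#       service_package_activation_id=None)
#
# The _attribute_map above controls serialization, e.g. service_manifest_name
# is emitted as 'ServiceManifestName'.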
| AutorestCI/azure-sdk-for-python | azure-servicefabric/azure/servicefabric/models/restart_deployed_code_package_description.py | Python | mit | 1,899 | 0.002106 |
# Copyright 2013. Amazon Web Services, Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import json
import flask
from flask import request, Response
#from boto import dynamodb2
#from boto.dynamodb2.table import Table
#from boto.dynamodb2.items import Item
#from boto.dynamodb2.exceptions import ConditionalCheckFailedException
from boto import sns
from flask import Flask, jsonify
from flask.ext.sqlalchemy import SQLAlchemy
# Default config vals
THEME = 'default' if os.environ.get('THEME') is None else os.environ.get('THEME')
FLASK_DEBUG = 'false' if os.environ.get('FLASK_DEBUG') is None else os.environ.get('FLASK_DEBUG')
AWS_REGION = 'us-east-1' if os.environ.get('AWS_REGION') is None else os.environ.get('AWS_REGION')
#STARTUP_SIGNUP_TABLE = '' if os.environ.get('STARTUP_SIGNUP_TABLE') is None else os.environ.get('STARTUP_SIGNUP_TABLE')
STARTUP_SIGNUP_RDBMS = '' if os.environ.get('STARTUP_SIGNUP_RDBMS') is None else os.environ.get('STARTUP_SIGNUP_RDBMS')
NEW_SIGNUP_TOPIC = '' if os.environ.get('NEW_SIGNUP_TOPIC') is None else os.environ.get('NEW_SIGNUP_TOPIC')
# Create the Flask app
application = flask.Flask(__name__)
# Load config values specified above
application.config.from_object(__name__)
# Only enable Flask debugging if an env var is set to true
application.debug = application.config['FLASK_DEBUG'] in ['true', 'True']
# Connect to MySQL DB
application.config['MYSQL_DATABASE_USER'] = 'dbuser'
application.config['MYSQL_DATABASE_PASSWORD'] = 'dbpassword'
application.config['MYSQL_DATABASE_DB'] = 'userdb'
application.config['MYSQL_DATABASE_HOST'] = application.config['STARTUP_SIGNUP_RDBMS']
application.config['SQLALCHEMY_DATABASE_URI'] = 'mysql://' + application.config['MYSQL_DATABASE_USER'] + ':' + application.config['MYSQL_DATABASE_PASSWORD'] + '@' + application.config['MYSQL_DATABASE_HOST'] + '/' + application.config['MYSQL_DATABASE_DB']
db = SQLAlchemy(application)
class User(db.Model):
__tablename__ = 'users'
email = db.Column(db.String(255), primary_key=True)
name = db.Column(db.String(255))
theme = db.Column(db.String(30))
previewAccess = db.Column(db.String(10))
# Connect to DynamoDB and get ref to Table
#ddb_conn = dynamodb2.connect_to_region(application.config['AWS_REGION'])
#ddb_table = Table(table_name=application.config['STARTUP_SIGNUP_TABLE'],connection=ddb_conn)
# Connect to SNS
sns_conn = sns.connect_to_region(application.config['AWS_REGION'])
@application.route('/')
def welcome():
theme = application.config['THEME']
return flask.render_template('index.html', theme=theme, flask_debug=application.debug)
@application.route('/signup', methods=['POST'])
def signup():
signup_data = dict()
for item in request.form:
signup_data[item] = request.form[item]
exists = User.query.filter_by(email=signup_data["email"]).first()
if exists is None:
store_in_rdbms(signup_data)
# store_in_dynamo(signup_data)
publish_to_sns(signup_data)
else:
return Response("", status=409, mimetype='application/json')
return Response(json.dumps(signup_data), status=201, mimetype='application/json')
def store_in_rdbms(signup_data):
db.session.add(User(**signup_data))
db.session.commit()
#def store_in_dynamo(signup_data):
# signup_item = Item(ddb_table, data=signup_data)
# signup_item.save()
def publish_to_sns(signup_data):
try:
sns_conn.publish(application.config['NEW_SIGNUP_TOPIC'], json.dumps(signup_data), "New signup: %s" % signup_data['email'])
except Exception as ex:
sys.stderr.write("Error publishing subscription message to SNS: %s" % ex.message)
@application.errorhandler(404)
def not_found_error(error):
print u'{ "Page Not Found": "%s" }' % error
theme = application.config['THEME']
return flask.render_template('404.html', theme=theme, title='404 File Not Found'), 404
@application.errorhandler(500)
def internal_error(error):
db.session.rollback()
print u'{ "Reason": "%s" }' % error
theme = application.config['THEME']
return flask.render_template('500.html', theme=theme, title='Unexpected Error Occured'), 500
if __name__ == '__main__':
application.run(host='0.0.0.0')
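# Example signup request (added for illustration; the form fields mirror the
# columns of the User model above, and 5000 is Flask's default port):
#
#   curl -X POST http://localhost:5000/signup \
#        -d "email=jane@example.com" -d "name=Jane" \
#        -d "theme=default" -d "previewAccess=true"
#
# A new email is stored in the `users` table, published to the SNS topic and
# answered with HTTP 201; posting the same email again returns HTTP 409.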
| SimyungYang/py-flask-signup | application.py | Python | apache-2.0 | 4,769 | 0.00692 |
# coding: utf-8
"""payu.experiment
===============
Basic file system operations for Payu
:copyright: Copyright 2011 Marshall Ward, see AUTHORS for details.
:license: Apache License, Version 2.0, see LICENSE for details.
"""
# Standard library
import errno
import sys, os
import subprocess
import shlex
# Extensions
import yaml
DEFAULT_CONFIG_FNAME = 'config.yaml'
# Lustre target paths for symbolic paths cannot be 60 characters (yes, really)
# Delete this once this bug in Lustre is fixed
CHECK_LUSTRE_PATH_LEN = True
def mkdir_p(path):
"""Create a new directory; ignore if it already exists."""
try:
os.makedirs(path)
except EnvironmentError as exc:
if exc.errno != errno.EEXIST:
raise
def read_config(config_fname=None):
"""Parse input configuration file and return a config dict."""
if not config_fname:
config_fname = DEFAULT_CONFIG_FNAME
try:
with open(config_fname, 'r') as config_file:
config = yaml.load(config_file)
except IOError as exc:
if exc.errno == errno.ENOENT:
print('payu: warning: Configuration file {0} not found!'
.format(config_fname))
config = {}
else:
raise
collate_config = config.pop('collate', {})
# Transform legacy collate config options
if type(collate_config) is bool:
collate_config = {'enable': collate_config}
collatestr = 'collate_'
foundkeys = []
# Cycle through old collate config and convert to newer dict format
for key in list(config.keys()):
if key.startswith(collatestr):
foundkeys.append(key)
collate_config[key[len(collatestr):]] = config.pop(key)
if foundkeys:
print("Use of these keys is deprecated: {}.".format(
", ".join(foundkeys)))
print("Instead use collate dictionary and subkey "
"without 'collate_' prefix")
config['collate'] = collate_config
return config
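# Example (added for illustration; 'queue' is just a hypothetical legacy key):
# a config.yaml containing
#
#     collate_enable: true
#     collate_queue: express
#
# is returned with config['collate'] == {'enable': True, 'queue': 'express'},
# and a deprecation message lists the old 'collate_*' keys.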
def make_symlink(src_path, lnk_path):
"""Safely create a symbolic link to an input field."""
# Check for Lustre 60-character symbolic link path bug
if CHECK_LUSTRE_PATH_LEN:
src_path = patch_lustre_path(src_path)
lnk_path = patch_lustre_path(lnk_path)
# os.symlink will happily make a symlink to a non-existent
# file, but we don't want that behaviour
if not os.path.exists(src_path):
return
try:
os.symlink(src_path, lnk_path)
except EnvironmentError as exc:
if exc.errno != errno.EEXIST:
raise
elif not os.path.islink(lnk_path):
# Warn the user, but do not interrupt the job
print("Warning: Cannot create symbolic link to {p}; a file named "
"{f} already exists.".format(p=src_path, f=lnk_path))
else:
# Overwrite any existing symbolic link
if os.path.realpath(lnk_path) != src_path:
os.remove(lnk_path)
os.symlink(src_path, lnk_path)
def splitpath(path):
"""Recursively split a filepath into all directories and files."""
head, tail = os.path.split(path)
if tail == '':
return head,
elif head == '':
return tail,
else:
return splitpath(head) + (tail,)
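# For example (added for illustration):
#   splitpath('/data/run/output.nc') == ('/', 'data', 'run', 'output.nc')
#   splitpath('run/output.nc') == ('run', 'output.nc')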
def patch_lustre_path(f_path):
"""Patch any 60-character pathnames, to avoid a current Lustre bug."""
if CHECK_LUSTRE_PATH_LEN and len(f_path) == 60:
if os.path.isabs(f_path):
f_path = '/.' + f_path
else:
f_path = './' + f_path
return f_path
def get_commit_id(filepath):
"""
Return git commit hash for filepath
"""
cmd = shlex.split("git log -n 1 --pretty=format:%H -- ")
cmd.append(filepath)
try:
with open(os.devnull, 'w') as devnull:
hash = subprocess.check_output(cmd, stderr=devnull)
if sys.version_info.major==3:
hash.decode('ascii')
return hash.strip()
except subprocess.CalledProcessError:
return None
def get_git_revision_hash(short=False):
"""
Return git commit hash for repository
"""
cmd = ['git', 'rev-parse', 'HEAD']
if short:
cmd.insert(-1,'--short')
try:
with open(os.devnull, 'w') as devnull:
            hash = subprocess.check_output(cmd, stderr=devnull)
if sys.version_info.major==3:
hash.decode('ascii')
return hash.strip()
except subprocess.CalledProcessError:
return None
def is_ancestor(id1, id2):
"""
Return True if git commit id1 is a ancestor of git commit id2
"""
try:
with open(os.devnull, 'w') as devnull:
revs = subprocess.check_output(['git', 'rev-list', id2], stderr=devnull)
except:
return None
else:
return id1 in revs | aidanheerdegen/payu | payu/fsops.py | Python | apache-2.0 | 4,890 | 0.003067 |
# Copyright (c) 2014 Cloudbase Solutions Srl
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from hyperv.nova import driver
HyperVDriver = driver.HyperVDriver
| adelina-t/compute-hyperv | hyperv/nova/__init__.py | Python | apache-2.0 | 687 | 0 |
# Django settings for controller project.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
('Tim Spriggs', 'tims@arizona.edu'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': '/home/apwm/controller/db.sqlite3', # Or path to database file if using sqlite3.
'USER': '', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.4/ref/settings/#allowed-hosts
ALLOWED_HOSTS = []
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'America/Phoenix'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = '/home/apwm/controller/controller/static/'
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/apwm/static/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = '^0@$9mm^v@+f#^su8&ee+=1y8q44#t2+$aiy%@)c6e1%_o27o$'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'controller.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'controller.wsgi.application'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
'/home/apwm/controller/controller/basic/templates',
'/home/apwm/controller/controller/templates',
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Uncomment the next line to enable the admin:
# 'django.contrib.admin',
# Uncomment the next line to enable admin documentation:
# 'django.contrib.admindocs',
'controller.basic',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
| imoverclocked/ServoBot | apwm_home/controller/controller/settings.py | Python | mit | 5,602 | 0.001428 |
#!/usr/bin/env python3
#------------------------------------------------------
#
# This is a program for JoystickPS2 Module.
#
# This program depend on PCF8591 ADC chip. Follow
# the instruction book to connect the module and
# ADC0832 to your Raspberry Pi.
#
#------------------------------------------------------
import PCF8591 as ADC
import time
def setup():
ADC.setup(0x48) # Setup PCF8591
global state
def direction(): #get joystick result
state = ['home', 'up', 'down', 'left', 'right', 'pressed']
i = 0
if ADC.read(0) <= 30:
i = 1 #up
if ADC.read(0) >= 225:
i = 2 #down
if ADC.read(1) >= 225:
i = 3 #left
if ADC.read(1) <= 30:
i = 4 #right
if ADC.read(2) <= 30:
i = 5 # Button pressed
if ADC.read(0) - 125 < 15 and ADC.read(0) - 125 > -15 and ADC.read(1) - 125 < 15 and ADC.read(1) - 125 > -15 and ADC.read(2) == 255:
i = 0
return state[i]
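# For example (added for illustration): pushing the stick fully up pulls
# channel 0 below 30, so direction() returns 'up'; pressing the knob pulls
# channel 2 below 30, so it returns 'pressed'.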
def loop():
status = ''
while True:
tmp = direction()
if tmp != None and tmp != status:
print (tmp)
status = tmp
def destroy():
pass
if __name__ == '__main__': # Program start from here
setup()
try:
loop()
except KeyboardInterrupt: # When 'Ctrl+C' is pressed, the child program destroy() will be executed.
destroy()
| sunfounder/SunFounder_SensorKit_for_RPi2 | Python/15_joystick_PS2.py | Python | gpl-2.0 | 1,235 | 0.045344 |
# Copyright 2020 by Kurt Rathjen. All Rights Reserved.
#
# This library is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version. This library is distributed in the
# hope that it will be useful, but WITHOUT ANY WARRANTY; without even the
# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public
# License along with this library. If not, see <http://www.gnu.org/licenses/>.
"""
# Example:
import mutils.tests.test_attribute
reload(mutils.tests.test_attribute)
mutils.tests.test_attribute.run()
"""
import os
import unittest
import maya.cmds
import mutils
class TestAttribute(unittest.TestCase):
def setUp(self):
"""
Open an existing maya test scene for testing.
"""
dirname = os.path.dirname(mutils.__file__)
dirname = os.path.join(dirname, "tests", "data")
path = os.path.join(dirname, "sphere.ma")
maya.cmds.file(
path,
open=True,
force=True,
ignoreVersion=True,
executeScriptNodes=False,
)
def test_attribute_limit(self):
"""
Test the attribute limit when setting the attribute value.
"""
range_ = (-100, 100)
maya.cmds.cutKey("sphere", cl=True, time=range_, f=range_, at="testLimit")
attr = mutils.Attribute("sphere", "testLimit")
attr.set(200)
value = maya.cmds.getAttr("sphere.testLimit")
        assert value == 10, "Maximum attribute limit was ignored when setting the attribute value"
def test_attribute_limit2(self):
"""
Test the maximum attribute limit when setting a keyframe.
"""
attr = mutils.Attribute("sphere", "testLimit")
attr.setKeyframe(200)
value = maya.cmds.keyframe("sphere.testLimit", query=True, eval=True)[0]
        assert value == 10, "Maximum attribute limit was ignored when setting animation keyframe"
def test_attribute_limit3(self):
"""
Test the minimum attribute limit when setting a keyframe.
"""
attr = mutils.Attribute("sphere", "testLimit")
attr.setKeyframe(-200)
value = maya.cmds.keyframe("sphere.testLimit", query=True, eval=True)[0]
        assert value == -10, "Minimum attribute limit was ignored when setting animation keyframe"
def test_non_keyable(self):
"""
Test if non-keyable attributes can be keyed.
"""
range_ = (-100, 100)
maya.cmds.cutKey("sphere", cl=True, time=range_, f=range_, at="testNonKeyable")
attr = mutils.Attribute("sphere", "testNonKeyable")
attr.setKeyframe(200)
value = maya.cmds.keyframe("sphere.testNonKeyable", query=True, eval=True)
assert value is None, "Non keyable attribute was keyed"
def test_anim_curve(self):
"""
Test if get anim curve returns the right value.
"""
msg = "Incorrect anim curve was returned when using attr.animCurve "
attr = mutils.Attribute("sphere", "testFloat")
curve = attr.animCurve()
assert curve is None, msg + "1"
attr = mutils.Attribute("sphere", "testConnected")
curve = attr.animCurve()
assert curve is None, msg + "2"
attr = mutils.Attribute("sphere", "testAnimated")
curve = attr.animCurve()
assert curve == "sphere_testAnimated", msg + "3"
def test_set_anim_curve(self):
"""
Test if set anim curve
"""
msg = "No anim curve was set"
attr = mutils.Attribute("sphere", "testAnimated")
srcCurve = attr.animCurve()
attr = mutils.Attribute("sphere", "testFloat")
attr.setAnimCurve(srcCurve, time=(1, 15), option="replace")
curve = attr.animCurve()
assert curve is not None, msg
attr = mutils.Attribute("sphere", "testFloat")
attr.setAnimCurve(srcCurve, time=(15, 15), option="replaceCompletely")
curve = attr.animCurve()
assert curve is not None, msg
def test_set_static_keyframe(self):
"""
Test set static keyframes
"""
msg = "The inserted static keys have different values"
attr = mutils.Attribute("sphere", "testAnimated", cache=False)
attr.setStaticKeyframe(value=2, time=(4, 6), option="replace")
maya.cmds.currentTime(4)
value1 = attr.value()
maya.cmds.currentTime(6)
value2 = attr.value()
assert value1 == value2, msg
def testSuite():
"""
Return the test suite for the TestAttribute.
:rtype: unittest.TestSuite
"""
suite = unittest.TestSuite()
s = unittest.makeSuite(TestAttribute, 'test')
suite.addTest(s)
return suite
def run():
"""
Call from within Maya to run all valid tests.
Example:
import mutils.tests.test_attribute
reload(mutils.tests.test_attribute)
mutils.tests.test_attribute.run()
"""
tests = unittest.TextTestRunner()
tests.run(testSuite())
| krathjen/studiolibrary | src/mutils/tests/test_attribute.py | Python | lgpl-3.0 | 5,307 | 0.002638 |
from django.contrib.sites.models import Site
from django.utils._os import safe_join
from django.views.generic import TemplateView
from skin.conf import settings
from skin.template.loaders.util import get_site_skin
class TemplateSkinView(TemplateView):
"""
    A view that extends Django's base TemplateView to allow you to set up skins.
"""
skin_name = None
skin_path = None
def get_skin_name(self):
if self.skin_name is None:
return settings.SKIN_NAME
else:
return self.skin_name
def get_skin(self):
return get_site_skin(site=Site.objects.get_current(), name=self.get_skin_name())
def get_skin_path(self):
if self.skin_path is not None:
return self.skin_path
skin = self.get_skin()
if skin is not None:
return skin.path
else:
return None
def get_template_names(self):
template_names = super(TemplateSkinView, self).get_template_names()
skin_path = self.get_skin_path()
skin_template_names = []
if skin_path is not None:
for template_name in template_names:
skin_template_names.append(safe_join(skin_path, template_name))
return skin_template_names + template_names | dwatkinsweb/django-skin | skin/views/views.py | Python | mit | 1,288 | 0.001553 |
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import six
from . import layers
from .data_feeder import check_variable_and_dtype, convert_dtype
from ..utils import deprecated
__all__ = [
"simple_img_conv_pool",
"sequence_conv_pool",
"glu",
"scaled_dot_product_attention",
"img_conv_group",
]
def simple_img_conv_pool(input,
num_filters,
filter_size,
pool_size,
pool_stride,
pool_padding=0,
pool_type='max',
global_pooling=False,
conv_stride=1,
conv_padding=0,
conv_dilation=1,
conv_groups=1,
param_attr=None,
bias_attr=None,
act=None,
use_cudnn=True):
r"""
:api_attr: Static Graph
The simple_img_conv_pool api is composed of :ref:`api_fluid_layers_conv2d` and :ref:`api_fluid_layers_pool2d` .
Args:
input (Variable): 4-D Tensor, shape is [N, C, H, W], data type can be float32 or float64.
num_filters(int): The number of filters. It is the same as the output channels.
filter_size (int|list|tuple): The filter size. If filter_size is a list or
tuple, it must contain two integers, (filter_size_H, filter_size_W). Otherwise,
the filter_size_H = filter_size_W = filter_size.
pool_size (int|list|tuple): The pooling size of pool2d layer. If pool_size
is a list or tuple, it must contain two integers, (pool_size_H, pool_size_W).
Otherwise, the pool_size_H = pool_size_W = pool_size.
pool_stride (int|list|tuple): The pooling stride of pool2d layer. If pool_stride
is a list or tuple, it must contain two integers, (pooling_stride_H, pooling_stride_W).
Otherwise, the pooling_stride_H = pooling_stride_W = pool_stride.
pool_padding (int|list|tuple): The padding of pool2d layer. If pool_padding is a list or
tuple, it must contain two integers, (pool_padding_H, pool_padding_W).
Otherwise, the pool_padding_H = pool_padding_W = pool_padding. Default 0.
pool_type (str): Pooling type can be :math:`max` for max-pooling or :math:`avg` for
average-pooling. Default :math:`max`.
global_pooling (bool): Whether to use the global pooling. If global_pooling = true,
pool_size and pool_padding while be ignored. Default False
conv_stride (int|list|tuple): The stride size of the conv2d Layer. If stride is a
list or tuple, it must contain two integers, (conv_stride_H, conv_stride_W). Otherwise,
the conv_stride_H = conv_stride_W = conv_stride. Default: conv_stride = 1.
conv_padding (int|list|tuple): The padding size of the conv2d Layer. If padding is
a list or tuple, it must contain two integers, (conv_padding_H, conv_padding_W).
Otherwise, the conv_padding_H = conv_padding_W = conv_padding. Default: conv_padding = 0.
conv_dilation (int|list|tuple): The dilation size of the conv2d Layer. If dilation is
a list or tuple, it must contain two integers, (conv_dilation_H, conv_dilation_W).
Otherwise, the conv_dilation_H = conv_dilation_W = conv_dilation. Default: conv_dilation = 1.
conv_groups (int): The groups number of the conv2d Layer. According to grouped
convolution in Alex Krizhevsky's Deep CNN paper: when group=2,
the first half of the filters is only connected to the first half
of the input channels, while the second half of the filters is only
connected to the second half of the input channels. Default: groups=1.
param_attr (ParamAttr|None): The parameter attribute for learnable parameters/weights
of conv2d. If it is set to None or one attribute of ParamAttr, conv2d
will create ParamAttr as param_attr. If the Initializer of the param_attr
is not set, the parameter is initialized with :math:`Normal(0.0, std)`,
and the :math:`std` is :math:`(\\frac{2.0 }{filter\_elem\_num})^{0.5}`.
Default: None.
bias_attr (ParamAttr|bool|None): The parameter attribute for the bias of conv2d.
If it is set to False, no bias will be added to the output units.
If it is set to None or one attribute of ParamAttr, conv2d
will create ParamAttr as bias_attr. If the Initializer of the bias_attr
is not set, the bias is initialized zero. Default: None.
act (str): Activation type for conv2d, if it is set to None, activation is not
appended. Default: None.
use_cudnn (bool): Use cudnn kernel or not, it is valid only when the cudnn
library is installed. Default: True
Return:
4-D Tensor, the result of input after conv2d and pool2d, with the same data type as :attr:`input`
Return Type:
Variable
Examples:
.. code-block:: python
import paddle.fluid as fluid
import paddle
paddle.enable_static()
img = fluid.data(name='img', shape=[100, 1, 28, 28], dtype='float32')
conv_pool = fluid.nets.simple_img_conv_pool(input=img,
filter_size=5,
num_filters=20,
pool_size=2,
pool_stride=2,
act="relu")
"""
conv_out = layers.conv2d(
input=input,
num_filters=num_filters,
filter_size=filter_size,
stride=conv_stride,
padding=conv_padding,
dilation=conv_dilation,
groups=conv_groups,
param_attr=param_attr,
bias_attr=bias_attr,
act=act,
use_cudnn=use_cudnn)
pool_out = layers.pool2d(
input=conv_out,
pool_size=pool_size,
pool_type=pool_type,
pool_stride=pool_stride,
pool_padding=pool_padding,
global_pooling=global_pooling,
use_cudnn=use_cudnn)
return pool_out
def img_conv_group(input,
conv_num_filter,
pool_size,
conv_padding=1,
conv_filter_size=3,
conv_act=None,
param_attr=None,
conv_with_batchnorm=False,
conv_batchnorm_drop_rate=0.0,
pool_stride=1,
pool_type="max",
use_cudnn=True):
"""
:api_attr: Static Graph
The Image Convolution Group is composed of Convolution2d, BatchNorm, DropOut,
and Pool2D. According to the input arguments, img_conv_group will do serials of
computation for Input using Convolution2d, BatchNorm, DropOut, and pass the last
result to Pool2D.
Args:
input (Variable): The input is 4-D Tensor with shape [N, C, H, W], the data type of input is float32 or float64.
conv_num_filter(list|tuple): Indicates the numbers of filter of this group.
pool_size (int|list|tuple): The pooling size of Pool2D Layer. If pool_size
is a list or tuple, it must contain two integers, (pool_size_height, pool_size_width).
Otherwise, the pool_size_height = pool_size_width = pool_size.
conv_padding (int|list|tuple): The padding size of the Conv2D Layer. If padding is
a list or tuple, its length must be equal to the length of conv_num_filter.
Otherwise the conv_padding of all Conv2D Layers are the same. Default 1.
conv_filter_size (int|list|tuple): The filter size. If filter_size is a list or
tuple, its length must be equal to the length of conv_num_filter.
Otherwise the conv_filter_size of all Conv2D Layers are the same. Default 3.
conv_act (str): Activation type for Conv2D Layer that is not followed by BatchNorm.
Default: None.
param_attr (ParamAttr): The parameters to the Conv2D Layer. Default: None
conv_with_batchnorm (bool|list): Indicates whether to use BatchNorm after Conv2D Layer.
If conv_with_batchnorm is a list, its length must be equal to the length of
conv_num_filter. Otherwise, conv_with_batchnorm indicates whether all the
Conv2D Layer follows a BatchNorm. Default False.
conv_batchnorm_drop_rate (float|list): Indicates the drop_rate of Dropout Layer
after BatchNorm. If conv_batchnorm_drop_rate is a list, its length must be
equal to the length of conv_num_filter. Otherwise, drop_rate of all Dropout
Layers is conv_batchnorm_drop_rate. Default 0.0.
pool_stride (int|list|tuple): The pooling stride of Pool2D layer. If pool_stride
is a list or tuple, it must contain two integers, (pooling_stride_H,
pooling_stride_W). Otherwise, the pooling_stride_H = pooling_stride_W = pool_stride.
Default 1.
pool_type (str): Pooling type can be :math:`max` for max-pooling and :math:`avg` for
average-pooling. Default :math:`max`.
use_cudnn (bool): Use cudnn kernel or not, it is valid only when the cudnn
library is installed. Default: True
Return:
A Variable holding Tensor representing the final result after serial computation using Convolution2d,
BatchNorm, DropOut, and Pool2D, whose data type is the same with input.
Examples:
.. code-block:: python
import paddle.fluid as fluid
import paddle
paddle.enable_static()
img = fluid.data(name='img', shape=[None, 1, 28, 28], dtype='float32')
conv_pool = fluid.nets.img_conv_group(input=img,
conv_padding=1,
conv_num_filter=[3, 3],
conv_filter_size=3,
conv_act="relu",
pool_size=2,
pool_stride=2)
"""
tmp = input
assert isinstance(conv_num_filter, list) or \
isinstance(conv_num_filter, tuple)
def __extend_list__(obj):
if not hasattr(obj, '__len__'):
return [obj] * len(conv_num_filter)
else:
assert len(obj) == len(conv_num_filter)
return obj
conv_padding = __extend_list__(conv_padding)
conv_filter_size = __extend_list__(conv_filter_size)
param_attr = __extend_list__(param_attr)
conv_with_batchnorm = __extend_list__(conv_with_batchnorm)
conv_batchnorm_drop_rate = __extend_list__(conv_batchnorm_drop_rate)
for i in six.moves.range(len(conv_num_filter)):
local_conv_act = conv_act
if conv_with_batchnorm[i]:
local_conv_act = None
tmp = layers.conv2d(
input=tmp,
num_filters=conv_num_filter[i],
filter_size=conv_filter_size[i],
padding=conv_padding[i],
param_attr=param_attr[i],
act=local_conv_act,
use_cudnn=use_cudnn)
if conv_with_batchnorm[i]:
tmp = layers.batch_norm(input=tmp, act=conv_act)
drop_rate = conv_batchnorm_drop_rate[i]
if abs(drop_rate) > 1e-5:
tmp = layers.dropout(x=tmp, dropout_prob=drop_rate)
pool_out = layers.pool2d(
input=tmp,
pool_size=pool_size,
pool_type=pool_type,
pool_stride=pool_stride,
use_cudnn=use_cudnn)
return pool_out
def sequence_conv_pool(input,
num_filters,
filter_size,
param_attr=None,
act="sigmoid",
pool_type="max",
bias_attr=None):
"""
:api_attr: Static Graph
**This api takes input as an LoDTensor. If input is a Tensor, please use**
:ref:`api_fluid_nets_simple_img_conv_pool` **instead**
The sequence_conv_pool is composed of :ref:`api_fluid_layers_sequence_conv`
and :ref:`api_fluid_layers_sequence_pool` .
Args:
input (Variable): 2-D LoDTensor, the input of sequence_conv,
which supports variable-time length input sequence.
The underlying of input is a matrix with shape
(T, N), where T is the total time steps in this mini-batch and N is
the input_hidden_size. The data type is float32 or float64.
num_filters(int): The number of filter.
filter_size (int): The filter size.
param_attr (ParamAttr): The parameters of the sequence_conv Layer. Default: None.
act (str|None): Activation type for Sequence_conv Layer.
If set to None, no activation will be applied. Default: "sigmoid".
pool_type (str): Pooling type can be :math:`max` for max-pooling, :math:`average` for
average-pooling, :math:`sum` for sum-pooling, :math:`sqrt` for sqrt-pooling.
Default :math:`max`.
bias_attr (ParamAttr|bool|None): The parameter attribute for the bias of sequence_conv.
If it is set to False, no bias will be added to the output units.
If it is set to None or one attribute of ParamAttr, sequence_conv
will create ParamAttr as bias_attr. If the Initializer of the bias_attr
is not set, the bias is initialized zero. Default: None.
Returns:
The final result after sequence_conv and sequence_pool.
It is a 2-D Tensor, with the same data type as :attr:`input`
Return Type:
Variable
Examples:
.. code-block:: python
import paddle.fluid as fluid
import paddle
paddle.enable_static()
input_dim = 100 #len(word_dict)
emb_dim = 128
hid_dim = 512
data = fluid.data(name="words", shape=[None, 1], dtype="int64", lod_level=1)
emb = fluid.layers.embedding(input=data, size=[input_dim, emb_dim], is_sparse=True)
seq_conv = fluid.nets.sequence_conv_pool(input=emb,
num_filters=hid_dim,
filter_size=3,
act="tanh",
pool_type="sqrt")
"""
check_variable_and_dtype(input, 'input', ['float32', 'float64'], 'input')
conv_out = layers.sequence_conv(
input=input,
num_filters=num_filters,
filter_size=filter_size,
param_attr=param_attr,
bias_attr=bias_attr,
act=act)
pool_out = layers.sequence_pool(input=conv_out, pool_type=pool_type)
return pool_out
@deprecated(since="2.0.0", update_to="paddle.nn.functional.glu")
def glu(input, dim=-1):
r"""
:api_attr: Static Graph
    The Gated Linear Units (GLU) layer is composed of :ref:`api_fluid_layers_split` ,
:ref:`api_fluid_layers_sigmoid` and :ref:`api_fluid_layers_elementwise_mul` .
    Specifically, GLU will split the input into two equal-sized parts,
:math:`a` and :math:`b`, along the given dimension and then compute as
following:
.. math::
{GLU}(a, b)= a \otimes \sigma(b)
Refer to `Language Modeling with Gated Convolutional Networks
<https://arxiv.org/pdf/1612.08083.pdf>`_.
Args:
input (Variable): The input variable which is a Tensor or LoDTensor.
The supported data types include float32, float64
and float16 (only for GPU).
dim (int, optional): The dimension along which to split. If :math:`dim < 0`, the
dimension to split along is :math:`rank(input) + dim`. Default -1.
Returns:
Variable: Variable with half the size and same data type of input.
Examples:
.. code-block:: python
import paddle.fluid as fluid
import paddle
paddle.enable_static()
data = fluid.data(
name="words", shape=[-1, 6, 3, 9], dtype="float32")
# shape of output: [-1, 3, 3, 9]
output = fluid.nets.glu(input=data, dim=1)
"""
check_variable_and_dtype(input, 'input', ['float16', 'float32', 'float64'],
"glu")
a, b = layers.split(input, num_or_sections=2, dim=dim)
act_b = layers.sigmoid(x=b)
out = layers.elementwise_mul(x=a, y=act_b)
return out
def scaled_dot_product_attention(queries,
keys,
values,
num_heads=1,
dropout_rate=0.):
r"""
:api_attr: Static Graph
This interface Multi-Head Attention using scaled dot product.
Attention mechanism can be seen as mapping a query and a set of key-value
pairs to an output. Multi-Head Attention performs attention using multi-head
parallel, and the inputs of attention would be transformed by linear projection.
The formula is as follows:
.. math::
MultiHead(Q, K, V ) & = Concat(head_1, ..., head_h)
where \ head_i & = Attention(QW_i^Q , KW_i^K , VW_i^V )
Attention(Q, K, V) & = softmax (\\frac{QK^\mathrm{T}}{\sqrt{d_k}}) V
For more details, please refer to `Attention Is All You Need
<https://arxiv.org/pdf/1706.03762.pdf>`_ .
Note that the implementation is adapted to batch, and all matrix multiplication
in :math:`Attention(Q, K, V)` is batched matrix multiplication. Refer to
:ref:`api_fluid_layers_matmul` .
Args:
queries (Variable): A 3-D Tensor with shape :math:`[N, L_q, d_k \\times h]` ,
where :math:`N` stands for batch size, :math:`L_q` for the sequence length
of query, :math:`d_k \\times h` for the feature size of query, :math:`h` for
head number. The data type should be float32 or float64.
keys (Variable): A 3-D Tensor with shape :math:`[N, L_k, d_k \\times h]` ,
where :math:`N` stands for batch size, :math:`L_k` for the sequence length
of key, :math:`d_k \\times h` for the feature size of key, :math:`h` for head
number. The data type should be the same as ``queries`` .
values (Variable): A 3-D Tensor with shape :math:`[N, L_k, d_v \\times h]` ,
where :math:`N` stands for batch size, :math:`L_k` for the sequence length
of key, :math:`d_v \\times h` for the feature size of value, :math:`h` for head
number. The data type should be the same as ``queries`` .
num_heads (int, optional): Indicate the number of head. If the number
is 1, linear projection would not be performed on inputs. Default: 1.
dropout_rate (float, optional): The rate to drop the attention weight.
Default: 0.0, which means no dropout.
Returns:
Variable: A 3-D Tensor with shape :math:`[N, L_q, d_v \\times h]` , \
where :math:`N` stands for batch size, :math:`L_q` for the sequence \
length of query, :math:`d_v \\times h` for the feature size of value. \
It has the same data type with inputs, representing the output of \
Multi-Head Attention.
Raises:
TypeError: The dtype of inputs keys, values and queries should be the same.
ValueError: Inputs queries, keys and values should all be 3-D tensors.
ValueError: The hidden size of queries and keys should be the same.
ValueError: The max sequence length in value batch and in key batch should be the same.
        ValueError: The hidden size of keys must be divisible by the number of attention heads.
        ValueError: The hidden size of values must be divisible by the number of attention heads.
Examples:
.. code-block:: python
import paddle.fluid as fluid
import paddle
paddle.enable_static()
queries = fluid.data(name="queries", shape=[3, 5, 9], dtype="float32")
keys = fluid.data(name="keys", shape=[3, 6, 9], dtype="float32")
values = fluid.data(name="values", shape=[3, 6, 10], dtype="float32")
contexts = fluid.nets.scaled_dot_product_attention(queries, keys, values)
contexts.shape # [3, 5, 10]
"""
check_variable_and_dtype(queries, 'queries', ['float32', 'float64'],
"scaled_dot_product_attention")
check_variable_and_dtype(keys, 'keys', ['float32', 'float64'],
"scaled_dot_product_attention")
check_variable_and_dtype(values, 'values', ['float32', 'float64'],
"scaled_dot_product_attention")
if not (queries.dtype == keys.dtype == values.dtype):
raise TypeError(
"The dtype of keys, values and queries should be the same."
"But received queries.dtype = %s, "
" keys.dtype = %s, values.dtype) = %s." %
(convert_dtype(queries.dtype), convert_dtype(keys.dtype),
convert_dtype(values.dtype)))
if not (len(queries.shape) == len(keys.shape) == len(values.shape) == 3):
raise ValueError(
"Inputs queries, keys and values should all be 3-D tensors."
"But received len(queries.shape) = %d, "
"len(keys.shape) = %d, len(values.shape) = %d." %
(len(queries.shape), len(keys.shape), len(values.shape)))
if queries.shape[-1] != keys.shape[-1]:
raise ValueError(
"The hidden size of queries and keys should be the same."
"But received queries' hidden size = %d and keys' hidden size = %d."
% (queries.shape[-1], keys.shape[-1]))
if keys.shape[-2] != values.shape[-2]:
raise ValueError(
"The max sequence length in value batch and in key batch "
"should be the same. But received max sequence length in value batch "
"= %d, in key batch = %d." % (values.shape[-2], keys.shape[-2]))
if keys.shape[-1] % num_heads != 0:
raise ValueError("The hidden size of keys (%d) must be divisible "
"by the number of attention heads (%d)." %
(keys.shape[-1], num_heads))
if values.shape[-1] % num_heads != 0:
raise ValueError("The hidden size of values (%d) must be divisible "
"by the number of attention heads (%d)." %
(values.shape[-1], num_heads))
def __compute_qkv(queries, keys, values, num_heads):
"""
Add linear projection to queries, keys, and values.
Args:
queries(Tensor): a 3-D input Tensor.
keys(Tensor): a 3-D input Tensor.
values(Tensor): a 3-D input Tensor.
num_heads(int): The number of heads. Linearly project the inputs
ONLY when num_heads > 1.
Returns:
Tensor: linearly projected output Tensors: queries', keys' and
values'. They have the same shapes with queries, keys and
values.
"""
if num_heads == 1:
return queries, keys, values
q = layers.fc(input=queries, size=queries.shape[-1], num_flatten_dims=2)
k = layers.fc(input=keys, size=keys.shape[-1], num_flatten_dims=2)
v = layers.fc(input=values, size=values.shape[-1], num_flatten_dims=2)
return q, k, v
def __split_heads(x, num_heads):
"""
Reshape the last dimension of input tensor x so that it becomes two
dimensions.
Args:
x(Tensor): a 3-D input Tensor.
num_heads(int): The number of heads.
Returns:
Tensor: a Tensor with shape [..., n, m/num_heads], where m is size
of the last dimension of x.
"""
if num_heads == 1:
return x
hidden_size = x.shape[-1]
# reshape the 3-D input: [batch_size, max_sequence_length, hidden_dim]
# into a 4-D output:
# [batch_size, max_sequence_length, num_heads, hidden_size_per_head].
reshaped = layers.reshape(
x=x,
shape=list(x.shape[:-1]) + [num_heads, hidden_size // num_heads])
# permute the dimensions into:
# [batch_size, num_heads, max_sequence_len, hidden_size_per_head]
return layers.transpose(x=reshaped, perm=[0, 2, 1, 3])
def __combine_heads(x):
"""
Reshape the last two dimensions of input tensor x so that it becomes
one dimension.
Args:
x(Tensor): a 4-D input Tensor with shape
[bs, num_heads, max_sequence_length, hidden_dim].
Returns:
Tensor: a Tensor with shape
[bs, max_sequence_length, num_heads * hidden_dim].
"""
if len(x.shape) == 3: return x
if len(x.shape) != 4:
raise ValueError("Input(x) should be a 4-D Tensor.")
trans_x = layers.transpose(x, perm=[0, 2, 1, 3])
return layers.reshape(
x=trans_x,
shape=list(
map(int, [
trans_x.shape[0], trans_x.shape[1], trans_x.shape[2] *
trans_x.shape[3]
])))
q, k, v = __compute_qkv(queries, keys, values, num_heads)
q = __split_heads(q, num_heads)
k = __split_heads(k, num_heads)
v = __split_heads(v, num_heads)
key_dim_per_head = keys.shape[-1] // num_heads
scaled_q = layers.scale(x=q, scale=key_dim_per_head**-0.5)
product = layers.matmul(x=scaled_q, y=k, transpose_y=True)
weights = layers.reshape(
x=layers.reshape(
x=product, shape=[-1, product.shape[-1]], act="softmax"),
shape=product.shape)
if dropout_rate:
weights = layers.dropout(
weights, dropout_prob=dropout_rate, is_test=False)
ctx_multiheads = layers.matmul(weights, v)
return __combine_heads(ctx_multiheads)
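# A shape-checking NumPy sketch (editor's addition, not part of the Paddle API)
# of the computation above, with the optional linear projections omitted.
# `_sdpa_reference` is a hypothetical helper name; NumPy is assumed available.
def _sdpa_reference(q, k, v, num_heads=1):
    import numpy as np
    def split_heads(x):                     # [N, L, d*h] -> [N, h, L, d]
        n, l, d = x.shape
        return x.reshape(n, l, num_heads, d // num_heads).transpose(0, 2, 1, 3)
    q, k, v = map(split_heads, (q, k, v))
    d_k = q.shape[-1]
    scores = np.matmul(q, k.transpose(0, 1, 3, 2)) / np.sqrt(d_k)
    weights = np.exp(scores - scores.max(axis=-1, keepdims=True))
    weights /= weights.sum(axis=-1, keepdims=True)          # softmax over keys
    ctx = np.matmul(weights, v)                              # [N, h, L_q, d_v]
    n, h, l, d_v = ctx.shape
    return ctx.transpose(0, 2, 1, 3).reshape(n, l, h * d_v)  # [N, L_q, d_v*h]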
| PaddlePaddle/Paddle | python/paddle/fluid/nets.py | Python | apache-2.0 | 27,095 | 0.004687 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
JWT tokens (for web interface, mostly, as all peer operations function on
public key cryptography)
JWT tokens can be one of:
* Good
* Expired
* Invalid
Granting them should not require database access. They are meant to
determine whether a user is authenticated without touching the database.
"""
from __future__ import absolute_import
from __future__ import unicode_literals
import datetime
from ...utils.timing import TimedTestCase
from ..token import token, jwt_get, jwt_use
class test_token(TimedTestCase):
def test_good_token(self):
"""Valid JWT Token"""
self.threshold = .32
bob = token(u'bob')
example = bob.make(u'print')
bob.check(example)
def test_expired_token(self):
"""Expire a token..."""
self.threshold = .1
a = datetime.datetime.now()
        assert a is not None
def test_invalid_token(self):
"""Invalid Tokens"""
self.threshold = .1
fred = token(u'fred')
alice = token(u'alice')
wrong = fred.make(u'well then')
alice.check(wrong)
class test_jwt(TimedTestCase):
def test_routes(self):
self.threshold = .1
tok = jwt_get(u'ten')
res = jwt_use(tok)
print(res)
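# Editor's sketch, not exercised by the tests above: the Good/Expired/Invalid
# outcomes can be reproduced with PyJWT-style encode/decode.  Using PyJWT here
# is an assumption; the `token` helper under test may be built differently.
def _jwt_outcomes_sketch():
    import jwt  # PyJWT
    secret = u'secret'
    good = jwt.encode({'sub': 'bob',
                       'exp': datetime.datetime.utcnow() + datetime.timedelta(minutes=5)},
                      secret, algorithm='HS256')
    expired = jwt.encode({'sub': 'bob',
                          'exp': datetime.datetime.utcnow() - datetime.timedelta(minutes=5)},
                         secret, algorithm='HS256')
    jwt.decode(good, secret, algorithms=['HS256'])              # good token decodes
    try:
        jwt.decode(expired, secret, algorithms=['HS256'])       # expired -> error
    except jwt.ExpiredSignatureError:
        pass
    try:
        jwt.decode(good, u'other-secret', algorithms=['HS256'])  # invalid signature
    except jwt.InvalidTokenError:
        pass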
| Thetoxicarcade/ac | congredi/auth/test/test_token.py | Python | gpl-3.0 | 1,288 | 0.000776 |
# -*- coding: utf-8 -*-
from module.plugins.internal.DeadHoster import DeadHoster
class OronCom(DeadHoster):
__name__ = "OronCom"
__type__ = "hoster"
__version__ = "0.18"
__status__ = "stable"
__pattern__ = r'https?://(?:www\.)?oron\.com/\w{12}'
__config__ = [] # @TODO: Remove in 0.4.10
__description__ = """Oron.com hoster plugin"""
__license__ = "GPLv3"
__authors__ = [("chrox", "chrox@pyload.org"),
("DHMH", "DHMH@pyload.org")]
| rlindner81/pyload | module/plugins/hoster/OronCom.py | Python | gpl-3.0 | 493 | 0 |
# -*- coding: utf-8 -*-
#
# Copyright © 2012 - 2014 Michal Čihař <michal@cihar.com>
#
# This file is part of Weblate <http://weblate.org/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from south.db import db
from south.v2 import SchemaMigration
class Migration(SchemaMigration):
depends_on = (
('lang', '0003_auto__add_field_language_plural_type'),
)
def forwards(self, orm):
# Changing field 'SubProject.template'
db.alter_column('trans_subproject', 'template', self.gf('django.db.models.fields.CharField')(max_length=200, null=False))
def backwards(self, orm):
# Changing field 'SubProject.template'
db.alter_column('trans_subproject', 'template', self.gf('django.db.models.fields.CharField')(default='', max_length=200))
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'lang.language': {
'Meta': {'ordering': "['name']", 'object_name': 'Language'},
'code': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'direction': ('django.db.models.fields.CharField', [], {'default': "'ltr'", 'max_length': '3'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'nplurals': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'plural_type': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'pluralequation': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'})
},
'trans.change': {
'Meta': {'ordering': "['-timestamp']", 'object_name': 'Change'},
'action': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'translation': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['trans.Translation']"}),
'unit': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['trans.Unit']", 'null': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'})
},
'trans.check': {
'Meta': {'object_name': 'Check'},
'check': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '40', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ignore': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'language': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lang.Language']", 'null': 'True', 'blank': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['trans.Project']"})
},
'trans.comment': {
'Meta': {'ordering': "['timestamp']", 'object_name': 'Comment'},
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '40', 'db_index': 'True'}),
'comment': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lang.Language']", 'null': 'True', 'blank': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['trans.Project']"}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'trans.dictionary': {
'Meta': {'ordering': "['source']", 'object_name': 'Dictionary'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lang.Language']"}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['trans.Project']"}),
'source': ('django.db.models.fields.CharField', [], {'max_length': '200', 'db_index': 'True'}),
'target': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'trans.indexupdate': {
'Meta': {'object_name': 'IndexUpdate'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'source': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'unit': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['trans.Unit']"})
},
'trans.project': {
'Meta': {'ordering': "['name']", 'object_name': 'Project'},
'commit_message': ('django.db.models.fields.TextField', [], {'default': "'Translated using Weblate (%(language_name)s)\\n\\nCurrently translated at %(translated_percent)s%% (%(translated)s of %(total)s strings)'"}),
'committer_email': ('django.db.models.fields.EmailField', [], {'default': "'noreply@weblate.org'", 'max_length': '75'}),
'committer_name': ('django.db.models.fields.CharField', [], {'default': "'Weblate'", 'max_length': '200'}),
'enable_acl': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instructions': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'mail': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'merge_style': ('django.db.models.fields.CharField', [], {'default': "'merge'", 'max_length': '10'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'new_lang': ('django.db.models.fields.CharField', [], {'default': "'contact'", 'max_length': '10'}),
'push_on_commit': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'set_translation_team': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'web': ('django.db.models.fields.URLField', [], {'max_length': '200'})
},
'trans.subproject': {
'Meta': {'ordering': "['project__name', 'name']", 'unique_together': "(('project', 'name'), ('project', 'slug'))", 'object_name': 'SubProject'},
'allow_translation_propagation': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'branch': ('django.db.models.fields.CharField', [], {'default': "'master'", 'max_length': '50'}),
'file_format': ('django.db.models.fields.CharField', [], {'default': "'auto'", 'max_length': '50'}),
'filemask': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'locked': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['trans.Project']"}),
'push': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
'repo': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'report_source_bugs': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'repoweb': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'template': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'})
},
'trans.suggestion': {
'Meta': {'object_name': 'Suggestion'},
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '40', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lang.Language']"}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['trans.Project']"}),
'target': ('django.db.models.fields.TextField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'trans.translation': {
'Meta': {'ordering': "['language__name']", 'object_name': 'Translation'},
'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'filename': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'fuzzy': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lang.Language']"}),
'language_code': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '20'}),
'lock_time': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'lock_user': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'revision': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
'subproject': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['trans.SubProject']"}),
'total': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'}),
'translated': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'})
},
'trans.unit': {
'Meta': {'ordering': "['position']", 'object_name': 'Unit'},
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '40', 'db_index': 'True'}),
'comment': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'context': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'flags': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'fuzzy': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'position': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'previous_source': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'source': ('django.db.models.fields.TextField', [], {}),
'target': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'translated': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'translation': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['trans.Translation']"})
}
}
complete_apps = ['trans']
| paour/weblate | weblate/trans/migrations/0027_auto__chg_field_subproject_template.py | Python | gpl-3.0 | 15,556 | 0.007716 |
# @brief helper function to turn pkgconfig files into ASKAP package.info
#
# @copyright (c) 2006 CSIRO
# Australia Telescope National Facility (ATNF)
# Commonwealth Scientific and Industrial Research Organisation (CSIRO)
# PO Box 76, Epping NSW 1710, Australia
# atnf-enquiries@csiro.au
#
# This file is part of the ASKAP software distribution.
#
# The ASKAP software distribution is free software: you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the License
# or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
#
import os
import re
import string
def _replace_vars(lst, vardict):
    '''a generator to replace all-caps variables found in .pc files
:param lst: a list of strings
:param vardict: the dictionary of variable definitions
'''
    varrx = re.compile(r"\$\{([A-Z_]+)\}")
for item in lst:
vnames = varrx.search(item)
if vnames:
for v in vnames.groups():
dv = vardict.get(v, None)
if dv is not None:
replaced = varrx.sub(dv, item)
yield replaced
else:
yield item
def to_info(pkgfile=None):
'''To be executed from the build.py directory.
    This will extract the information from a pkgconfig file and
    write it to an ASKAPsoft 'package.info' file.
    This will only work if there is not already a 'package.info' file.
@param pkgfile The path to the .pc file. Default None, means
look for a '.pc' file in 'install/lib/pkgconfig'
'''
if os.path.exists("package.info"):
# nothing to do
return
if not pkgfile:
pcdir = "install/lib/pkgconfig"
if not os.path.exists(pcdir):
return
files = os.listdir(pcdir)
if not files:
# assume no dependencies
return
# there should only be one pc file
pkgfile = os.path.join(pcdir, files[0])
incdir = None
libdir = None
libs = []
outlibs=[]
varnames = {}
    varrx = re.compile(r"\$\{\w*prefix\}/")
f = file(pkgfile)
for line in f.readlines():
line = line.strip()
if line.count(":"):
k,v = line.split(":")
if k.startswith("Libs"):
ls = v.split()
for l in ls:
if l.startswith("-l"):
libs.append(l[2:])
if line.count("="):
k,v = line.split("=")
if varrx.search(v):
v = varrx.sub("", v)
varnames[k] = v
f.close()
outlibs = [i for i in _replace_vars(libs, varnames)]
incdir = [i for i in _replace_vars([varnames["includedir"]], varnames)][0]
if incdir == "include":
incdir = None
libdir = [i for i in _replace_vars([varnames["libdir"]], varnames)][0]
if libdir == "lib":
libdir = None
outtxt = "# Auto-generated by build.py - DO NOT MODIFY\n"
outtxt += "libs=%s\n" % string.join(outlibs)
if libdir: outtxt += "libdir=%s\n" % libdir
if incdir: outtxt += "incdir=%s\n" % incdir
f = file("package.info", "w+")
f.write(outtxt)
f.close()
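# Illustrative example (editor's addition; the file contents are hypothetical):
# given install/lib/pkgconfig/foo.pc containing
#     prefix=/usr/local
#     libdir=${prefix}/lib64
#     includedir=${prefix}/include/foo
#     Libs: -L${libdir} -lfoo -lm
# to_info() writes a package.info along the lines of
#     # Auto-generated by build.py - DO NOT MODIFY
#     libs=foo m
#     libdir=lib64
#     incdir=include/foo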
| ATNF/askapsdp | Tools/Dev/rbuild/askapdev/rbuild/utils/pkginfo.py | Python | gpl-2.0 | 3,666 | 0.004364 |
# -*- coding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import base64
import sys
import os
import os.path
import difflib
import urllib
import urllib2
import re
import unicodedata
import json
import logging
import string
import random
import cPickle as pickle
from hashlib import sha1
from datetime import datetime, timedelta
from collections import defaultdict
import shlex
import socket
from functools import partial
from cStringIO import StringIO
import tg
import genshi.template
try:
import cchardet as chardet
except ImportError:
import chardet
import pkg_resources
from formencode.validators import FancyValidator
from dateutil.parser import parse
from bson import ObjectId
from paste.deploy import appconfig
from pymongo.errors import InvalidId
from contextlib import contextmanager
from pylons import tmpl_context as c, app_globals as g
from pylons import response, request
from tg.decorators import before_validate
from formencode.variabledecode import variable_decode
import formencode
from jinja2 import Markup
from jinja2.filters import contextfilter, escape
from paste.deploy.converters import asbool, aslist, asint
from webhelpers import date, feedgenerator, html, number, misc, text
from webob.exc import HTTPUnauthorized
from allura.lib import exceptions as exc
from allura.lib import AsciiDammit
from allura.lib import utils
# import to make available to templates, don't delete:
from .security import has_access, is_allowed_by_role
log = logging.getLogger(__name__)
# http://stackoverflow.com/questions/2063213/regular-expression-for-validating-dns-label-host-name
# modified to remove capital A-Z and make length parameterized
# and not use lookbehind assertion since JS doesn't support that
dns_var_length = r'^(?![0-9]+$)(?!-)[a-z0-9-]{%s}[a-z0-9]$'
# project & tool names must comply to DNS since used in subdomains for emailing
re_mount_points = {
're_project_name': dns_var_length % '2,14', # validates project, subproject, and user names
're_tool_mount_point': dns_var_length % '0,62', # validates tool mount point names
're_tool_mount_point_fragment': r'[a-z][-a-z0-9]*',
're_relaxed_tool_mount_point': r'^[a-zA-Z0-9][-a-zA-Z0-9_\.\+]{0,62}$',
're_relaxed_tool_mount_point_fragment': r'[a-zA-Z0-9][-a-zA-Z0-9_\.\+]*'
}
# validates project, subproject, and user names
re_project_name = re.compile(re_mount_points['re_project_name'])
# validates tool mount point names
re_tool_mount_point = re.compile(re_mount_points['re_tool_mount_point'])
re_tool_mount_point_fragment = re.compile(re_mount_points['re_tool_mount_point_fragment'])
re_relaxed_tool_mount_point = re.compile(re_mount_points['re_relaxed_tool_mount_point'])
re_relaxed_tool_mount_point_fragment = re.compile(re_mount_points['re_relaxed_tool_mount_point_fragment'])
re_clean_vardec_key = re.compile(r'''\A
( # first part
\w+# name...
(-\d+)?# with optional -digits suffix
)
(\. # next part(s)
\w+# name...
(-\d+)?# with optional -digits suffix
)+
\Z''', re.VERBOSE)
# markdown escaping regexps
re_amp = re.compile(r'''
[&] # amp
(?= # look ahead for:
([a-zA-Z0-9]+;) # named HTML entity
|
(\#[0-9]+;) # decimal entity
|
(\#x[0-9A-F]+;) # hex entity
)
''', re.VERBOSE)
re_leading_spaces = re.compile(r'^[\t ]+', re.MULTILINE)
re_preserve_spaces = re.compile(r'''
[ ] # space
(?=[ ]) # lookahead for a space
''', re.VERBOSE)
re_angle_bracket_open = re.compile('<')
re_angle_bracket_close = re.compile('>')
md_chars_matcher_all = re.compile(r"([`\*_{}\[\]\(\)#!\\\.+-])")
def make_safe_path_portion(ustr, relaxed=True):
"""Return an ascii representation of ``ustr`` that conforms to mount point
naming :attr:`rules <re_tool_mount_point_fragment>`.
Will return an empty string if no char in ``ustr`` is latin1-encodable.
    :param relaxed: Use relaxed mount point naming rules (allows more
        characters; see :attr:`re_relaxed_tool_mount_point_fragment`).
:returns: The converted string.
"""
regex = (re_relaxed_tool_mount_point_fragment if relaxed else
re_tool_mount_point_fragment)
ustr = really_unicode(ustr)
s = ustr.encode('latin1', 'ignore')
s = AsciiDammit.asciiDammit(s)
if not relaxed:
s = s.lower()
s = '-'.join(regex.findall(s))
s = s.replace('--', '-')
return s
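# Editor's sketch (hypothetical helper, not called anywhere): characters that do
# not fit the mount point rules are dropped and the remaining runs joined by '-'.
def _safe_path_example():
    assert make_safe_path_portion(u'My Tool!') == 'My-Tool'
    assert make_safe_path_portion(u'My Tool!', relaxed=False) == 'my-tool'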
def escape_json(data):
    return json.dumps(data).replace('<', '\\u003C')
def monkeypatch(*objs):
def patchem(func):
for obj in objs:
setattr(obj, func.__name__, func)
return patchem
def urlquote(url, safe="/"):
try:
return urllib.quote(str(url), safe=safe)
except UnicodeEncodeError:
return urllib.quote(url.encode('utf-8'), safe=safe)
def urlquoteplus(url, safe=""):
try:
return urllib.quote_plus(str(url), safe=safe)
except UnicodeEncodeError:
return urllib.quote_plus(url.encode('utf-8'), safe=safe)
def _attempt_encodings(s, encodings):
if s is None:
return u''
for enc in encodings:
try:
if enc is None:
return unicode(s) # try default encoding
else:
return unicode(s, enc)
except (UnicodeDecodeError, LookupError):
pass
# Return the repr of the str -- should always be safe
return unicode(repr(str(s)))[1:-1]
def really_unicode(s):
# Try to guess the encoding
def encodings():
yield None
yield 'utf-8'
yield chardet.detect(s[:1024])['encoding']
yield chardet.detect(s)['encoding']
yield 'latin-1'
return _attempt_encodings(s, encodings())
def find_user(email):
from allura import model as M
return M.User.by_email_address(email)
def find_project(url_path):
from allura import model as M
for n in M.Neighborhood.query.find():
if url_path.strip("/").startswith(n.url_prefix.strip("/")):
break
else:
return None, url_path
# easily off-by-one, might be better to join together everything but
# url_prefix
project_part = n.shortname_prefix + url_path[len(n.url_prefix):]
parts = project_part.split('/')
length = len(parts)
while length:
shortname = '/'.join(parts[:length])
p = M.Project.query.get(shortname=shortname, deleted=False,
neighborhood_id=n._id)
if p:
return p, parts[length:]
length -= 1
return None, url_path.split('/')
def make_neighborhoods(ids):
return _make_xs('Neighborhood', ids)
def make_projects(ids):
return _make_xs('Project', ids)
def make_users(ids):
return _make_xs('User', ids)
def make_roles(ids):
return _make_xs('ProjectRole', ids)
def _make_xs(X, ids):
from allura import model as M
X = getattr(M, X)
ids = list(ids)
results = dict(
(r._id, r)
for r in X.query.find(dict(_id={'$in': ids})))
result = (results.get(i) for i in ids)
return (r for r in result if r is not None)
def make_app_admin_only(app):
from allura.model.auth import ProjectRole
admin_role = ProjectRole.by_name('Admin', app.project)
for ace in [ace for ace in app.acl if ace.role_id != admin_role._id]:
app.acl.remove(ace)
@contextmanager
def push_config(obj, **kw):
saved_attrs = {}
new_attrs = []
for k, v in kw.iteritems():
try:
saved_attrs[k] = getattr(obj, k)
except AttributeError:
new_attrs.append(k)
setattr(obj, k, v)
try:
yield obj
finally:
for k, v in saved_attrs.iteritems():
setattr(obj, k, v)
for k in new_attrs:
delattr(obj, k)
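# Editor's sketch (hypothetical helper, not called anywhere): push_config saves
# existing attributes, sets the overrides, and restores (or removes) them on exit.
def _push_config_example():
    class Cfg(object):
        debug = False
    cfg = Cfg()
    with push_config(cfg, debug=True, extra='x'):
        assert cfg.debug is True and cfg.extra == 'x'
    assert cfg.debug is False and not hasattr(cfg, 'extra')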
def sharded_path(name, num_parts=2):
parts = [
name[:i + 1]
for i in range(num_parts)]
return '/'.join(parts)
def set_context(project_shortname_or_id, mount_point=None, app_config_id=None, neighborhood=None):
from allura import model
try:
p = model.Project.query.get(_id=ObjectId(str(project_shortname_or_id)))
except InvalidId:
p = None
if p is None and type(project_shortname_or_id) != ObjectId:
if neighborhood is None:
raise TypeError('neighborhood is required; it must not be None')
if not isinstance(neighborhood, model.Neighborhood):
n = model.Neighborhood.query.get(name=neighborhood)
if n is None:
try:
n = model.Neighborhood.query.get(
_id=ObjectId(str(neighborhood)))
except InvalidId:
pass
if n is None:
raise exc.NoSuchNeighborhoodError(
"Couldn't find neighborhood %s" %
repr(neighborhood))
neighborhood = n
query = dict(shortname=project_shortname_or_id,
neighborhood_id=neighborhood._id)
p = model.Project.query.get(**query)
if p is None:
raise exc.NoSuchProjectError("Couldn't find project %s nbhd %s" %
(project_shortname_or_id, neighborhood))
c.project = p
if app_config_id is None:
c.app = p.app_instance(mount_point)
else:
if isinstance(app_config_id, basestring):
app_config_id = ObjectId(app_config_id)
app_config = model.AppConfig.query.get(_id=app_config_id)
c.app = p.app_instance(app_config)
@contextmanager
def push_context(project_id, mount_point=None, app_config_id=None, neighborhood=None):
project = getattr(c, 'project', ())
app = getattr(c, 'app', ())
set_context(project_id, mount_point, app_config_id, neighborhood)
try:
yield
finally:
if project == ():
del c.project
else:
c.project = project
if app == ():
del c.app
else:
c.app = app
def encode_keys(d):
'''Encodes the unicode keys of d, making the result
a valid kwargs argument'''
return dict(
(k.encode('utf-8'), v)
for k, v in d.iteritems())
def vardec(fun):
def vardec_hook(remainder, params):
new_params = variable_decode(dict(
(k, v) for k, v in params.items()
if re_clean_vardec_key.match(k)))
params.update(new_params)
before_validate(vardec_hook)(fun)
return fun
def convert_bools(conf, prefix=''):
'''
For a given dict, automatically convert any true/false string values into bools.
Only applies to keys starting with the prefix.
:param dict conf:
:param str prefix:
:return: dict
'''
def convert_value(val):
if isinstance(val, basestring):
if val.strip().lower() == 'true':
return True
elif val.strip().lower() == 'false':
return False
return val
return {
k: (convert_value(v) if k.startswith(prefix) else v)
for k, v in conf.iteritems()
}
def nonce(length=4):
return sha1(ObjectId().binary + os.urandom(10)).hexdigest()[:length]
def cryptographic_nonce(length=40):
hex_format = '%.2x' * length
return hex_format % tuple(map(ord, os.urandom(length)))
def random_password(length=20, chars=string.ascii_uppercase + string.digits):
return ''.join(random.choice(chars) for x in range(length))
def ago(start_time, show_date_after=7):
"""
Return time since starting time as a rounded, human readable string.
E.g., "3 hours ago"
"""
if start_time is None:
return 'unknown'
granularities = ['century', 'decade', 'year', 'month', 'day', 'hour', 'minute', 'second']
end_time = datetime.utcnow()
if show_date_after is not None and end_time - start_time > timedelta(days=show_date_after):
return start_time.strftime('%Y-%m-%d')
while True:
granularity = granularities.pop()
ago = date.distance_of_time_in_words(start_time, end_time, granularity, round=True)
rounded_to_one_granularity = 'and' not in ago
if rounded_to_one_granularity:
break
return ago + ' ago'
def ago_ts(timestamp):
return ago(datetime.utcfromtimestamp(timestamp))
def ago_string(s):
try:
return ago(parse(s, ignoretz=True))
except (ValueError, AttributeError):
return 'unknown'
class DateTimeConverter(FancyValidator):
def _to_python(self, value, state):
try:
return parse(value)
except ValueError:
if self.if_invalid != formencode.api.NoDefault:
return self.if_invalid
else:
raise
def _from_python(self, value, state):
return value.isoformat()
def absurl(url):
"""
Given a root-relative URL, return a full URL including protocol and host
"""
if url is None:
return None
if '://' in url:
return url
host = tg.config['base_url'].rstrip('/')
return host + url
def diff_text(t1, t2, differ=None):
t1_lines = t1.replace('\r', '').split('\n')
t2_lines = t2.replace('\r', '').split('\n')
t1_words = []
for line in t1_lines:
for word in line.split(' '):
t1_words.append(word)
t1_words.append('\n')
t2_words = []
for line in t2_lines:
for word in line.split(' '):
t2_words.append(word)
t2_words.append('\n')
if differ is None:
differ = difflib.SequenceMatcher(None, t1_words, t2_words)
result = []
for tag, i1, i2, j1, j2 in differ.get_opcodes():
if tag in ('delete', 'replace'):
result += ['<del>'] + t1_words[i1:i2] + ['</del>']
if tag in ('insert', 'replace'):
result += ['<ins>'] + t2_words[j1:j2] + ['</ins>']
if tag == 'equal':
result += t1_words[i1:i2]
return ' '.join(result).replace('\n', '<br/>\n')
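# Editor's sketch (hypothetical helper, not called anywhere): diff_text marks
# word-level changes with <del>/<ins> and turns newlines into <br/>.
def _diff_text_example():
    html = diff_text('the quick fox', 'the slow fox')
    # -> 'the <del> quick </del> <ins> slow </ins> fox <br/>\n'
    return html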
def gen_message_id(_id=None):
if not _id:
_id = nonce(40)
if getattr(c, 'project', None):
parts = c.project.url().split('/')[1:-1]
else:
parts = ['mail']
if getattr(c, 'app', None):
addr = '%s.%s' % (_id, c.app.config.options['mount_point'])
else:
addr = _id
return '%s@%s.%s' % (
addr, '.'.join(reversed(parts)), tg.config['domain'])
class ProxiedAttrMeta(type):
def __init__(cls, name, bases, dct):
for v in dct.itervalues():
if isinstance(v, attrproxy):
v.cls = cls
class attrproxy(object):
cls = None
def __init__(self, *attrs):
self.attrs = attrs
def __repr__(self):
return '<attrproxy on %s for %s>' % (
self.cls, self.attrs)
def __get__(self, obj, klass=None):
if obj is None:
obj = klass
for a in self.attrs:
obj = getattr(obj, a)
return proxy(obj)
def __getattr__(self, name):
if self.cls is None:
return promised_attrproxy(lambda: self.cls, name)
return getattr(
attrproxy(self.cls, *self.attrs),
name)
class promised_attrproxy(attrproxy):
def __init__(self, promise, *attrs):
super(promised_attrproxy, self).__init__(*attrs)
self._promise = promise
def __repr__(self):
return '<promised_attrproxy for %s>' % (self.attrs,)
def __getattr__(self, name):
cls = self._promise()
return getattr(cls, name)
class proxy(object):
def __init__(self, obj):
self._obj = obj
def __getattr__(self, name):
return getattr(self._obj, name)
def __call__(self, *args, **kwargs):
return self._obj(*args, **kwargs)
class fixed_attrs_proxy(proxy):
"""
    On attribute lookup, if a keyword parameter matching the attribute name was
    provided during object construction, returns its value. Otherwise proxies
    to obj.
"""
def __init__(self, obj, **kw):
self._obj = obj
for k, v in kw.iteritems():
setattr(self, k, v)
def render_genshi_plaintext(template_name, **template_vars):
assert os.path.exists(template_name)
fd = open(template_name)
try:
tpl_text = fd.read()
finally:
fd.close()
filepath = os.path.dirname(template_name)
tt = genshi.template.NewTextTemplate(tpl_text,
filepath=filepath, filename=template_name)
stream = tt.generate(**template_vars)
return stream.render(encoding='utf-8').decode('utf-8')
@tg.expose(content_type='text/plain')
def json_validation_error(controller, **kwargs):
result = dict(status='Validation Error',
errors=c.validation_exception.unpack_errors(),
value=c.validation_exception.value,
params=kwargs)
response.status = 400
return json.dumps(result, indent=2)
def pop_user_notifications(user=None):
from allura import model as M
if user is None:
user = c.user
mbox = M.Mailbox.query.get(user_id=user._id, is_flash=True)
if mbox:
notifications = M.Notification.query.find(
dict(_id={'$in': mbox.queue}))
mbox.queue = []
mbox.queue_empty = True
for n in notifications:
# clean it up so it doesn't hang around
M.Notification.query.remove({'_id': n._id})
yield n
def config_with_prefix(d, prefix):
'''Return a subdictionary keys with a given prefix,
with the prefix stripped
'''
plen = len(prefix)
return dict((k[plen:], v) for k, v in d.iteritems()
if k.startswith(prefix))
@contextmanager
def twophase_transaction(*engines):
connections = [
e.contextual_connect()
for e in engines]
txns = []
to_rollback = []
try:
for conn in connections:
txn = conn.begin_twophase()
txns.append(txn)
to_rollback.append(txn)
yield
to_rollback = []
for txn in txns:
txn.prepare()
to_rollback.append(txn)
for txn in txns:
txn.commit()
except:
for txn in to_rollback:
txn.rollback()
raise
class log_action(object):
extra_proto = dict(
action=None,
action_type=None,
tool_type=None,
tool_mount=None,
project=None,
neighborhood=None,
username=None,
url=None,
ip_address=None)
def __init__(self, logger, action):
self._logger = logger
self._action = action
def log(self, level, message, *args, **kwargs):
kwargs = dict(kwargs)
extra = kwargs.setdefault('extra', {})
meta = kwargs.pop('meta', {})
kwpairs = extra.setdefault('kwpairs', {})
for k, v in meta.iteritems():
kwpairs['meta_%s' % k] = v
extra.update(self._make_extra())
self._logger.log(level, self._action + ': ' + message, *args, **kwargs)
def info(self, message, *args, **kwargs):
self.log(logging.INFO, message, *args, **kwargs)
def debug(self, message, *args, **kwargs):
self.log(logging.DEBUG, message, *args, **kwargs)
def error(self, message, *args, **kwargs):
self.log(logging.ERROR, message, *args, **kwargs)
def critical(self, message, *args, **kwargs):
self.log(logging.CRITICAL, message, *args, **kwargs)
    def exception(self, message, *args, **kwargs):
        self.log(logging.ERROR, message, *args, **kwargs)
    def warning(self, message, *args, **kwargs):
        self.log(logging.WARNING, message, *args, **kwargs)
warn = warning
def _make_extra(self):
result = dict(self.extra_proto, action=self._action)
try:
if hasattr(c, 'app') and c.app:
result['tool_type'] = c.app.config.tool_name
result['tool_mount'] = c.app.config.options['mount_point']
if hasattr(c, 'project') and c.project:
result['project'] = c.project.shortname
result['neighborhood'] = c.project.neighborhood.name
if hasattr(c, 'user') and c.user:
result['username'] = c.user.username
else:
result['username'] = '*system'
try:
result['url'] = request.url
result['ip_address'] = utils.ip_address(request)
except TypeError:
pass
return result
except:
self._logger.warning(
'Error logging to rtstats, some info may be missing', exc_info=True)
return result
def paging_sanitizer(limit, page, total_count=sys.maxint, zero_based_pages=True):
"""Return limit, page - both converted to int and constrained to
valid ranges based on total_count.
Useful for sanitizing limit and page query params.
"""
limit = max(int(limit), 1)
limit = min(limit, asint(tg.config.get('limit_param_max', 500)))
max_page = (total_count / limit) + (1 if total_count % limit else 0)
max_page = max(0, max_page - (1 if zero_based_pages else 0))
page = min(max(int(page or 0), (0 if zero_based_pages else 1)), max_page)
return limit, page
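# Editor's sketch (hypothetical helper, not called anywhere): out-of-range query
# params are clamped; 23 items at 10 per page gives zero-based pages 0-2.
def _paging_sanitizer_example():
    assert paging_sanitizer(10, 99, total_count=23) == (10, 2)
    assert paging_sanitizer(0, 0, total_count=23) == (1, 0)   # limit forced to >= 1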
def _add_inline_line_numbers_to_text(txt):
markup_text = '<div class="codehilite"><pre>'
for line_num, line in enumerate(txt.splitlines(), 1):
markup_text = markup_text + \
'<span id="l%s" class="code_block"><span class="lineno">%s</span> %s</span>' % (
line_num, line_num, line)
markup_text = markup_text + '</pre></div>'
return markup_text
def _add_table_line_numbers_to_text(txt):
def _prepend_whitespaces(num, max_num):
num, max_num = str(num), str(max_num)
diff = len(max_num) - len(num)
return ' ' * diff + num
def _len_to_str_column(l, start=1):
max_num = l + start
return '\n'.join(map(_prepend_whitespaces, range(start, max_num), [max_num] * l))
lines = txt.splitlines(True)
linenumbers = '<td class="linenos"><div class="linenodiv"><pre>' + \
_len_to_str_column(len(lines)) + '</pre></div></td>'
markup_text = '<table class="codehilitetable"><tbody><tr>' + \
linenumbers + '<td class="code"><div class="codehilite"><pre>'
for line_num, line in enumerate(lines, 1):
markup_text = markup_text + \
'<span id="l%s" class="code_block">%s</span>' % (line_num, line)
markup_text = markup_text + '</pre></div></td></tr></tbody></table>'
return markup_text
INLINE = 'inline'
TABLE = 'table'
def render_any_markup(name, txt, code_mode=False, linenumbers_style=TABLE):
"""
    renders markdown using allura enhancements if file is in markdown format
renders any other markup format using the pypeline
Returns jinja-safe text
"""
if txt == '':
txt = '<p><em>Empty File</em></p>'
else:
fmt = g.pypeline_markup.can_render(name)
if fmt == 'markdown':
txt = g.markdown.convert(really_unicode(txt))
else:
txt = g.pypeline_markup.render(name, txt)
if not fmt:
if code_mode and linenumbers_style == INLINE:
txt = _add_inline_line_numbers_to_text(txt)
elif code_mode and linenumbers_style == TABLE:
txt = _add_table_line_numbers_to_text(txt)
else:
txt = '<pre>%s</pre>' % txt
return Markup(txt)
# copied from jinja2 dev
# latest release, 2.6, implements this incorrectly
# can remove and use jinja2 implementation after upgrading to 2.7
def do_filesizeformat(value, binary=False):
"""Format the value like a 'human-readable' file size (i.e. 13 kB,
4.1 MB, 102 Bytes, etc). Per default decimal prefixes are used (Mega,
Giga, etc.), if the second parameter is set to `True` the binary
prefixes are used (Mebi, Gibi).
"""
bytes = float(value)
base = binary and 1024 or 1000
prefixes = [
(binary and 'KiB' or 'kB'),
(binary and 'MiB' or 'MB'),
(binary and 'GiB' or 'GB'),
(binary and 'TiB' or 'TB'),
(binary and 'PiB' or 'PB'),
(binary and 'EiB' or 'EB'),
(binary and 'ZiB' or 'ZB'),
(binary and 'YiB' or 'YB')
]
if bytes == 1:
return '1 Byte'
elif bytes < base:
return '%d Bytes' % bytes
else:
for i, prefix in enumerate(prefixes):
unit = base ** (i + 2)
if bytes < unit:
return '%.1f %s' % ((base * bytes / unit), prefix)
return '%.1f %s' % ((base * bytes / unit), prefix)
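# Editor's sketch (hypothetical helper, not called anywhere): decimal vs. binary prefixes.
def _filesizeformat_examples():
    assert do_filesizeformat(300) == '300 Bytes'
    assert do_filesizeformat(1000) == '1.0 kB'
    assert do_filesizeformat(1024, binary=True) == '1.0 KiB'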
def nl2br_jinja_filter(value):
result = '<br>\n'.join(escape(line) for line in value.split('\n'))
return Markup(result)
def log_if_changed(artifact, attr, new_val, message):
"""Set `artifact.attr` to `new_val` if changed. Add AuditLog record."""
from allura import model as M
if not hasattr(artifact, attr):
return
if getattr(artifact, attr) != new_val:
M.AuditLog.log(message)
setattr(artifact, attr, new_val)
def get_tool_packages(tool_name):
"Return package for given tool (e.g. 'forgetracker' for 'tickets')"
from allura.app import Application
app = g.entry_points['tool'].get(tool_name.lower())
if not app:
return []
classes = [c for c in app.mro() if c not in (Application, object)]
return [cls.__module__.split('.')[0] for cls in classes]
def get_first(d, key):
"""Return value for d[key][0] if d[key] is a list with elements, else return d[key].
Useful to retrieve values from solr index (e.g. `title` and `text` fields),
which are stored as lists.
"""
v = d.get(key)
if isinstance(v, list):
return v[0] if len(v) > 0 else None
return v
def datetimeformat(value, format='%Y-%m-%d %H:%M:%S'):
return value.strftime(format)
@contextmanager
def log_output(log):
class Writer(object):
def __init__(self, func):
self.func = func
self.closed = False
def write(self, buf):
self.func(buf)
def flush(self):
pass
_stdout = sys.stdout
_stderr = sys.stderr
sys.stdout = Writer(log.info)
sys.stderr = Writer(log.error)
try:
yield log
finally:
sys.stdout = _stdout
sys.stderr = _stderr
def topological_sort(items, partial_order):
"""Perform topological sort.
items is a list of items to be sorted.
partial_order is a list of pairs. If pair (a,b) is in it, it means
that item a should appear before item b.
Returns a list of the items in one of the possible orders, or None
if partial_order contains a loop.
Modified from: http://www.bitformation.com/art/python_toposort.html
"""
# Original topological sort code written by Ofer Faigon
# (www.bitformation.com) and used with permission
def add_arc(graph, fromnode, tonode):
"""Add an arc to a graph. Can create multiple arcs.
The end nodes must already exist."""
graph[fromnode].append(tonode)
# Update the count of incoming arcs in tonode.
graph[tonode][0] = graph[tonode][0] + 1
# step 1 - create a directed graph with an arc a->b for each input
# pair (a,b).
# The graph is represented by a dictionary. The dictionary contains
# a pair item:list for each node in the graph. /item/ is the value
# of the node. /list/'s 1st item is the count of incoming arcs, and
# the rest are the destinations of the outgoing arcs. For example:
# {'a':[0,'b','c'], 'b':[1], 'c':[1]}
# represents the graph: c <-- a --> b
# The graph may contain loops and multiple arcs.
# Note that our representation does not contain reference loops to
# cause GC problems even when the represented graph contains loops,
# because we keep the node names rather than references to the nodes.
graph = defaultdict(lambda: [0])
for a, b in partial_order:
add_arc(graph, a, b)
# Step 2 - find all roots (nodes with zero incoming arcs).
roots = [n for n in items if graph[n][0] == 0]
roots.reverse() # keep sort stable
# step 3 - repeatedly emit a root and remove it from the graph. Removing
# a node may convert some of the node's direct children into roots.
# Whenever that happens, we append the new roots to the list of
# current roots.
sorted = []
while roots:
# If len(roots) is always 1 when we get here, it means that
# the input describes a complete ordering and there is only
# one possible output.
# When len(roots) > 1, we can choose any root to send to the
# output; this freedom represents the multiple complete orderings
# that satisfy the input restrictions. We arbitrarily take one of
# the roots using pop(). Note that for the algorithm to be efficient,
# this operation must be done in O(1) time.
root = roots.pop()
sorted.append(root)
for child in graph[root][1:]:
graph[child][0] = graph[child][0] - 1
if graph[child][0] == 0:
roots.append(child)
del graph[root]
if len(graph) > 0:
# There is a loop in the input.
return None
return sorted
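# Editor's sketch (hypothetical helper, not called anywhere): 'a' must precede
# 'b' and 'c', and 'b' must precede 'c', so the only valid order is a, b, c;
# a cycle makes the sort return None.
def _topological_sort_example():
    order = topological_sort(['c', 'b', 'a'], [('a', 'b'), ('a', 'c'), ('b', 'c')])
    assert order == ['a', 'b', 'c']
    assert topological_sort(['a', 'b'], [('a', 'b'), ('b', 'a')]) is None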
@contextmanager
def ming_config(**conf):
"""Temporarily swap in a new ming configuration, restoring the previous
one when the contextmanager exits.
:param \*\*conf: keyword arguments defining the new ming configuration
"""
import ming
from ming.session import Session
datastores = Session._datastores
try:
ming.configure(**conf)
yield
finally:
Session._datastores = datastores
for name, session in Session._registry.iteritems():
session.bind = datastores.get(name, None)
session._name = name
@contextmanager
def ming_config_from_ini(ini_path):
"""Temporarily swap in a new ming configuration, restoring the previous
one when the contextmanager exits.
:param ini_path: Path to ini file containing the ming configuration
"""
root = pkg_resources.get_distribution('allura').location
conf = appconfig('config:%s' % os.path.join(root, ini_path))
with ming_config(**conf):
yield
def split_select_field_options(field_options):
try:
# shlex have problems with parsing unicode,
# it's better to pass properly encoded byte-string
field_options = shlex.split(field_options.encode('utf-8'))
# convert splitted string back to unicode
field_options = map(really_unicode, field_options)
except ValueError:
field_options = field_options.split()
# After regular split field_options might contain a " characters,
# which would break html when rendered inside tag's value attr.
# Escaping doesn't help here, 'cause it breaks EasyWidgets' validation,
# so we're getting rid of those.
field_options = [o.replace('"', '') for o in field_options]
return field_options
@contextmanager
def notifications_disabled(project, disabled=True):
"""Temporarily disable email notifications on a project.
"""
orig = project.notifications_disabled
try:
project.notifications_disabled = disabled
yield
finally:
project.notifications_disabled = orig
@contextmanager
def null_contextmanager(returning=None, *args, **kw):
"""A no-op contextmanager.
"""
yield returning
class exceptionless(object):
'''Decorator making the decorated function return 'error_result' on any
exceptions rather than propagating exceptions up the stack
'''
def __init__(self, error_result, log=None):
self.error_result = error_result
self.log = log
def __call__(self, fun):
fname = 'exceptionless(%s)' % fun.__name__
def inner(*args, **kwargs):
try:
return fun(*args, **kwargs)
except Exception as e:
if self.log:
self.log.exception(
'Error calling %s(args=%s, kwargs=%s): %s',
fname, args, kwargs, str(e))
return self.error_result
inner.__name__ = fname
return inner
def urlopen(url, retries=3, codes=(408, 500, 502, 503, 504), timeout=None):
"""Open url, optionally retrying if an error is encountered.
Socket and other IO errors will always be retried if retries > 0.
HTTP errors are retried if the error code is passed in ``codes``.
    :param retries: Number of times to retry.
:param codes: HTTP error codes that should be retried.
"""
attempts = 0
while True:
try:
return urllib2.urlopen(url, timeout=timeout)
except IOError as e:
no_retry = isinstance(e, urllib2.HTTPError) and e.code not in codes
if attempts < retries and not no_retry:
attempts += 1
continue
else:
try:
url_string = url.get_full_url() # if url is Request obj
except Exception:
url_string = url
if timeout is None:
timeout = socket.getdefaulttimeout()
log.exception(
'Failed after %s retries on url with a timeout of %s: %s: %s',
attempts, timeout, url_string, e)
raise e
def plain2markdown(txt, preserve_multiple_spaces=False, has_html_entities=False):
if not has_html_entities:
        # prevent &foo; and &#123; from becoming HTML entities
        txt = re_amp.sub('&amp;', txt)
# avoid accidental 4-space indentations creating code blocks
if preserve_multiple_spaces:
txt = txt.replace('\t', ' ' * 4)
txt = re_preserve_spaces.sub(' ', txt)
else:
txt = re_leading_spaces.sub('', txt)
try:
# try to use html2text for most of the escaping
import html2text
html2text.BODY_WIDTH = 0
txt = html2text.escape_md_section(txt, snob=True)
except ImportError:
# fall back to just escaping any MD-special chars
txt = md_chars_matcher_all.sub(r"\\\1", txt)
    # prevent < and > from becoming tags
    txt = re_angle_bracket_open.sub('&lt;', txt)
    txt = re_angle_bracket_close.sub('&gt;', txt)
return txt
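# Illustrative note (editor's addition): the escaping above keeps plain text
# inert when the result is later fed to markdown.  Roughly,
#     plain2markdown('see <b>5 * 3</b> &amp; more')
# preserves '&amp;' as a literal entity reference ('&amp;amp;'), turns '<b>'
# into '&lt;b&gt;', and backslash-escapes markdown-significant characters such
# as '*' (the exact escaping depends on whether html2text is installed).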
def iter_entry_points(group, *a, **kw):
"""Yields entry points that have not been disabled in the config.
    If ``group`` is "allura" (Allura tool entry points) or one of its subgroups
(e.g. "allura.phone"), this function also checks for multiple entry points
with the same name. If there are multiple entry points with the same name,
and one of them is a subclass of the other(s), it will be yielded, and the
other entry points with that name will be ignored. If a subclass is not
found, an ImportError will be raised.
This treatment of "allura" and "allura.*" entry points allows tool authors
to subclass another tool while reusing the original entry point name.
"""
def active_eps():
disabled = aslist(
tg.config.get('disable_entry_points.' + group), sep=',')
return [ep for ep in pkg_resources.iter_entry_points(group, *a, **kw)
if ep.name not in disabled]
def unique_eps(entry_points):
by_name = defaultdict(list)
for ep in entry_points:
by_name[ep.name].append(ep)
for name, eps in by_name.iteritems():
ep_count = len(eps)
if ep_count == 1:
yield eps[0]
else:
yield subclass(eps)
def subclass(entry_points):
loaded = dict((ep, ep.load()) for ep in entry_points)
for ep, cls in loaded.iteritems():
others = loaded.values()[:]
others.remove(cls)
if all([issubclass(cls, other) for other in others]):
return ep
raise ImportError('Ambiguous [allura] entry points detected. ' +
'Multiple entry points with name "%s".' % entry_points[0].name)
is_allura = group == 'allura' or group.startswith('allura.')
return iter(unique_eps(active_eps()) if is_allura else active_eps())
# http://stackoverflow.com/a/1060330/79697
def daterange(start_date, end_date):
for n in range(int((end_date - start_date).days)):
yield start_date + timedelta(n)
@contextmanager
def login_overlay(exceptions=None):
"""
Override the default behavior of redirecting to the auth.login_url and
instead display an overlay with content from auth.login_fragment_url.
This is to allow pages that require authentication for any actions but
not for the initial view to be more apparent what you will get once
logged in.
This should be wrapped around call to `require_access()` (presumably in
the `_check_security()` method on a controller). The `exceptions` param
can be given a list of exposed views to leave with the original behavior.
For example::
class MyController(BaseController);
def _check_security(self):
with login_overlay(exceptions=['process']):
require_access(self.neighborhood, 'register')
@expose
def index(self, *args, **kw):
return {}
@expose
def list(self, *args, **kw):
return {}
@expose
def process(self, *args, **kw):
return {}
This would show the overlay to unauthenticated users who visit `/`
or `/list` but would perform the normal redirect when `/process` is
visited.
"""
try:
yield
except HTTPUnauthorized:
if exceptions:
for exception in exceptions:
if request.path.rstrip('/').endswith('/%s' % exception):
raise
c.show_login_overlay = True
def get_filter(ctx, filter_name):
"""
Gets a named Jinja2 filter, passing through
any context requested by the filter.
"""
filter_ = ctx.environment.filters[filter_name]
if getattr(filter_, 'contextfilter', False):
return partial(filter_, ctx)
elif getattr(filter_, 'evalcontextfilter', False):
return partial(filter_, ctx.eval_ctx)
    elif getattr(filter_, 'environmentfilter', False):
        return partial(filter_, ctx.environment)
    else:
        # Plain filters need no extra context passed through.
        return filter_
def unidiff(old, new):
    """Returns the unified diff between `old` and `new`."""
return '\n'.join(difflib.unified_diff(
a=old.splitlines(),
b=new.splitlines(),
fromfile='old',
tofile='new',
lineterm=''))
def auditlog_user(message, *args, **kwargs):
"""
Create an audit log entry for a user, including the IP address
:param str message:
:param user: a :class:`allura.model.auth.User`
"""
from allura import model as M
ip_address = utils.ip_address(request)
message = 'IP Address: {}\nUser-Agent: {}\n'.format(ip_address, request.user_agent) + message
if c.user and kwargs.get('user') and kwargs['user'] != c.user:
message = 'Done by user: {}\n'.format(c.user.username) + message
return M.AuditLog.log_user(message, *args, **kwargs)
def get_user_status(user):
'''
Get user status based on disabled and pending attrs
:param user: a :class:`allura.model.auth.User`
'''
disabled = user.disabled
pending = user.pending
if not disabled and not pending:
return 'enabled'
elif disabled:
return 'disabled'
elif pending:
return 'pending'
def rate_limit(cfg_opt, artifact_count, start_date, exception=None):
"""
Check the various config-defined artifact creation rate limits, and if any
are exceeded, raise exception.
:param artifact_count: a number or callable (for lazy evaluation)
"""
if exception is None:
exception = exc.RatelimitError
rate_limits = json.loads(tg.config.get(cfg_opt, '{}'))
now = datetime.utcnow()
for rate, count in rate_limits.items():
age = now - start_date
age = (age.microseconds + (age.seconds + age.days * 24 * 3600) * 10 ** 6) / 10 ** 6
if age < int(rate):
if callable(artifact_count):
artifact_count = artifact_count()
if artifact_count >= count:
raise exception()
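# Illustrative sketch (the option name and numbers below are assumptions, not
# taken from the original docs): the config option holds a JSON object mapping
# an account age in seconds to the artifact count at which creation is blocked,
# e.g. ``project.rate_limits = {"3600": 20, "86400": 50}`` raises for a user
# registered less than an hour ago who already has 20 artifacts, or less than
# a day ago with 50.
def _example_rate_limit(ticket_count, user_registration_date):  # pragma: no cover - illustrative only
    rate_limit('project.rate_limits', ticket_count, user_registration_date)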
def base64uri(content_or_image, image_format='PNG', mimetype='image/png', windows_line_endings=False):
if hasattr(content_or_image, 'save'):
output = StringIO()
content_or_image.save(output, format=image_format)
content = output.getvalue()
else:
content = content_or_image
if windows_line_endings:
content = content.replace('\n', '\r\n')
data = base64.b64encode(content)
return 'data:{};base64,{}'.format(mimetype, data)
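# Example (illustrative only): plain strings are encoded as-is, while objects
# with a ``save`` method (e.g. PIL images) are serialized first.
def _example_base64uri():  # pragma: no cover - illustrative only
    return base64uri('hello', mimetype='text/plain')  # 'data:text/plain;base64,aGVsbG8='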
def slugify(name, allow_periods=False):
"""
Returns a tuple with slug and lowered slug based on name
"""
dash_collapse_pattern = r'[^.\w]+' if allow_periods else r'[^\w]+'
slug = re.sub(r'(^-)|(-$)', '', # leading - or trailing - gets removed
unicode(
re.sub(dash_collapse_pattern, '-', # replace non ". alphanum_" sequences into single -
re.sub(r"'", '', # remove any apostrophes
unicodedata.normalize('NFKD', name)
.encode('ascii', 'ignore')))
))
return slug, slug.lower()
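# Example (illustrative only): apostrophes are dropped and other punctuation
# collapses into single dashes.
def _example_slugify():  # pragma: no cover - illustrative only
    return slugify(u"Don't Panic!")  # ('Dont-Panic', 'dont-panic')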
email_re = re.compile(r'(([a-z0-9_]|\-|\.)+)@([\w\.-]+)', re.IGNORECASE)
def hide_private_info(message):
if asbool(tg.config.get('hide_private_info', 'true')) and message:
return email_re.sub(r'\1@...', message)
else:
return message
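# Example (illustrative only): with the default-on ``hide_private_info``
# option, the domain part of email addresses is masked.
def _example_hide_private_info():  # pragma: no cover - illustrative only
    return hide_private_info('reported by alice@example.com')  # 'reported by alice@...'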
| heiths/allura | Allura/allura/lib/helpers.py | Python | apache-2.0 | 42,752 | 0.000678 |
# Python3
from solution1 import multiplicationTable as f
qa = [
(5,
[[1, 2, 3, 4, 5],
[2, 4, 6, 8, 10],
[3, 6, 9, 12, 15],
[4, 8, 12, 16, 20],
[5, 10, 15, 20, 25]]),
(2,
[[1, 2],
[2, 4]]),
(4,
[[1, 2, 3, 4],
[2, 4, 6, 8],
[3, 6, 9, 12],
[4, 8, 12, 16]]),
(10,
[[ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10],
[ 2, 4, 6, 8, 10, 12, 14, 16, 18, 20],
[ 3, 6, 9, 12, 15, 18, 21, 24, 27, 30],
[ 4, 8, 12, 16, 20, 24, 28, 32, 36, 40],
[ 5, 10, 15, 20, 25, 30, 35, 40, 45, 50],
[ 6, 12, 18, 24, 30, 36, 42, 48, 54, 60],
[ 7, 14, 21, 28, 35, 42, 49, 56, 63, 70],
[ 8, 16, 24, 32, 40, 48, 56, 64, 72, 80],
[ 9, 18, 27, 36, 45, 54, 63, 72, 81, 90],
[10, 20, 30, 40, 50, 60, 70, 80, 90, 100]]),
(15,
[[ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15],
[ 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30],
[ 3, 6, 9, 12, 15, 18, 21, 24, 27, 30, 33, 36, 39, 42, 45],
[ 4, 8, 12, 16, 20, 24, 28, 32, 36, 40, 44, 48, 52, 56, 60],
[ 5, 10, 15, 20, 25, 30, 35, 40, 45, 50, 55, 60, 65, 70, 75],
[ 6, 12, 18, 24, 30, 36, 42, 48, 54, 60, 66, 72, 78, 84, 90],
[ 7, 14, 21, 28, 35, 42, 49, 56, 63, 70, 77, 84, 91, 98, 105],
[ 8, 16, 24, 32, 40, 48, 56, 64, 72, 80, 88, 96, 104, 112, 120],
[ 9, 18, 27, 36, 45, 54, 63, 72, 81, 90, 99, 108, 117, 126, 135],
[10, 20, 30, 40, 50, 60, 70, 80, 90, 100, 110, 120, 130, 140, 150],
[11, 22, 33, 44, 55, 66, 77, 88, 99, 110, 121, 132, 143, 154, 165],
[12, 24, 36, 48, 60, 72, 84, 96, 108, 120, 132, 144, 156, 168, 180],
[13, 26, 39, 52, 65, 78, 91, 104, 117, 130, 143, 156, 169, 182, 195],
[14, 28, 42, 56, 70, 84, 98, 112, 126, 140, 154, 168, 182, 196, 210],
[15, 30, 45, 60, 75, 90, 105, 120, 135, 150, 165, 180, 195, 210, 225]])
]
for *q, a in qa:
for i, e in enumerate(q):
print('input{0}: {1}'.format(i + 1, e))
ans = f(*q)
if ans != a:
print(' [failed]')
print(' output:', ans)
print(' expected:', a)
else:
print(' [ok]')
print(' output:', ans)
print()
| RevansChen/online-judge | Codefights/arcade/python-arcade/level-5/34.Multiplication-Table/Python/test.py | Python | mit | 2,320 | 0.007759 |
#!/usr/bin/env python
# Cloudeebus
#
# Copyright (C) 2012 Intel Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Luc Yriarte <luc.yriarte@intel.com>
# Christophe Guiraud <christophe.guiraud@intel.com>
# Frederic Paut <frederic.paut@intel.com>
# Patrick Ohly <patrick.ohly@intel.com>
#
from setuptools import setup
setup(name = "cloudeebus",
version = "0.6.1",
description = "Javascript-DBus bridge",
author = "Luc Yriarte, Christophe Guiraud, Frederic Paut, Patrick Ohly",
author_email = "luc.yriarte@intel.com, christophe.guiraud@intel.com, frederic.paut@intel.com, patrick.ohly@intel.com",
url = "https://github.com/01org/cloudeebus/wiki",
license = "http://www.apache.org/licenses/LICENSE-2.0",
scripts = ["cloudeebus/cloudeebus.py","cloudeebus/cloudeebusengine.py"],
packages = ["cloudeebus"],
data_files = [("cloudeebus" ,["AUTHORS", "README.md", "LICENSE"]),
('/etc/dbus-1/system.d/', ['org.cloudeebus.conf'])],
platforms = ("Any"),
install_requires = ["setuptools", "autobahn==0.5.8"],
classifiers = ["License :: OSI Approved :: Apache Software License",
"Development Status :: 3 - Alpha",
"Environment :: Console",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Internet",
"Topic :: Software Development :: Libraries"],
keywords = "cloudeebus autobahn websocket dbus javascript bridge")
| 01org/cloudeebus | setup.py | Python | apache-2.0 | 1,949 | 0.040021 |
from __future__ import absolute_import
from django.conf import settings
from django.contrib import messages
from django.http import HttpResponseRedirect, HttpResponse
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext_lazy as _
from sentry import constants
from sentry.models import OrganizationMemberType
from sentry.plugins import plugins, NotificationPlugin
from sentry.web.forms.projects import NotificationSettingsForm
from sentry.web.frontend.base import ProjectView
OK_SETTINGS_SAVED = _('Your settings were saved successfully.')
class ProjectNotificationsView(ProjectView):
required_access = OrganizationMemberType.ADMIN
def _iter_plugins(self):
for plugin in plugins.all(version=1):
if not isinstance(plugin, NotificationPlugin):
continue
yield plugin
def _handle_enable_plugin(self, request, project):
plugin = plugins.get(request.POST['plugin'])
plugin.enable(project)
messages.add_message(
request, messages.SUCCESS,
constants.OK_PLUGIN_ENABLED.format(name=plugin.get_title()),
)
def _handle_disable_plugin(self, request, project):
plugin = plugins.get(request.POST['plugin'])
plugin.disable(project)
messages.add_message(
request, messages.SUCCESS,
constants.OK_PLUGIN_DISABLED.format(name=plugin.get_title()),
)
def handle(self, request, organization, team, project):
op = request.POST.get('op')
if op == 'enable':
self._handle_enable_plugin(request, project)
return HttpResponseRedirect(request.path)
elif op == 'disable':
self._handle_disable_plugin(request, project)
return HttpResponseRedirect(request.path)
if op == 'save-settings':
general_form = NotificationSettingsForm(
data=request.POST,
prefix='general',
initial={
'subject_prefix': project.get_option(
'mail:subject_prefix', settings.EMAIL_SUBJECT_PREFIX),
},
)
if general_form.is_valid():
project.update_option(
'mail:subject_prefix', general_form.cleaned_data['subject_prefix'])
messages.add_message(
request, messages.SUCCESS,
OK_SETTINGS_SAVED)
return HttpResponseRedirect(request.path)
else:
general_form = NotificationSettingsForm(
prefix='general',
initial={
'subject_prefix': project.get_option(
'mail:subject_prefix', settings.EMAIL_SUBJECT_PREFIX),
},
)
enabled_plugins = []
other_plugins = []
for plugin in self._iter_plugins():
if plugin.is_enabled(project):
content = plugin.get_notification_doc_html()
form = plugin.project_conf_form
if form is not None:
view = plugin.configure(request, project=project)
if isinstance(view, HttpResponse):
return view
enabled_plugins.append((plugin, mark_safe(content + view)))
elif content:
enabled_plugins.append((plugin, mark_safe(content)))
else:
other_plugins.append(plugin)
context = {
'page': 'notifications',
'enabled_plugins': enabled_plugins,
'other_plugins': other_plugins,
'general_form': general_form,
}
return self.respond('sentry/project-notifications.html', context)
| Kryz/sentry | src/sentry/web/frontend/project_notifications.py | Python | bsd-3-clause | 3,802 | 0.000263 |
##########################################################################
#
# Copyright (c) 2016, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import unittest
import IECore
import Gaffer
import GafferUITest
import GafferArnold
import GafferArnoldUI
class ArnoldShaderUITest( GafferUITest.TestCase ) :
def testMetadata( self ) :
shader = GafferArnold.ArnoldShader()
shader.loadShader( "noise" )
self.assertEqual(
Gaffer.Metadata.value( shader["parameters"]["octaves"], "nodule:type" ),
""
)
self.assertEqual(
Gaffer.Metadata.value( shader["parameters"]["amplitude"], "nodule:type" ),
"GafferUI::StandardNodule"
)
self.assertEqual(
Gaffer.Metadata.value( shader["parameters"]["octaves"], "plugValueWidget:type" ),
None
)
self.assertEqual(
Gaffer.Metadata.value( shader["parameters"]["coord_space"], "plugValueWidget:type" ),
"GafferUI.PresetsPlugValueWidget"
)
self.assertEqual(
Gaffer.Metadata.value( shader["parameters"]["coord_space"], "presetNames" ),
IECore.StringVectorData( [ "world", "object", "Pref" ] ),
)
self.assertEqual(
Gaffer.Metadata.value( shader["parameters"]["coord_space"], "presetValues" ),
Gaffer.Metadata.value( shader["parameters"]["coord_space"], "presetNames" ),
)
def testLightMetadata( self ) :
light = GafferArnold.ArnoldLight()
with IECore.CapturingMessageHandler() as mh :
light.loadShader( "skydome_light" )
## \todo Here we're suppressing warnings about not being
# able to create plugs for some parameters. In many cases
# these are parameters like "matrix" and "time_samples"
# that we don't actually want to represent anyway. We should
# add a mechanism for ignoring irrelevant parameters (perhaps
# using custom gaffer.something metadata in additional Arnold
# .mtd files), and then remove this suppression.
for message in mh.messages :
self.assertEqual( message.level, mh.Level.Warning )
self.assertTrue( "Unsupported parameter" in message.message )
self.assertEqual(
Gaffer.Metadata.value( light["parameters"]["cast_shadows"], "nodule:type" ),
""
)
self.assertEqual(
Gaffer.Metadata.value( light["parameters"]["color"], "nodule:type" ),
"GafferUI::StandardNodule"
)
self.assertEqual(
Gaffer.Metadata.value( light["parameters"]["format"], "plugValueWidget:type" ),
"GafferUI.PresetsPlugValueWidget"
)
self.assertEqual(
Gaffer.Metadata.value( light["parameters"]["format"], "presetNames" ),
IECore.StringVectorData( [ "mirrored_ball", "angular", "latlong" ] ),
)
self.assertEqual(
Gaffer.Metadata.value( light["parameters"]["format"], "presetValues" ),
Gaffer.Metadata.value( light["parameters"]["format"], "presetNames" ),
)
if __name__ == "__main__":
unittest.main()
| chippey/gaffer | python/GafferArnoldUITest/ArnoldShaderUITest.py | Python | bsd-3-clause | 4,413 | 0.027646 |
# Copyright 2021 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A mixin class that delegates another Trackable to be used when saving.
This is intended to be used with wrapper classes that cannot directly proxy the
wrapped object (e.g. with wrapt.ObjectProxy), because there are inner attributes
that cannot be exposed.
The Wrapper class itself cannot contain any Trackable children, as only the
delegated Trackable will be saved to checkpoint and SavedModel.
This class will "disappear" and be replaced with the wrapped inner Trackable
after a cycle of SavedModel saving and loading, unless the object is registered
and loaded with Keras.
"""
from tensorflow.python.util.tf_export import tf_export
@tf_export("__internal__.tracking.DelegatingTrackableMixin", v1=[])
class DelegatingTrackableMixin(object):
"""A mixin that delegates all Trackable methods to another trackable object.
DO NOT USE THIS UNLESS YOU ARE THE KERAS LOSS SCALE OPTIMIZER.
This class must be used with multiple inheritance. A class that subclasses
Trackable can also subclass this class, which causes all Trackable methods to
be delegated to the trackable object passed in the constructor.
A subclass can use this mixin to appear as if it were the trackable passed to
the constructor, from a Checkpoint's perspective. LossScaleOptimizer uses this
mixin, so that the checkpoint format for a LossScaleOptimizer is identical to
the checkpoint format for a normal optimizer. This allows a model to be saved
with a normal Optimizer and restored with a LossScaleOptimizer, or vice versa.
The only difference in checkpoint format is that the loss scale is also saved
with a LossScaleOptimizer.
"""
def __init__(self, trackable_obj):
self._trackable = trackable_obj
# pylint: disable=protected-access
@property
def _setattr_tracking(self):
return self._trackable._setattr_tracking
@_setattr_tracking.setter
def _setattr_tracking(self, value):
self._trackable._setattr_tracking = value
@property
def _update_uid(self):
return self._trackable._update_uid
@_update_uid.setter
def _update_uid(self, value):
self._trackable._update_uid = value
@property
def _unconditional_checkpoint_dependencies(self):
return self._trackable._unconditional_checkpoint_dependencies
@property
def _unconditional_dependency_names(self):
return self._trackable._unconditional_dependency_names
@property
def _name_based_restores(self):
return self._trackable._name_based_restores
def _maybe_initialize_trackable(self):
return self._trackable._maybe_initialize_trackable()
@property
def _object_identifier(self):
return self._trackable._object_identifier
@property
def _tracking_metadata(self):
return self._trackable._tracking_metadata
def _no_dependency(self, *args, **kwargs):
return self._trackable._no_dependency(*args, **kwargs)
def _name_based_attribute_restore(self, *args, **kwargs):
return self._trackable._name_based_attribute_restore(*args, **kwargs)
@property
def _checkpoint_dependencies(self):
return self._trackable._checkpoint_dependencies
@property
def _deferred_dependencies(self):
return self._trackable._deferred_dependencies
def _lookup_dependency(self, *args, **kwargs):
return self._trackable._lookup_dependency(*args, **kwargs)
def _add_variable_with_custom_getter(self, *args, **kwargs):
return self._trackable._add_variable_with_custom_getter(*args, **kwargs)
def _preload_simple_restoration(self, *args, **kwargs):
return self._trackable._preload_simple_restoration(*args, **kwargs)
def _track_trackable(self, *args, **kwargs): # pylint: disable=redefined-outer-name
return self._trackable._track_trackable(*args, **kwargs)
def _handle_deferred_dependencies(self, name, trackable): # pylint: disable=redefined-outer-name
return self._trackable._handle_deferred_dependencies(name, trackable)
def _restore_from_checkpoint_position(self, checkpoint_position):
return self._trackable._restore_from_checkpoint_position(
checkpoint_position)
def _single_restoration_from_checkpoint_position(self, *args, **kwargs):
return self._trackable._single_restoration_from_checkpoint_position(
*args, **kwargs)
def _gather_saveables_for_checkpoint(self, *args, **kwargs):
return self._trackable._gather_saveables_for_checkpoint(*args, **kwargs)
def _list_extra_dependencies_for_serialization(self, *args, **kwargs):
return self._trackable._list_extra_dependencies_for_serialization(
*args, **kwargs)
def _list_functions_for_serialization(self, *args, **kwargs):
return self._trackable._list_functions_for_serialization(*args, **kwargs)
def _trackable_children(self, *args, **kwargs):
return self._trackable._trackable_children(*args, **kwargs)
def _deserialization_dependencies(self, *args, **kwargs):
return self._trackable._deserialization_dependencies(*args, **kwargs)
def _export_to_saved_model_graph(self, *args, **kwargs):
return self._trackable._export_to_saved_model_graph(*args, **kwargs)
# pylint: enable=protected-access
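# Illustrative usage sketch (an assumption, not part of the original file;
# `_Wrapper` and `inner` are hypothetical names). A wrapper class mixes this
# class in *alongside* a Trackable subclass and hands the wrapped object to
# the mixin's constructor:
#
#   class _Wrapper(DelegatingTrackableMixin, some_trackable_base_class):
#
#     def __init__(self, inner):
#       DelegatingTrackableMixin.__init__(self, inner)
#
# Checkpoints written from `_Wrapper(inner)` then restore into a bare `inner`
# object (and vice versa), because every hook above forwards to the delegate.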
| tensorflow/tensorflow | tensorflow/python/training/tracking/base_delegate.py | Python | apache-2.0 | 5,796 | 0.008282 |
#! /usr/bin/env python3
from abc import ABCMeta, abstractmethod
import csv
import os
import re
import subprocess
import sys
import plaid2text.config_manager as cm
from plaid2text.interact import separator_completer, prompt
class Entry:
"""
This represents one entry (transaction) from Plaid.
"""
def __init__(self, transaction, options={}):
"""Parameters:
transaction: a plaid transaction
options: from CLI args and config file
"""
self.options = options
self.transaction = transaction
# TODO: document this
if 'addons' in options:
self.transaction['addons'] = dict(
(k, fields[v - 1]) for k, v in options.addons.items() # NOQA
)
else:
self.transaction['addons'] = {}
# The id for the transaction
self.transaction['transaction_id'] = self.transaction['transaction_id']
# Get the date and convert it into a ledger/beancount formatted date.
d8 = self.transaction['date']
d8_format = options.output_date_format if options and 'output_date_format' in options else '%Y-%m-%d'
self.transaction['transaction_date'] = d8.date().strftime(d8_format)
self.desc = self.transaction['name']
# amnt = self.transaction['amount']
self.transaction['currency'] = options.currency
# self.transaction['debit_amount'] = amnt
# self.transaction['debit_currency'] = currency
# self.transaction['credit_amount'] = ''
# self.transaction['credit_currency'] = ''
self.transaction['posting_account'] = options.posting_account
self.transaction['cleared_character'] = options.cleared_character
if options.template_file:
with open(options.template_file, 'r', encoding='utf-8') as f:
self.transaction['transaction_template'] = f.read()
else:
self.transaction['transaction_template'] = ''
def query(self):
"""
We print a summary of the record on the screen, and allow you to
choose the destination account.
"""
return '{0} {1:<40} {2}'.format(
self.transaction['date'],
self.desc,
self.transaction['amount']
)
def journal_entry(self, payee, account, tags):
"""
Return a formatted journal entry recording this Entry against
the specified posting account
"""
if self.options.output_format == 'ledger':
def_template = cm.DEFAULT_LEDGER_TEMPLATE
else:
def_template = cm.DEFAULT_BEANCOUNT_TEMPLATE
if self.transaction['transaction_template']:
template = (self.transaction['transaction_template'])
else:
template = (def_template)
if self.options.output_format == 'beancount':
ret_tags = ' {}'.format(tags) if tags else ''
else:
ret_tags = ' ; {}'.format(tags) if tags else ''
format_data = {
'associated_account': account,
'payee': payee,
'tags': ret_tags
}
format_data.update(self.transaction['addons'])
format_data.update(self.transaction)
return template.format(**format_data)
class OutputRenderer(metaclass=ABCMeta):
"""
Base class for output rendering.
"""
def __init__(self, transactions, options):
self.transactions = transactions
self.possible_accounts = set([])
self.possible_payees = set([])
self.possible_tags = set([])
self.mappings = []
self.map_file = options.mapping_file
self.read_mapping_file()
self.journal_file = options.journal_file
self.journal_lines = []
self.options = options
self.get_possible_accounts_and_payees()
# Add payees/accounts/tags from mappings
for m in self.mappings:
self.possible_payees.add(m[1])
self.possible_accounts.add(m[2])
if m[3]:
if options.output_format == 'ledger':
self.possible_tags.update(set(m[3][0].split(':')))
else:
self.possible_tags.update([t.replace('#', '') for t in m[3][0].split(' ')])
def read_mapping_file(self):
"""
        Mappings are a CSV file with at least three columns.
        The first is a string to be matched against an entry description.
        The second is the payee against which such entries should be posted.
        The third is the account against which such entries should be posted.
        Any remaining columns are treated as tags to attach to the entry.
If the match string begins and ends with '/' it is taken to be a
regular expression.
"""
if not self.map_file:
return
with open(self.map_file, 'r', encoding='utf-8', newline='') as f:
map_reader = csv.reader(f)
for row in map_reader:
if len(row) > 1:
pattern = row[0].strip()
payee = row[1].strip()
account = row[2].strip()
tags = row[3:]
if pattern.startswith('/') and pattern.endswith('/'):
try:
pattern = re.compile(pattern[1:-1], re.I)
except re.error as e:
print(
"Invalid regex '{0}' in '{1}': {2}"
.format(pattern, self.map_file, e),
file=sys.stderr)
sys.exit(1)
self.mappings.append((pattern, payee, account, tags))
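    # Illustrative example of a mapping file (an assumption about typical
    # content, not shipped with the project): two literal matches, a regex
    # match, and an optional fourth column holding tags.
    #
    #   AMAZON MKTPLACE,Amazon,Expenses:Shopping
    #   /^UBER\s+TRIP/,Uber,Expenses:Transport,:transport:
    #   PAYCHECK ACME,Acme Corp,Income:Salary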
def append_mapping_file(self, desc, payee, account, tags):
if self.map_file:
with open(self.map_file, 'a', encoding='utf-8', newline='') as f:
writer = csv.writer(f)
ret_tags = tags if len(tags) > 0 else ''
writer.writerow([desc, payee, account, ret_tags])
def process_transactions(self, callback=None):
"""
Read transactions from Mongo (Plaid) and
process them. Writes Ledger/Beancount formatted
lines either to out_file or stdout.
Parameters:
callback: A function taking a single transaction update object to store
in the DB immediately after collecting the information from the user.
"""
out = self._process_plaid_transactions(callback=callback)
if self.options.headers_file:
headers = ''.join(open(self.options.headers_file, mode='r').readlines())
print(headers, file=self.options.outfile)
print(*self.journal_lines, sep='\n', file=self.options.outfile)
return out
def _process_plaid_transactions(self, callback=None):
"""Process plaid transaction and return beancount/ledger formatted
lines.
"""
out = []
for t in self.transactions:
entry = Entry(t, self.options)
payee, account, tags = self.get_payee_and_account(entry)
dic = {}
dic['transaction_id'] = t['transaction_id']
dic['tags'] = tags
dic['associated_account'] = account
dic['payee'] = payee
dic['posting_account'] = self.options.posting_account
out.append(dic)
# save the transactions into the database as they are processed
if callback: callback(dic)
self.journal_lines.append(entry.journal_entry(payee, account, tags))
return out
def prompt_for_value(self, text_prompt, values, default):
sep = ':' if text_prompt == 'Payee' else ' '
a = prompt(
'{} [{}]: '.format(text_prompt, default),
completer=separator_completer(values, sep=sep)
)
# Handle tag returning none if accepting
return a if (a or text_prompt == 'Tag') else default
def get_payee_and_account(self, entry):
payee = entry.desc
account = self.options.default_expense
tags = ''
found = False
# Try to match entry desc with mappings patterns
for m in self.mappings:
pattern = m[0]
if isinstance(pattern, str):
if entry.desc == pattern:
payee, account, tags = m[1], m[2], m[3]
found = True # do not break here, later mapping must win
else:
# If the pattern isn't a string it's a regex
if m[0].match(entry.desc):
payee, account, tags = m[1], m[2], m[3]
found = True
# Tags gets read in as a list, but just contains one string
if tags:
tags = tags[0]
modified = False
if self.options.quiet and found:
pass
else:
if self.options.clear_screen:
print('\033[2J\033[;H')
print('\n' + entry.query())
value = self.prompt_for_value('Payee', self.possible_payees, payee)
if value:
modified = modified if modified else value != payee
payee = value
value = self.prompt_for_value('Account', self.possible_accounts, account)
if value:
modified = modified if modified else value != account
account = value
if self.options.tags:
value = self.prompt_for_tags('Tag', self.possible_tags, tags)
if value:
modified = modified if modified else value != tags
tags = value
if not found or (found and modified):
# Add new or changed mapping to mappings and append to file
self.mappings.append((entry.desc, payee, account, tags))
self.append_mapping_file(entry.desc, payee, account, tags)
# Add new possible_values to possible values lists
self.possible_payees.add(payee)
self.possible_accounts.add(account)
return (payee, account, tags)
@abstractmethod
def tagify(self, value):
pass
@abstractmethod
def get_possible_accounts_and_payees(self):
pass
@abstractmethod
def prompt_for_tags(self, prompt, values, default):
pass
class LedgerRenderer(OutputRenderer):
def tagify(self, value):
if value.find(':') < 0 and value[0] != '[' and value[-1] != ']':
value = ':{0}:'.format(value.replace(' ', '-').replace(',', ''))
return value
def get_possible_accounts_and_payees(self):
if self.journal_file:
self.possible_payees = self._payees_from_ledger()
self.possible_accounts = self._accounts_from_ledger()
self.read_accounts_file()
def prompt_for_tags(self, prompt, values, default):
# tags = list(default[0].split(':'))
tags = [':{}:'.format(t) for t in default.split(':') if t] if default else []
value = self.prompt_for_value(prompt, values, ''.join(tags).replace('::', ':'))
while value:
if value[0] == '-':
value = self.tagify(value[1:])
if value in tags:
tags.remove(value)
else:
value = self.tagify(value)
if value not in tags:
tags.append(value)
value = self.prompt_for_value(prompt, values, ''.join(tags).replace('::', ':'))
return ''.join(tags).replace('::', ':')
def _payees_from_ledger(self):
return self._from_ledger('payees')
def _accounts_from_ledger(self):
return self._from_ledger('accounts')
def _from_ledger(self, command):
ledger = 'ledger'
for f in ['/usr/bin/ledger', '/usr/local/bin/ledger']:
if os.path.exists(f):
ledger = f
break
cmd = [ledger, '-f', self.journal_file, command]
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
(stdout_data, stderr_data) = p.communicate()
items = set()
for item in stdout_data.decode('utf-8').splitlines():
items.add(item)
return items
def read_accounts_file(self):
""" Process each line in the specified account file looking for account
definitions. An account definition is a line containing the word
'account' followed by a valid account name, e.g:
account Expenses
account Expenses:Utilities
All other lines are ignored.
"""
if not self.options.accounts_file:
return
accounts = []
        pattern = re.compile(r'^\s*account\s+([:A-Za-z0-9-_ ]+)$')
with open(self.options.accounts_file, 'r', encoding='utf-8') as f:
for line in f.readlines():
mo = pattern.match(line)
if mo:
accounts.append(mo.group(1))
self.possible_accounts.update(accounts)
class BeancountRenderer(OutputRenderer):
import beancount
def tagify(self, value):
# No spaces or commas allowed
return value.replace(' ', '-').replace(',', '')
def get_possible_accounts_and_payees(self):
if self.journal_file:
self._payees_and_accounts_from_beancount()
def _payees_and_accounts_from_beancount(self):
try:
payees = set()
accounts = set()
tags = set()
from beancount import loader
from beancount.core.data import Transaction, Open
import sys
entries, errors, options = loader.load_file(self.journal_file)
except Exception as e:
print(e.message, file=sys.stderr)
sys.exit(1)
else:
for e in entries:
if type(e) is Transaction:
if e.payee:
payees.add(e.payee)
if e.tags:
for t in e.tags:
tags.add(t)
if e.postings:
for p in e.postings:
accounts.add(p.account)
elif type(e) is Open:
accounts.add(e.account)
self.possible_accounts.update(accounts)
self.possible_tags.update(tags)
self.possible_payees.update(payees)
def prompt_for_tags(self, prompt, values, default):
        # Keep tags as a list of bare tag names; '#' is only prepended for display.
        tags = [t.lstrip('#') for t in default.split() if t] if default else []
        value = self.prompt_for_value(prompt, values, ' '.join(['#{}'.format(t) for t in tags]))
while value:
if value[0] == '-':
value = self.tagify(value[1:])
if value in tags:
tags.remove(value)
else:
value = self.tagify(value)
if value not in tags:
tags.append(value)
value = self.prompt_for_value(
prompt,
values,
' '.join(['#{}'.format(t) for t in tags])
)
return ' '.join(['#{}'.format(t) for t in tags])
| madhat2r/plaid2text | src/python/plaid2text/renderers.py | Python | gpl-3.0 | 15,244 | 0.00105 |
# Download the Python helper library from twilio.com/docs/python/install
import os
from twilio.rest import Client
# Your Account Sid and Auth Token from twilio.com/user/account
# To set up environmental variables, see http://twil.io/secure
account = os.environ['TWILIO_ACCOUNT_SID']
token = os.environ['TWILIO_AUTH_TOKEN']
client = Client(account, token)
service = client.chat.services("ISXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX").update(
friendly_name="NEW_FRIENDLY_NAME"
)
print(service.friendly_name)
| TwilioDevEd/api-snippets | ip-messaging/rest/services/update-service/update-service.6.x.py | Python | mit | 504 | 0 |
#!/usr/bin/python
"""
Resources:
http://code.google.com/p/pybluez/
http://lightblue.sourceforge.net/
http://code.google.com/p/python-bluetooth-scanner
"""
from __future__ import with_statement
import select
import logging
import bluetooth
import gobject
import util.misc as misc_utils
_moduleLogger = logging.getLogger(__name__)
class _BluetoothConnection(gobject.GObject):
__gsignals__ = {
'data_ready' : (
gobject.SIGNAL_RUN_LAST,
gobject.TYPE_NONE,
(),
),
'closed' : (
gobject.SIGNAL_RUN_LAST,
gobject.TYPE_NONE,
(),
),
}
def __init__(self, socket, addr, protocol):
gobject.GObject.__init__(self)
self._socket = socket
self._address = addr
self._dataId = gobject.io_add_watch (self._socket, gobject.IO_IN, self._on_data)
self._protocol = protocol
def close(self):
gobject.source_remove(self._dataId)
self._dataId = None
self._socket.close()
self._socket = None
self.emit("closed")
@property
def socket(self):
return self._socket
@property
def address(self):
return self._address
@property
def protocol(self):
return self._protocol
@misc_utils.log_exception(_moduleLogger)
def _on_data(self, source, condition):
self.emit("data_ready")
return True
gobject.type_register(_BluetoothConnection)
class _BluetoothListener(gobject.GObject):
__gsignals__ = {
'incoming_connection' : (
gobject.SIGNAL_RUN_LAST,
gobject.TYPE_NONE,
(gobject.TYPE_PYOBJECT, ),
),
'start_listening' : (
gobject.SIGNAL_RUN_LAST,
gobject.TYPE_NONE,
(),
),
'stop_listening' : (
gobject.SIGNAL_RUN_LAST,
gobject.TYPE_NONE,
(),
),
}
def __init__(self, protocol, timeout):
gobject.GObject.__init__(self)
self._timeout = timeout
self._protocol = protocol
self._socket = None
self._incomingId = None
def start(self):
assert self._socket is None and self._incomingId is None
self._socket = bluetooth.BluetoothSocket(self._protocol["transport"])
self._socket.settimeout(self._timeout)
self._socket.bind(("", bluetooth.PORT_ANY))
self._socket.listen(1)
self._incomingId = gobject.io_add_watch(
self._socket, gobject.IO_IN, self._on_incoming
)
bluetooth.advertise_service(self._socket, self._protocol["name"], self._protocol["uuid"])
self.emit("start_listening")
def stop(self):
if self._socket is None or self._incomingId is None:
return
gobject.source_remove(self._incomingId)
self._incomingId = None
bluetooth.stop_advertising(self._socket)
self._socket.close()
self._socket = None
self.emit("stop_listening")
@property
def isListening(self):
return self._socket is not None and self._incomingId is not None
@property
def socket(self):
assert self._socket is not None
return self._socket
@misc_utils.log_exception(_moduleLogger)
def _on_incoming(self, source, condition):
newSocket, (address, port) = self._socket.accept()
newSocket.settimeout(self._timeout)
connection = _BluetoothConnection(newSocket, address, self._protocol)
self.emit("incoming_connection", connection)
return True
gobject.type_register(_BluetoothListener)
class _DeviceDiscoverer(bluetooth.DeviceDiscoverer):
def __init__(self, timeout):
bluetooth.DeviceDiscoverer.__init__(self)
self._timeout = timeout
self._devices = []
self._devicesInProgress = []
@property
def devices(self):
return self._devices
def find_devices(self, *args, **kwds):
# Ensure we always start clean and is the reason we overroad this
self._devicesInProgress = []
newArgs = [self]
newArgs.extend(args)
bluetooth.DeviceDiscoverer.find_devices(*newArgs, **kwds)
def process_inquiry(self):
# The default impl calls into some hci code but an example used select,
# so going with the example
while self.is_inquiring or 0 < len(self.names_to_find):
# The whole reason for overriding this
_moduleLogger.debug("Event (%r, %r)"% (self.is_inquiring, self.names_to_find))
rfds = select.select([self], [], [], self._timeout)[0]
if self in rfds:
self.process_event()
@misc_utils.log_exception(_moduleLogger)
def device_discovered(self, address, deviceclass, name):
device = address, deviceclass, name
_moduleLogger.debug("Device Discovered %r" % (device, ))
self._devicesInProgress.append(device)
@misc_utils.log_exception(_moduleLogger)
def inquiry_complete(self):
_moduleLogger.debug("Inquiry Complete")
self._devices = self._devicesInProgress
class BluetoothBackend(gobject.GObject):
__gsignals__ = {
'login' : (
gobject.SIGNAL_RUN_LAST,
gobject.TYPE_NONE,
(),
),
'logout' : (
gobject.SIGNAL_RUN_LAST,
gobject.TYPE_NONE,
(),
),
'contacts_update' : (
gobject.SIGNAL_RUN_LAST,
gobject.TYPE_NONE,
(gobject.TYPE_PYOBJECT, ),
),
}
def __init__(self):
gobject.GObject.__init__(self)
self._disco = None
self._timeout = 8
self._listeners = {}
self._protocols = []
self._isListening = True
def add_protocol(self, protocol):
assert not self.is_logged_in()
self._protocols.append(protocol)
def login(self):
self._disco = _DeviceDiscoverer(self._timeout)
isListening = self._isListening
for protocol in self._protocols:
protoId = protocol["uuid"]
self._listeners[protoId] = _BluetoothListener(protocol, self._timeout)
if isListening:
self._listeners[protoId].start()
self.emit("login")
def logout(self):
for protocol in self._protocols:
protoId = protocol["uuid"]
listener = self._listeners[protoId]
			listener.stop()
self._listeners.clear()
self._disco.cancel_inquiry() # precaution
self.emit("logout")
def is_logged_in(self):
if self._listeners:
return True
else:
return False
def is_listening(self):
return self._isListening
def enable_listening(self, enable):
if enable:
for listener in self._listeners.itervalues():
assert not listener.isListening
for listener in self._listeners.itervalues():
listener.start()
else:
for listener in self._listeners.itervalues():
assert listener.isListening
for listener in self._listeners.itervalues():
listener.stop()
def get_contacts(self):
try:
self._disco.find_devices(
duration=self._timeout,
flush_cache = True,
lookup_names = True,
)
self._disco.process_inquiry()
except bluetooth.BluetoothError, e:
# lightblue does this, so I guess I will too
_moduleLogger.error("Error while getting contacts, attempting to cancel")
try:
self._disco.cancel_inquiry()
finally:
raise e
return self._disco.devices
def get_contact_services(self, address):
services = bluetooth.find_service(address = address)
return services
def connect(self, addr, transport, port):
sock = bluetooth.BluetoothSocket(transport)
sock.settimeout(self._timeout)
try:
sock.connect((addr, port))
except bluetooth.error, e:
sock.close()
raise
return _BluetoothConnection(sock, addr, "")
gobject.type_register(BluetoothBackend)
class BluetoothClass(object):
def __init__(self, description):
self.description = description
def __str__(self):
return self.description
MAJOR_CLASS = BluetoothClass("Major Class")
MAJOR_CLASS.MISCELLANEOUS = BluetoothClass("Miscellaneous")
MAJOR_CLASS.COMPUTER = BluetoothClass("Computer")
MAJOR_CLASS.PHONE = BluetoothClass("Phone")
MAJOR_CLASS.LAN = BluetoothClass("LAN/Network Access Point")
MAJOR_CLASS.AV = BluetoothClass("Audio/Video")
MAJOR_CLASS.PERIPHERAL = BluetoothClass("Peripheral")
MAJOR_CLASS.IMAGING = BluetoothClass("Imaging")
MAJOR_CLASS.UNCATEGORIZED = BluetoothClass("Uncategorized")
MAJOR_CLASS.MISCELLANEOUS.RESERVED = BluetoothClass("Reserved")
MAJOR_CLASS.COMPUTER.UNCATEGORIZED = BluetoothClass("Uncategorized, code for device not assigned")
MAJOR_CLASS.COMPUTER.DESKTOP = BluetoothClass("Desktop workstation")
MAJOR_CLASS.COMPUTER.SERVER = BluetoothClass("Server-class computer")
MAJOR_CLASS.COMPUTER.LAPTOP = BluetoothClass("Laptop")
MAJOR_CLASS.COMPUTER.HANDHELD = BluetoothClass("Handheld PC/PDA (clam shell)")
MAJOR_CLASS.COMPUTER.PALM_SIZE = BluetoothClass("Palm sized PC/PDA")
MAJOR_CLASS.COMPUTER.WEARABLE = BluetoothClass("Wearable computer (Watch sized)")
MAJOR_CLASS.COMPUTER.RESERVED = BluetoothClass("Reserved")
MAJOR_CLASS.PHONE.UNCATEGORIZED = BluetoothClass("Uncategorized, code for device not assigned")
MAJOR_CLASS.PHONE.CELLULAR = BluetoothClass("Cellular")
MAJOR_CLASS.PHONE.CORDLESS = BluetoothClass("Cordless")
MAJOR_CLASS.PHONE.SMART_PHONE = BluetoothClass("Smart phone")
MAJOR_CLASS.PHONE.MODEM = BluetoothClass("Wired modem or voice gateway")
MAJOR_CLASS.PHONE.ISDN = BluetoothClass("Common ISDN Access")
MAJOR_CLASS.PHONE.RESERVED = BluetoothClass("Reserved")
MAJOR_CLASS.LAN.UNCATEGORIZED = BluetoothClass("Uncategorized")
MAJOR_CLASS.LAN.RESERVED = BluetoothClass("Reserved")
MAJOR_CLASS.AV.UNCATEGORIZED = BluetoothClass("Uncategorized, code for device not assigned")
MAJOR_CLASS.AV.HEADSET = BluetoothClass("Device conforms to headset profile")
MAJOR_CLASS.AV.HANDS_FREE = BluetoothClass("Hands-free")
MAJOR_CLASS.AV.MICROPHONE = BluetoothClass("Microphone")
MAJOR_CLASS.AV.LOUDSPEAKER = BluetoothClass("Loudspeaker")
MAJOR_CLASS.AV.HEADPHONES = BluetoothClass("Headphones")
MAJOR_CLASS.AV.PORTABLE_AUDIO = BluetoothClass("Portable Audio")
MAJOR_CLASS.AV.CAR_AUDIO = BluetoothClass("Car Audio")
MAJOR_CLASS.AV.SET_TOP_BOX = BluetoothClass("Set-top box")
MAJOR_CLASS.AV.HIFI_AUDIO_DEVICE = BluetoothClass("HiFi Audio Device")
MAJOR_CLASS.AV.VCR = BluetoothClass("VCR")
MAJOR_CLASS.AV.VIDEO_CAMERA = BluetoothClass("Video Camera")
MAJOR_CLASS.AV.CAMCORDER = BluetoothClass("Camcorder")
MAJOR_CLASS.AV.VIDEO_MONITOR = BluetoothClass("Video Monitor")
MAJOR_CLASS.AV.VIDEO_DISPLAY = BluetoothClass("Video Display and Loudspeaker")
MAJOR_CLASS.AV.VIDEO_CONFERENCING = BluetoothClass("Video Conferencing")
MAJOR_CLASS.AV.GAMING = BluetoothClass("Gaming/Toy")
MAJOR_CLASS.AV.RESERVED = BluetoothClass("Reserved")
MAJOR_CLASS.PERIPHERAL.UNCATEGORIZED = BluetoothClass("Uncategorized, code for device not assigned")
MAJOR_CLASS.PERIPHERAL.JOYSTICK = BluetoothClass("Joystick")
MAJOR_CLASS.PERIPHERAL.GAMEPAD = BluetoothClass("Gamepad")
MAJOR_CLASS.PERIPHERAL.REMOTE_CONTROL = BluetoothClass("Remote Control")
MAJOR_CLASS.PERIPHERAL.SENSING_DEVICE = BluetoothClass("Sensing Device")
MAJOR_CLASS.PERIPHERAL.DIGITIZER_TABLET = BluetoothClass("Digitizer Tablet")
MAJOR_CLASS.PERIPHERAL.CARD_READER = BluetoothClass("Card Reader (e.g. SIM Card Reader)")
MAJOR_CLASS.PERIPHERAL.RESERVED = BluetoothClass("Reserved")
MAJOR_CLASS.IMAGING.UNCATEGORIZED = BluetoothClass("Uncategorized, code for device not assigned")
MAJOR_CLASS.IMAGING.DISPLAY = BluetoothClass("Display")
MAJOR_CLASS.IMAGING.CAMERA = BluetoothClass("Camera")
MAJOR_CLASS.IMAGING.SCANNER = BluetoothClass("Scanner")
MAJOR_CLASS.IMAGING.PRINTER = BluetoothClass("Printer")
MAJOR_CLASS.IMAGING.RESERVED = BluetoothClass("Reserved")
SERVICE_CLASS = BluetoothClass("Service Class")
SERVICE_CLASS.LIMITED = BluetoothClass("Limited Discoverable Mode")
SERVICE_CLASS.POSITIONING = BluetoothClass("Positioning (Location identification)")
SERVICE_CLASS.NETWORKING = BluetoothClass("Networking (LAN, Ad hoc, ...)")
SERVICE_CLASS.RENDERING = BluetoothClass("Rendering (Printing, speaking, ...)")
SERVICE_CLASS.CAPTURING = BluetoothClass("Capturing (Scanner, microphone, ...)")
SERVICE_CLASS.OBJECT_TRANSFER = BluetoothClass("Object Transfer (v-Inbox, v-Folder, ...)")
SERVICE_CLASS.AUDIO = BluetoothClass("Audio (Speaker, Microphone, Headset service, ...")
SERVICE_CLASS.TELEPHONY = BluetoothClass("Telephony (Cordless telephony, Modem, Headset service, ...)")
SERVICE_CLASS.INFORMATION = BluetoothClass("Information (WEB-server, WAP-server, ...)")
_ORDERED_MAJOR_CLASSES = (
MAJOR_CLASS.MISCELLANEOUS,
MAJOR_CLASS.COMPUTER,
MAJOR_CLASS.PHONE,
MAJOR_CLASS.LAN,
MAJOR_CLASS.AV,
MAJOR_CLASS.PERIPHERAL,
MAJOR_CLASS.IMAGING,
)
_SERVICE_CLASSES = (
(13 - 13, SERVICE_CLASS.LIMITED),
(16 - 13, SERVICE_CLASS.POSITIONING),
(17 - 13, SERVICE_CLASS.NETWORKING),
(18 - 13, SERVICE_CLASS.RENDERING),
(19 - 13, SERVICE_CLASS.CAPTURING),
(20 - 13, SERVICE_CLASS.OBJECT_TRANSFER),
(21 - 13, SERVICE_CLASS.AUDIO),
(22 - 13, SERVICE_CLASS.TELEPHONY),
(23 - 13, SERVICE_CLASS.INFORMATION),
)
def _parse_device_class(deviceclass):
# get some information out of the device class and display it.
# voodoo magic specified at:
#
# https://www.bluetooth.org/foundry/assignnumb/document/baseband
majorClass = (deviceclass >> 8) & 0xf
minorClass = (deviceclass >> 2) & 0x3f
serviceClasses = (deviceclass >> 13) & 0x7ff
return majorClass, minorClass, serviceClasses
def parse_device_class(deviceclass):
majorClassCode, minorClassCode, serviceClassCodes = _parse_device_class(deviceclass)
try:
majorClass = _ORDERED_MAJOR_CLASSES[majorClassCode]
except IndexError:
majorClass = MAJOR_CLASS.UNCATEGORIZED
serviceClasses = []
for bitpos, cls in _SERVICE_CLASSES:
if serviceClassCodes & (1 << bitpos):
serviceClasses.append(cls)
return majorClass, minorClassCode, serviceClasses
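# Worked example (illustrative only): the common smartphone class-of-device
# value 0x5a020c decodes as
#   parse_device_class(0x5a020c)
#   -> (MAJOR_CLASS.PHONE, 3, [NETWORKING, CAPTURING, OBJECT_TRANSFER, TELEPHONY])
# i.e. major class "Phone", minor class code 3 (smart phone), with the
# Networking, Capturing, Object Transfer and Telephony service bits set.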
| epage/telepathy-bluewire | src/protocol/backend.py | Python | lgpl-2.1 | 13,051 | 0.022067 |
# coding: utf-8
# Copyright 2014-2015 Álvaro Justen <https://github.com/turicas/rows/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
import tempfile
import unittest
import rows
import rows.plugins.txt
import utils
class PluginTxtTestCase(utils.RowsTestMixIn, unittest.TestCase):
filename = 'tests/data/all-field-types.txt'
encoding = 'utf-8'
def test_imports(self):
self.assertIs(rows.export_to_txt, rows.plugins.txt.export_to_txt)
def test_export_to_txt_filename(self):
temp = tempfile.NamedTemporaryFile(delete=False)
self.files_to_delete.append(temp.name)
rows.export_to_txt(utils.table, temp.name)
self.assert_file_contents_equal(temp.name, self.filename)
def test_export_to_txt_fobj(self):
# TODO: may test with codecs.open passing an encoding
temp = tempfile.NamedTemporaryFile(delete=False)
self.files_to_delete.append(temp.name)
rows.export_to_txt(utils.table, temp.file)
self.assert_file_contents_equal(temp.name, self.filename)
def test_export_to_txt_fobj_some_fields_only(self):
# TODO: this test may be inside `tests_operations.py` (testing
# `serialize` instead a plugin which calls it)
temp = tempfile.NamedTemporaryFile(delete=False)
self.files_to_delete.append(temp.name)
fobj = temp.file
rows.export_to_txt(utils.table, temp.file) # all fields
fobj.seek(0)
table_fields = utils.table.fields.keys()
expected_fields = table_fields
_, second_line = fobj.readline(), fobj.readline()
fields = [field.strip() for field in second_line.split('|')
if field.strip()]
self.assertEqual(expected_fields, fields)
expected_fields = table_fields[2:5]
self.assertNotEqual(expected_fields, table_fields)
fobj.seek(0)
rows.export_to_txt(utils.table, temp.file, field_names=expected_fields)
fobj.seek(0)
_, second_line = fobj.readline(), fobj.readline()
fields = [field.strip() for field in second_line.split('|')
if field.strip()]
self.assertEqual(expected_fields, fields)
| tilacog/rows | tests/tests_plugin_txt.py | Python | gpl-3.0 | 2,876 | 0.000696 |
from django.conf.urls import include, url
from django.contrib import admin
from django.conf import settings
from django.conf.urls.static import static
import profiles.urls
import accounts.urls
from . import views
urlpatterns = [
url(r'^$', views.HomePage.as_view(), name='home'),
url(r'^users/', include(profiles.urls, namespace='profiles')),
url(r'^admin/', include(admin.site.urls)),
url(r'^', include(accounts.urls, namespace='accounts')),
url(r'^post_url/$', views.HomePage.as_view(), name='post')
]
# User-uploaded files like profile pics need to be served in development
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
# Include django debug toolbar if DEBUG is on
if settings.DEBUG:
import debug_toolbar
urlpatterns += [
url(r'^__debug__/', include(debug_toolbar.urls)),
]
| Zedmor/powerball | src/powerball/urls.py | Python | mit | 852 | 0 |
#!/usr/bin/python
#
# Copyright (C) 2009, 2011 Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Script for unittesting the mcpu module"""
import unittest
import itertools
from ganeti import compat
from ganeti import mcpu
from ganeti import opcodes
from ganeti import cmdlib
from ganeti import locking
from ganeti import constants
from ganeti.constants import \
LOCK_ATTEMPTS_TIMEOUT, \
LOCK_ATTEMPTS_MAXWAIT, \
LOCK_ATTEMPTS_MINWAIT
import testutils
REQ_BGL_WHITELIST = compat.UniqueFrozenset([
opcodes.OpClusterActivateMasterIp,
opcodes.OpClusterDeactivateMasterIp,
opcodes.OpClusterDestroy,
opcodes.OpClusterPostInit,
opcodes.OpClusterRename,
opcodes.OpInstanceRename,
opcodes.OpNodeAdd,
opcodes.OpNodeRemove,
opcodes.OpTestAllocator,
])
class TestLockAttemptTimeoutStrategy(unittest.TestCase):
def testConstants(self):
tpa = mcpu.LockAttemptTimeoutStrategy._TIMEOUT_PER_ATTEMPT
self.assert_(len(tpa) > LOCK_ATTEMPTS_TIMEOUT / LOCK_ATTEMPTS_MAXWAIT)
self.assert_(sum(tpa) >= LOCK_ATTEMPTS_TIMEOUT)
self.assertTrue(LOCK_ATTEMPTS_TIMEOUT >= 1800,
msg="Waiting less than half an hour per priority")
self.assertTrue(LOCK_ATTEMPTS_TIMEOUT <= 3600,
msg="Waiting more than an hour per priority")
def testSimple(self):
strat = mcpu.LockAttemptTimeoutStrategy(_random_fn=lambda: 0.5,
_time_fn=lambda: 0.0)
prev = None
for i in range(len(strat._TIMEOUT_PER_ATTEMPT)):
timeout = strat.NextAttempt()
self.assert_(timeout is not None)
self.assert_(timeout <= LOCK_ATTEMPTS_MAXWAIT)
self.assert_(timeout >= LOCK_ATTEMPTS_MINWAIT)
self.assert_(prev is None or timeout >= prev)
prev = timeout
for _ in range(10):
self.assert_(strat.NextAttempt() is None)
class TestDispatchTable(unittest.TestCase):
def test(self):
for opcls in opcodes.OP_MAPPING.values():
if not opcls.WITH_LU:
continue
self.assertTrue(opcls in mcpu.Processor.DISPATCH_TABLE,
msg="%s missing handler class" % opcls)
# Check against BGL whitelist
lucls = mcpu.Processor.DISPATCH_TABLE[opcls]
if lucls.REQ_BGL:
self.assertTrue(opcls in REQ_BGL_WHITELIST,
msg=("%s not whitelisted for BGL" % opcls.OP_ID))
else:
self.assertFalse(opcls in REQ_BGL_WHITELIST,
msg=("%s whitelisted for BGL, but doesn't use it" %
opcls.OP_ID))
class TestProcessResult(unittest.TestCase):
def setUp(self):
self._submitted = []
self._count = itertools.count(200)
def _Submit(self, jobs):
job_ids = [self._count.next() for _ in jobs]
self._submitted.extend(zip(job_ids, jobs))
return job_ids
def testNoJobs(self):
for i in [object(), [], False, True, None, 1, 929, {}]:
self.assertEqual(mcpu._ProcessResult(NotImplemented, NotImplemented, i),
i)
def testDefaults(self):
src = opcodes.OpTestDummy()
res = mcpu._ProcessResult(self._Submit, src, cmdlib.ResultWithJobs([[
opcodes.OpTestDelay(),
opcodes.OpTestDelay(),
], [
opcodes.OpTestDelay(),
]]))
self.assertEqual(res, {
constants.JOB_IDS_KEY: [200, 201],
})
(_, (op1, op2)) = self._submitted.pop(0)
(_, (op3, )) = self._submitted.pop(0)
self.assertRaises(IndexError, self._submitted.pop)
for op in [op1, op2, op3]:
self.assertTrue("OP_TEST_DUMMY" in op.comment)
self.assertFalse(hasattr(op, "priority"))
self.assertFalse(hasattr(op, "debug_level"))
def testParams(self):
src = opcodes.OpTestDummy(priority=constants.OP_PRIO_HIGH,
debug_level=3)
res = mcpu._ProcessResult(self._Submit, src, cmdlib.ResultWithJobs([[
opcodes.OpTestDelay(priority=constants.OP_PRIO_LOW),
], [
opcodes.OpTestDelay(comment="foobar", debug_level=10),
]], other=True, value=range(10)))
self.assertEqual(res, {
constants.JOB_IDS_KEY: [200, 201],
"other": True,
"value": range(10),
})
(_, (op1, )) = self._submitted.pop(0)
(_, (op2, )) = self._submitted.pop(0)
self.assertRaises(IndexError, self._submitted.pop)
self.assertEqual(op1.priority, constants.OP_PRIO_LOW)
self.assertTrue("OP_TEST_DUMMY" in op1.comment)
self.assertEqual(op1.debug_level, 3)
self.assertEqual(op2.priority, constants.OP_PRIO_HIGH)
self.assertEqual(op2.comment, "foobar")
self.assertEqual(op2.debug_level, 3)
class _FakeLuWithLocks:
def __init__(self, needed_locks, share_locks):
self.needed_locks = needed_locks
self.share_locks = share_locks
class _FakeGlm:
def __init__(self, owning_nal):
self._owning_nal = owning_nal
def check_owned(self, level, names):
assert level == locking.LEVEL_NODE_ALLOC
assert names == locking.NAL
return self._owning_nal
def owning_all(self, level):
return False
class TestVerifyLocks(unittest.TestCase):
def testNoLocks(self):
lu = _FakeLuWithLocks({}, {})
glm = _FakeGlm(False)
mcpu._VerifyLocks(lu, glm,
_mode_whitelist=NotImplemented,
_nal_whitelist=NotImplemented)
def testNotAllSameMode(self):
for level in [locking.LEVEL_NODE, locking.LEVEL_NODE_RES]:
lu = _FakeLuWithLocks({
level: ["foo"],
}, {
level: 0,
locking.LEVEL_NODE_ALLOC: 0,
})
glm = _FakeGlm(False)
mcpu._VerifyLocks(lu, glm, _mode_whitelist=[], _nal_whitelist=[])
def testDifferentMode(self):
for level in [locking.LEVEL_NODE, locking.LEVEL_NODE_RES]:
lu = _FakeLuWithLocks({
level: ["foo"],
}, {
level: 0,
locking.LEVEL_NODE_ALLOC: 1,
})
glm = _FakeGlm(False)
try:
mcpu._VerifyLocks(lu, glm, _mode_whitelist=[], _nal_whitelist=[])
except AssertionError, err:
self.assertTrue("using the same mode as nodes" in str(err))
else:
self.fail("Exception not raised")
# Once more with the whitelist
mcpu._VerifyLocks(lu, glm, _mode_whitelist=[_FakeLuWithLocks],
_nal_whitelist=[])
def testSameMode(self):
for level in [locking.LEVEL_NODE, locking.LEVEL_NODE_RES]:
lu = _FakeLuWithLocks({
level: ["foo"],
locking.LEVEL_NODE_ALLOC: locking.ALL_SET,
}, {
level: 1,
locking.LEVEL_NODE_ALLOC: 1,
})
glm = _FakeGlm(True)
try:
mcpu._VerifyLocks(lu, glm, _mode_whitelist=[_FakeLuWithLocks],
_nal_whitelist=[])
except AssertionError, err:
self.assertTrue("whitelisted to use different modes" in str(err))
else:
self.fail("Exception not raised")
# Once more without the whitelist
mcpu._VerifyLocks(lu, glm, _mode_whitelist=[], _nal_whitelist=[])
def testAllWithoutAllocLock(self):
for level in [locking.LEVEL_NODE, locking.LEVEL_NODE_RES]:
lu = _FakeLuWithLocks({
level: locking.ALL_SET,
}, {
level: 0,
locking.LEVEL_NODE_ALLOC: 0,
})
glm = _FakeGlm(False)
try:
mcpu._VerifyLocks(lu, glm, _mode_whitelist=[], _nal_whitelist=[])
except AssertionError, err:
self.assertTrue("allocation lock must be used if" in str(err))
else:
self.fail("Exception not raised")
# Once more with the whitelist
mcpu._VerifyLocks(lu, glm, _mode_whitelist=[],
_nal_whitelist=[_FakeLuWithLocks])
def testAllWithAllocLock(self):
for level in [locking.LEVEL_NODE, locking.LEVEL_NODE_RES]:
lu = _FakeLuWithLocks({
level: locking.ALL_SET,
locking.LEVEL_NODE_ALLOC: locking.ALL_SET,
}, {
level: 0,
locking.LEVEL_NODE_ALLOC: 0,
})
glm = _FakeGlm(True)
try:
mcpu._VerifyLocks(lu, glm, _mode_whitelist=[],
_nal_whitelist=[_FakeLuWithLocks])
except AssertionError, err:
self.assertTrue("whitelisted for not acquiring" in str(err))
else:
self.fail("Exception not raised")
# Once more without the whitelist
mcpu._VerifyLocks(lu, glm, _mode_whitelist=[], _nal_whitelist=[])
if __name__ == "__main__":
testutils.GanetiTestProgram()
| apyrgio/snf-ganeti | test/py/ganeti.mcpu_unittest.py | Python | bsd-2-clause | 9,694 | 0.006911 |
#!/usr/bin/env python
import time
makefile = '''
{
"rules":
[
{
"inputs": [ "source1" ],
"outputs": [ "output" ],
"cmd": "cat source1 > output && cat source2 >> output && echo 'output: source1 source2' > deps",
"depfile": "deps"
}
]
}
'''
def set_version_1(test):
test.write_file("source1", "1")
test.write_file("source2", "2")
def set_version_2(test):
test.write_file("source1", "2")
test.write_file("source2", "3")
def run(test):
test.create_makefile(makefile)
set_version_1(test)
test.start()
assert(set(["source1", "output"]) == set(test.get_dirty_targets()))
test.build()
assert(test.get_dirty_targets() == [])
assert(test.get_file_content('output') == '12')
assert(set(["source1", "source2"]) == set(test.get_inputs_of("output")))
assert(set(["output"]) == set(test.get_outputs_of("source2")))
set_version_2(test)
test.expect_watchman_trigger("source1")
test.expect_watchman_trigger("source2")
assert(set(["source1", "source2", "output"]) == set(test.get_dirty_targets()))
test.build()
assert(test.get_dirty_targets() == [])
assert(test.get_file_content('output') == '23')
set_version_1(test)
test.expect_watchman_trigger("source1")
test.expect_watchman_trigger("source2")
assert(set(["source1", "source2", "output"]) == set(test.get_dirty_targets()))
# Build and check we retrieve output from the cache
data = test.build()
assert(len(data['cmds']) == 1)
assert(data['cmds'][0] == { 'cache' : 'output' })
assert(test.get_dirty_targets() == [])
assert(test.get_file_content('output') == '12')
set_version_2(test)
test.expect_watchman_trigger("source1")
test.expect_watchman_trigger("source2")
assert(set(["source1", "source2", "output"]) == set(test.get_dirty_targets()))
# Build and check we retrieve output from the cache
data = test.build()
assert(len(data['cmds']) == 1)
assert(data['cmds'][0] == { 'cache' : 'output' })
assert(test.get_dirty_targets() == [])
assert(test.get_file_content('output') == '23')
| falcon-org/Falcon | test/TestCache.py | Python | bsd-3-clause | 2,048 | 0.025879 |
#!/usr/bin/env python
#coding:utf-8
# Author: --<qingfengkuyu>
# Purpose: Working with MongoDB
# Created: 2014/4/14
# The 32-bit build can only store about 2.5 GB of data (NoSQLFan: maximum file size is 2 GB; 64-bit is recommended for production)
import pymongo
import datetime
import random
# Create a connection
conn = pymongo.MongoClient('localhost',27017)
# Connect to the database
db = conn.study
#db = conn['study']
# Print all collection names, then get a collection
print u'All collections:',db.collection_names()
posts = db.post
#posts = db['post']
print posts
# Insert records
new_post = {"AccountID":22,"UserName":"libing",'date':datetime.datetime.now()}
new_posts = [{"AccountID":22,"UserName":"liuw",'date':datetime.datetime.now()},
             {"AccountID":23,"UserName":"urling",'date':datetime.datetime.now()}]  # each record gets a different insertion time
posts.insert(new_post)
#posts.insert(new_posts)  # bulk-insert several records at once
# Delete records
print u'Delete the matching record:\n',posts.find_one({"AccountID":22,"UserName":"libing"})
posts.remove({"AccountID":22,"UserName":"libing"})
# Update records in the collection
posts.update({"UserName":"urling"},{"$set":{'AccountID':random.randint(20,50)}})
# Query records and count them
print u'Total number of records:',posts.count(),posts.find().count()
print u'Query a single record:\n',posts.find_one()
print posts.find_one({"UserName":"liuw"})
# Query all records
print u'Query multiple records:'
#for item in posts.find():  # query every record
#for item in posts.find({"UserName":"urling"}):  # query only matching records
#for item in posts.find().sort("UserName"):  # sort results by UserName, ascending by default
#for item in posts.find().sort("UserName",pymongo.ASCENDING):  # sort by UserName; ASCENDING is ascending, DESCENDING is descending
for item in posts.find().sort([("UserName",pymongo.ASCENDING),('date',pymongo.DESCENDING)]):  # sort results by several columns
    print item
# Inspect query performance
#posts.create_index([("UserName", pymongo.ASCENDING), ("date", pymongo.DESCENDING)])  # add an index
print posts.find().sort([("UserName",pymongo.ASCENDING),('date',pymongo.DESCENDING)]).explain()["cursor"]  # without an index the query uses a BasicCursor
print posts.find().sort([("UserName",pymongo.ASCENDING),('date',pymongo.DESCENDING)]).explain()["nscanned"]  # number of records scanned while executing the query
| valley3405/testMongo01 | test02.py | Python | gpl-2.0 | 2,298 | 0.046386 |
import datetime
import io
import logging
import logging.handlers
import os
import sys
from collections import deque
from time import perf_counter
import colorlog
class LogHelper:
FORMATTER_COLOR = colorlog.ColoredFormatter('{log_color}{asctime} {name}: {levelname} {message}', style='{')
FORMATTER = logging.Formatter('{asctime} {name}: {levelname} {message}', style='{')
@classmethod
def generate_color_handler(cls, stream=sys.stdout):
handler = logging.StreamHandler(stream)
handler.setFormatter(cls.FORMATTER_COLOR)
return handler
@classmethod
def get_script_name(cls):
script_name = os.path.basename(sys.argv[0])
script_name, _ = os.path.splitext(script_name)
return script_name
@classmethod
def generate_simple_rotating_file_handler(cls, path_log_file=None, when='midnight', files_count=7):
if path_log_file is None:
path_dir = os.path.dirname(sys.argv[0])
path_log_file = cls.suggest_script_log_name(path_dir)
handler = logging.handlers.TimedRotatingFileHandler(path_log_file, when=when, backupCount=files_count)
handler.setLevel(logging.DEBUG)
handler.setFormatter(cls.FORMATTER)
return handler
@classmethod
def suggest_script_log_name(cls, path_dir):
return os.path.join(path_dir, cls.get_script_name() + '.log')
@staticmethod
def timestamp(with_ms=False, time=None):
if time is None:
time = datetime.datetime.now()
if with_ms:
return time.strftime('%Y%m%d_%H%M%S.%f')[:-3]
else:
return time.strftime('%Y%m%d_%H%M%S')
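# A minimal wiring sketch (not part of the original module): attach the colored
# console handler and a midnight-rotating file handler to one logger. The
# logger name and the log path are illustrative assumptions.
def _example_logger_setup(path_log_file='example.log'):
    logger = logging.getLogger('example')
    logger.setLevel(logging.DEBUG)
    logger.addHandler(LogHelper.generate_color_handler())
    logger.addHandler(LogHelper.generate_simple_rotating_file_handler(path_log_file))
    return logger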
class PerformanceMetric:
def __init__(self, *, n_samples=1000, units_suffix='', units_format='.2f', name=None):
super().__init__()
self.name: str = name
self.queue_samples = deque(maxlen=n_samples)
self.total = 0
self.last = 0
self.units_str = units_suffix
self.units_format = units_format
def reset(self):
self.total = 0
self.last = 0
self.queue_samples.clear()
@property
def n_samples(self):
return len(self.queue_samples)
def __str__(self):
str_name = f'[{self.name}] ' if self.name else ''
if self.n_samples == 0:
return f'{str_name}No measurements'
return '{}Average: {:{}} {}; Last: {:{}} {}; Samples: {};'.format(
str_name, self.average, self.units_format, self.units_str,
self.last, self.units_format, self.units_str,
self.n_samples
)
def last_str(self):
str_name = f'[{self.name}] ' if self.name else ''
return f'{str_name}{self.last:{self.units_format}} {self.units_str}'
@property
def average(self):
if self.n_samples == 0:
return None
return self.total / self.n_samples
def submit_sample(self, sample: float):
sample_popped = 0
if self.n_samples == self.queue_samples.maxlen:
sample_popped = self.queue_samples.popleft()
self.last = sample
self.total += self.last - sample_popped
self.queue_samples.append(self.last)
class PerformanceTimer(PerformanceMetric):
def __init__(self, n_samples=1000, units_format='.1f', **kwargs) -> None:
super().__init__(n_samples=n_samples, units_suffix='sec', units_format=units_format, **kwargs)
self.time_last_start = 0
def __enter__(self):
self.begin()
return self
def __exit__(self, t, value, tb):
self.end()
def begin(self):
self.time_last_start = perf_counter()
def end(self):
self.submit_sample(self.peek())
def peek(self):
return perf_counter() - self.time_last_start
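# A short usage sketch (not part of the original module): time a block of work
# with the context-manager interface and report the rolling statistics.
def _example_time_block(n_iterations=3):
    timer = PerformanceTimer(name='example-block')
    for _ in range(n_iterations):
        with timer:
            sum(range(100000))  # placeholder workload
    return str(timer)  # e.g. '[example-block] Average: ... sec; Last: ... sec; Samples: 3;'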
class PrintStream:
"""
Shortcut for using `StringIO`
printf = PrintStream()
printf('Case Results:')
printf(...)
string = str(printf)
"""
def __init__(self, stream=None):
if not stream:
stream = io.StringIO()
self.stream = stream
def __call__(self, *args, **kwargs):
print(*args, file=self.stream, **kwargs)
def __str__(self):
return self.stream.getvalue()
| wolf1986/log_utils | log_utils/helper.py | Python | lgpl-3.0 | 4,265 | 0.001407 |
# Copyright (C) 2011 Lukas Lalinsky
# Distributed under the MIT license, see the LICENSE file for details.
import re
import syslog
from logging import Handler
from logging.handlers import SysLogHandler
class LocalSysLogHandler(Handler):
"""
Logging handler that logs to the local syslog using the syslog module
"""
facility_names = {
"auth": syslog.LOG_AUTH,
"cron": syslog.LOG_CRON,
"daemon": syslog.LOG_DAEMON,
"kern": syslog.LOG_KERN,
"lpr": syslog.LOG_LPR,
"mail": syslog.LOG_MAIL,
"news": syslog.LOG_NEWS,
"syslog": syslog.LOG_SYSLOG,
"user": syslog.LOG_USER,
"uucp": syslog.LOG_UUCP,
"local0": syslog.LOG_LOCAL0,
"local1": syslog.LOG_LOCAL1,
"local2": syslog.LOG_LOCAL2,
"local3": syslog.LOG_LOCAL3,
"local4": syslog.LOG_LOCAL4,
"local5": syslog.LOG_LOCAL5,
"local6": syslog.LOG_LOCAL6,
"local7": syslog.LOG_LOCAL7,
}
priority_map = {
"DEBUG": syslog.LOG_DEBUG,
"INFO": syslog.LOG_INFO,
"WARNING": syslog.LOG_WARNING,
"ERROR": syslog.LOG_ERR,
"CRITICAL": syslog.LOG_CRIT
}
def __init__(self, ident=None, facility=syslog.LOG_USER, log_pid=False):
Handler.__init__(self)
self.facility = facility
if isinstance(facility, basestring):
self.facility = self.facility_names[facility]
options = 0
if log_pid:
options |= syslog.LOG_PID
syslog.openlog(ident, options, self.facility)
self.formatter = None
def close(self):
Handler.close(self)
syslog.closelog()
def emit(self, record):
try:
msg = self.format(record)
if isinstance(msg, unicode):
msg = msg.encode('utf-8')
priority = self.priority_map[record.levelname]
for m in msg.splitlines():
syslog.syslog(self.facility | priority, m)
except StandardError:
self.handleError(record)
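# A minimal usage sketch (not part of the original module; assumes a Unix host
# where the syslog module is available). The ident string and facility are
# illustrative assumptions.
def _example_syslog_setup():
    import logging
    log = logging.getLogger('mb2freedb')
    log.setLevel(logging.INFO)
    log.addHandler(LocalSysLogHandler(ident='mb2freedb', facility='daemon', log_pid=True))
    return log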
| lalinsky/mb2freedb | mb2freedb/utils.py | Python | mit | 2,120 | 0.000472 |
#! /usr/local/bin/stackless2.6
# by pts@fazekas.hu at Fri Jun 17 14:08:07 CEST 2011
"""Demo for hosting a gevent application with Stackless, without Syncless."""
__author__ = 'pts@fazekas.hu (Peter Szabo)'
import sys
# Import best_greenlet before gevent to add greenlet emulation for Stackless
# if necessary.
import syncless.best_greenlet
import gevent
import gevent.hub
import gevent.socket
class Lprng(object):
__slots__ = ['seed']
def __init__(self, seed=0):
self.seed = int(seed) & 0xffffffff
def next(self):
"""Generate a 32-bit unsigned random number."""
# http://en.wikipedia.org/wiki/Linear_congruential_generator
self.seed = (
((1664525 * self.seed) & 0xffffffff) + 1013904223) & 0xffffffff
return self.seed
def __iter__(self):
return self
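# A small usage sketch (not part of the original demo): the generator is a plain
# linear congruential generator, so a given seed always produces the same chain
# of page numbers. Starting from seed 0 the first value is 1013904223, and each
# request handled by Worker below advances the chain by one step.
def _lprng_demo(count=5):  # illustrative helper only
    rng = Lprng(0)
    return [rng.next() for _ in range(count)]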
def Worker(client_socket, addr):
print >>sys.stderr, 'info: connection from %r, handled by %r' % (
addr, gevent.hub.greenlet.getcurrent())
f = client_socket.makefile()
# Read HTTP request.
line1 = None
while True:
line = f.readline().rstrip('\r\n')
if not line: # Empty line, end of HTTP request.
break
if line1 is None:
line1 = line
# Parse HTTP request.
# Please note that an assertion here doesn't abort the server.
items = line1.split(' ')
assert 3 == len(items)
assert items[2] in ('HTTP/1.0', 'HTTP/1.1')
assert items[0] == 'GET'
assert items[1].startswith('/')
# This is to demonstrate the error reporting and recovery behavior of gevent:
# We get an error message like this, and the process execution continues:
#
# Traceback (most recent call last):
# File "/usr/local/lib/python2.6/site-packages/gevent/greenlet.py", line 388, in run
# result = self._run(*self.args, **self.kwargs)
# File "./s2.py", line 137, in Worker
# assert 'bad' not in items[1]
# AssertionError
# <Greenlet at 0xb71acbecL: Worker(<socket at 0xb747668cL fileno=10 sock=127.0.0.1:80, ('127.0.0.1', 55196))> failed with AssertionError
assert 'bad' not in items[1]
if 'sysexit' in items[1]:
print >>sys.stderr, 'info: exiting with SystemExit'
#sys.exit() # Doesn't work, gevent.core.__event_handler catches it.
gevent.hub.MAIN.throw(SystemExit)
if 'exit' in items[1]:
print >>sys.stderr, 'info: exiting with throw'
gevent.hub.MAIN.throw()
try:
num = int(items[1][1:])
except ValueError:
num = None
if 'slow' in items[1]:
gevent.hub.sleep(5)
# Write HTTP response.
if num is None:
f.write('HTTP/1.0 200 OK\r\nContent-Type: text/html\r\n\r\n')
f.write('<a href="/0">start at 0</a><p>Hello, World!\n')
else:
next_num = Lprng(num).next()
f.write('HTTP/1.0 200 OK\r\nContent-Type: text/html\r\n\r\n')
f.write('<a href="/%d">continue with %d</a>\n' %
(next_num, next_num))
#f.flush() # Not needed here.
def GeventListener(server_socket):
# Please note that exceptions raised here will be printed and then ignored
# by the gevent.hub main loop.
print >>sys.stderr, (
'info: accepting connections in %r' % gevent.hub.greenlet.getcurrent())
while True:
client_socket, addr = server_socket.accept()
gevent.spawn(Worker, client_socket, addr)
# Equally good:
#gevent.hub.spawn_raw(Worker, client_socket, addr)
client_socket = addr = None # Save memory.
if __name__ == '__main__':
# We need this patch so gevent.hub.spawn_raw below will create a greenlet
# of the correct type.
server_socket = gevent.socket.socket()
# Old:
# gevent.socket.set_reuse_addr(server_socket)
# server_socket.bind(('127.0.0.1', 8080))
# server_socket.listen(128)
gevent.socket.bind_and_listen(server_socket, ('127.0.0.1', 8080), 128,
reuse_addr=True)
print >>sys.stderr, 'listening on %r' % (server_socket.getsockname(),)
# All non-blocking gevent operations must be initiated from a greenlet
# invoked by the gevent hub. The easiest way to ensure that is to move these
# operations to a function (GeventListener), and call this function with
# gevent.hub.spawn_raw. (As a side effect, if an exception happens in that
# function, the process will continue running.)
gevent.hub.spawn_raw(GeventListener, server_socket)
# Run the gevent main loop indefinitely. This is not a requirement, we
# could to non-blocking Syncless operations instead right here for a long
# time.
syncless.best_greenlet.gevent_hub_main()
assert 0, 'unreached'
| HanWenfang/syncless | examples/demo_gevent_only.py | Python | apache-2.0 | 4,479 | 0.016522 |
import os
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name = "pysymemu",
version = "0.0.1-alpha",
author = "Felipe Andres Manzano",
author_email = "feliam@binamuse.com",
description = ("A tool for symbolic execution of Intel 64 binaries."),
    requires = ['pyelftools', 'capstone'],
provides = ['pysymemu'],
license = "BSD",
url = 'http://github.com/pysymemu',
download_url= 'http://github.com/',
platforms = ['linux', 'win32', 'win64'],
keywords = "testing reverse enginering symbolic execution white box fuzzing automatic test case generation",
long_description=read('README.md'),
classifiers=[
"Development Status :: 3 - Alpha",
"Environment :: Console",
"Intended Audience :: Developers",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: BSD License",
"Natural Language :: English",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 2.7",
"Topic :: Software Development :: Testing"
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Software Development :: Quality Assurance",
],
test_suite="test",
)
| feliam/pysymemu | setup.py | Python | bsd-3-clause | 1,306 | 0.022971 |
#!/usr/bin/env python
## \file configure.py
# \brief An extended configuration script.
# \author T. Albring
# \version 6.2.0 "Falcon"
#
# The current SU2 release has been coordinated by the
# SU2 International Developers Society <www.su2devsociety.org>
# with selected contributions from the open-source community.
#
# Copyright 2012-2019, Francisco D. Palacios, Thomas D. Economon,
# Tim Albring, and the SU2 contributors.
#
# SU2 is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# SU2 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with SU2. If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function, division, absolute_import
from optparse import OptionParser, BadOptionError
import sys,time, os, subprocess, os.path, glob, re, shutil, fileinput
from subprocess import call
# "Pass-through" option parsing -- an OptionParser that ignores
# unknown options and lets them pile up in the leftover argument
# list. Useful to pass unknown arguments to the automake configure.
class PassThroughOptionParser(OptionParser):
def _process_long_opt(self, rargs, values):
try:
OptionParser._process_long_opt(self, rargs, values)
except BadOptionError as err:
self.largs.append(err.opt_str)
def _process_short_opts(self, rargs, values):
try:
OptionParser._process_short_opts(self, rargs, values)
except BadOptionError as err:
self.largs.append(err.opt_str)
def main():
# Command Line Options
usage = './preconfigure.py [options]' \
'\nNote: Options not listed below are passed to the automake configure.' \
'\n Compiler flags must be set with \'export CFLAGS=...\' or \'export CXXFLAGS=...\' ' \
'\n before calling this script.'
parser = PassThroughOptionParser(usage = usage)
parser.add_option("--enable-direct-diff", action="store_true",
help="Enable direct differentiation mode support", dest="directdiff", default=False)
parser.add_option("--enable-autodiff", action="store_true",
help="Enable Automatic Differentiation support", dest="ad_support", default=False)
parser.add_option("--with-ad", action="store", type = "string", help="AD Tool, CODI/ADOLC", default="CODI", dest="adtool")
parser.add_option("--enable-mpi", action="store_true",
help="Enable mpi support", dest="mpi_enabled", default=False)
parser.add_option("--enable-PY_WRAPPER", action="store_true",
help="Enable Python wrapper compilation", dest="py_wrapper_enabled", default=False)
parser.add_option("--disable-tecio", action="store_true",
help="Disable Tecplot binary support", dest="tecio_disabled", default=False)
parser.add_option("--disable-normal", action="store_true",
help="Disable normal mode support", dest="normal_mode", default=False)
parser.add_option("-c" , "--check", action="store_true",
help="Check the source code for potential problems", dest="check", default=False)
parser.add_option("-r" , "--replace", action="store_true",
help="Do a search and replace of necessary symbols. Creates back up of source files.", dest="replace", default=False)
parser.add_option("-d" , "--delete", action="store_true",
help="Removes the back up files.", dest="remove", default=False)
parser.add_option("-v" , "--revert", action="store_true",
help="Revert files to original state.", dest="revert", default=False)
parser.add_option("-u", "--update", action="store_true",
help="Update and recompile submodules.", dest="update", default=False)
(options, args)=parser.parse_args()
options.adtool = options.adtool.upper()
if options.directdiff == False:
adtool_dd = ""
else:
adtool_dd = options.adtool
if options.ad_support == False:
adtool_da = ""
else:
adtool_da = options.adtool
conf_environ = os.environ
made_adolc = False
made_codi = False
header()
modes = {'SU2_BASE' : not options.normal_mode == True,
'SU2_DIRECTDIFF' : adtool_dd ,
'SU2_AD' : adtool_da }
# Create a dictionary from the arguments
argument_dict = dict(zip(args[::2],args[1::2]))
# Set the default installation path (if not set with --prefix)
argument_dict['--prefix'] = argument_dict.get('--prefix', os.getcwd().rstrip())
if not options.check:
if any([modes["SU2_AD"] == 'CODI', modes["SU2_DIRECTDIFF"] == 'CODI']):
conf_environ, made_codi = init_codi(argument_dict,modes,options.mpi_enabled, options.update)
configure(argument_dict,
conf_environ,
options.mpi_enabled,
options.py_wrapper_enabled,
options.tecio_disabled,
modes,
made_adolc,
made_codi)
if options.check:
prepare_source(options.replace, options.remove, options.revert)
def prepare_source(replace = False, remove = False, revert = False):
# Directories containing the source code
print('Preparing source code ...')
dir_list = [ "Common",
"SU2_CFD",
"SU2_DEF",
"SU2_DOT",
"SU2_GEO",
"SU2_SOL",
"SU2_MSH"]
file_list = ""
exclude_dic_lines = {}
exclude_dic_files = {}
exclude_file_name = 'preconf.exclude'
# # Build the dictionaries for line and file excludes that
# # are defined in the exlude file 'preconf.exclude'.
# # Syntax:
# # PathTo/File[:Line1,Line2,...]
# if os.path.exists(exclude_file_name):
# print 'Reading \'' + exclude_file_name + '\' ...'
# with open(exclude_file_name, 'r') as exclude:
# for line in exclude:
# exclude_line = line.split(':')
# exclude_file = exclude_line[0].rstrip()
# if len(exclude_line) > 1:
# exclude_lines = exclude_line[1].split(',')
# for index,item in enumerate(exclude_lines):
# exclude_lines[index] = int(item.rstrip())
# exclude_dic_lines[exclude_line[0].rstrip()] = exclude_lines
# else:
# exclude_dic_files[exclude_line[0].rstrip()] = [-1]
# else:
# print('Exclude file \'' + exclude_file_name + '\' not found. Checking all files.')
# Hardcoded files that will be skipped
exclude_dic_files = { 'Common/include/datatype_structure.hpp' : [-1],
'Common/include/datatype_structure.inl' : [-1],
'Common/include/mpi_structure.hpp' : [-1],
'Common/include/mpi_structure.inl' : [-1],
'Common/src/datatype_structure.cpp': [-1],
'Common/src/mpi_structure.cpp' : [-1] }
str_double = 'double'
regex_double = re.compile(r'(^|[^\w])('+str_double+')([^\w]|$)')
replacement_double = r'\1su2double\3'
simple_replacements = {'MPI_Reduce' : 'SU2_MPI::Reduce',
'MPI_Allreduce' : 'SU2_MPI::Allreduce',
'MPI_Gather' : 'SU2_MPI::Gather',
'MPI_Allgather' : 'SU2_MPI::Allgather',
'MPI_Isend' : 'SU2_MPI::Isend',
'MPI_Irecv' : 'SU2_MPI::Irecv',
'MPI_Send' : 'SU2_MPI::Send',
'MPI_Wait' : 'SU2_MPI::Wait',
'MPI_Waitall' : 'SU2_MPI::Waitall',
'MPI_Waitany' : 'SU2_MPI::Waitany',
'MPI_Bsend' : 'SU2_MPI::Bsend' ,
'MPI_Bcast' : 'SU2_MPI::Bcast',
'MPI_Sendrecv' : 'SU2_MPI::Sendrecv',
'MPI_Init' : 'SU2_MPI::Init',
'MPI_Recv' : 'SU2_MPI::Recv',
'MPI_Comm_size' : 'SU2_MPI::Comm_size',
'MPI_Comm_rank' : 'SU2_MPI::Comm_rank',
'MPI_Init' : 'SU2_MPI::Init',
'MPI_Barrier' : 'SU2_MPI::Barrier',
'MPI_Abort' : 'SU2_MPI::Abort',
'MPI_Request' : 'SU2_MPI::Request',
'MPI_Get_count' : 'SU2_MPI::Get_count',
'MPI_Finalize' : 'SU2_MPI::Finalize',
'MPI_Buffer_detach': 'SU2_MPI::Buffer_detach',
'MPI_Buffer_attach': 'SU2_MPI::Buffer_attach',
'MPI_Status' : 'SU2_MPI::Status',
'sprintf' : 'SPRINTF'}
regex_cast_1 = re.compile(r'(^|[^\w|^\\])(int)(\s*\()')
replacement_cast_1 = r'\1SU2_TYPE::Int\3'
regex_cast_2 = re.compile(r'\(int\)\s*')
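    # Illustration (added comment, not part of the original script): a source line
    #   double area = 0.0; MPI_Barrier(MPI_COMM_WORLD);
    # is rewritten by the replacement pass below to
    #   su2double area = 0.0; SU2_MPI::Barrier(MPI_COMM_WORLD);
    # regex_double only matches the standalone lower-case token 'double', so
    # identifiers such as MPI_DOUBLE are left untouched.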
logfile = open ('preconf.log','w')
backup_ext = '.orig'
print('Checking for problems...')
# Test each source file for the occurrence of missing replacements
# and print the respective lines.
for dir in dir_list:
file_list = glob.glob(dir+os.path.sep+'*[src,include]'+os.path.sep+'*[.cpp,.hpp,.inl]')
for file in file_list:
if not file in exclude_dic_files.keys():
if all([not replace, revert]):
# Check if back up file exists
if os.path.isfile(file + backup_ext):
os.remove(file);
shutil.copy(file + backup_ext, file)
else:
print('Cannot find backup file ' + file + backup_ext)
# Remove backup files if requested
if all([not replace, remove]):
if os.path.isfile(file + backup_ext):
print('Removing' + file + backup_ext)
os.remove(file + backup_ext)
if all([not remove, not revert]):
num_found = 0
found_line = ""
ignore_line = ""
new_line = ""
for line in fileinput.input(file, inplace = 1, backup = backup_ext):
new_line = line.rstrip('\n')
if any([re.findall(regex_double, line), find_all(line, simple_replacements), re.findall(regex_cast_1, line)]):
if not fileinput.lineno() in exclude_dic_lines.get(file,[]):
if replace:
new_line = replace_all(new_line, simple_replacements)
new_line = re.sub(regex_double, replacement_double, new_line)
new_line = re.sub(regex_cast_1, replacement_cast_1, new_line)
found_line = found_line + '\tLine ' + str(fileinput.lineno()) +': ' + line.rstrip() + '\n\t\t => ' + new_line.rstrip() + '\n'
else:
found_line = found_line + '\tLine ' + str(fileinput.lineno()) +': ' + line.rstrip() + '\n'
num_found = num_found + 1
else:
ignore_line = ignore_line + 'Ignoring line ' + str(fileinput.lineno()) + ' in ' + file + ' (' + line.rstrip() + ')\n'
print(new_line)
if num_found > 0:
if replace:
print('Solved ' + str(num_found) + ' potential problem(s) in ' + file + '.')
logfile.write('Solved ' + str(num_found) + ' potential problem(s) in ' + file + ':\n')
else:
print('Found ' + str(num_found) + ' potential problem(s) in ' + file + '.')
logfile.write('Found ' + str(num_found) + ' potential problem(s) in ' + file + ':\n')
logfile.write( found_line )
else:
os.remove(file + backup_ext)
if not ignore_line == "":
print(ignore_line.rstrip())
else:
print('Ignoring file ' + file)
print('\nPlease check preconf.log to get more information about potential problems.')
def replace_all(text, dic):
for i, j in dic.iteritems():
text = text.replace(i, j)
return text
def find_all(text, dic):
for i,j in dic.iteritems():
if not text.find(i) == -1:
return True
return False
def init_codi(argument_dict, modes, mpi_support = False, update = False):
modules_failed = True
# This information of the modules is used if projects was not cloned using git
# The sha tag must be maintained manually to point to the correct commit
sha_version_codi = 'bd4a639c2fe625a80946c8365bd2976a2868cf46'
github_repo_codi = 'https://github.com/scicompkl/CoDiPack'
sha_version_medi = '46a97e1d6e8fdd3cb42b06534cff6acad2a49693'
github_repo_medi = 'https://github.com/SciCompKL/MeDiPack'
medi_name = 'MeDiPack'
codi_name = 'CoDiPack'
alt_name_medi = 'externals/medi'
alt_name_codi = 'externals/codi'
# Some log and error files
log = open( 'preconf.log', 'w' )
err = open( 'preconf.err', 'w' )
pkg_environ = os.environ
codi_status = False
ampi_status = False
print("Checking the status of submodules")
print('=====================================================================')
# Remove modules if update is requested
if update:
if os.path.exists(alt_name_codi):
print('Removing ' + alt_name_codi)
shutil.rmtree(alt_name_codi)
if os.path.exists(alt_name_medi):
print('Removing ' + alt_name_medi)
shutil.rmtree(alt_name_medi)
submodule_check(codi_name, alt_name_codi, github_repo_codi, sha_version_codi, log, err, update)
if mpi_support:
submodule_check(medi_name, alt_name_medi, github_repo_medi, sha_version_medi, log, err, update)
return pkg_environ, True
def submodule_check(name, alt_name, github_rep, sha_tag, log, err, update = False):
try:
status = submodule_status(alt_name, update)
if status:
print('Found correct version of ' + name + ' in ' + alt_name + '.')
except RuntimeError:
if all([os.path.exists(alt_name), not os.path.exists(alt_name + '/' + sha_tag)]):
print('Found an old or unspecified version of ' + name + ' in ' + alt_name + '.\nUse -u to reset module.')
sys.exit()
if not os.path.exists(alt_name):
print('\ngit command failed (either git is not installed or this is not a git repository).')
print('\nUsing fall-back method to initialize submodule ' + name)
download_module(name, alt_name, github_rep, sha_tag, log, err)
else:
print('Found correct version of ' + name + ' in ' + alt_name + '.')
def submodule_status(path, update):
try:
status = check_output('git submodule status ' + path).decode()
except RuntimeError:
raise RuntimeError
status_indicator = status[0][0]
if status_indicator == '+':
sys.stderr.write('WARNING: the currently checked out submodule commit in ' + path + ' does not match the SHA-1 found in the index.\n')
sys.stderr.write('Use \'git submodule update --init '+ path + '\' to reset the module if necessary.\n')
return False
elif any([status_indicator == '-', update]):
print('Initialize submodule ' + path + ' using git ... ')
subprocess.check_call('git submodule update --init ' + path, shell = True)
return True
def download_module(name, alt_name, git_repo, commit_sha, logfile, errorfile):
print('\nInitializing ' + name + ' \'' + commit_sha + '\'')
print('=====================================================================')
# Download package
try:
print('Downloading module from ' + git_repo)
subprocess.check_call('wget -N ' + git_repo + '/archive/' + commit_sha + '.zip', stdout = logfile, stderr = errorfile, shell = True )
except subprocess.CalledProcessError:
print('Download of module ' + name + ' failed. See preconf.err for more information.')
print('To download it manually, perform the following steps:')
print('\t - Download the zip at \"' + git_repo + '/archive/' + commit_sha + '.zip\"')
print('\t - Extract the archive to externals/' + alt_name)
print('\t - Execute command \'touch externals/'+ alt_name + '/' + commit_sha + '\'')
print('\t - Run preconfigure.py again')
sys.exit()
# Extract zip archive
try:
print('Extracting archive ...')
subprocess.check_call('unzip -u ' + commit_sha + '.zip', stdout = logfile, stderr = errorfile, shell=True)
except subprocess.CalledProcessError:
print('Extraction of module ' + name + ' failed. See preconf.err for more information.')
sys.exit()
# Rename folder and create a file to identify the version
try:
print('Creating identifier ...')
subprocess.check_call('mv '+ name + '-' + commit_sha + ' ' + alt_name + ' && touch ' + alt_name + '/' + commit_sha, stdout = logfile, stderr = errorfile, shell = True)
except subprocess.CalledProcessError:
print('Renaming of module ' + name + ' failed. See preconf.err for more information.')
sys.exit()
# Remove archive
subprocess.check_call('rm ' + commit_sha + '.zip', shell=True)
def configure(argument_dict,
conf_environ,
mpi_support,
py_wrapper,
tecio,
modes,
made_adolc,
made_codi):
# Boostrap to generate Makefile.in
bootstrap_command = './bootstrap'
# Set the base command for running configure
configure_base = '../configure'
# Add the arguments to the configure command
for arg in argument_dict:
configure_base = configure_base + " " + arg + "=" + argument_dict[arg]
configure_mode = ''
if mpi_support:
configure_base = configure_base + ' --enable-mpi'
if py_wrapper:
configure_base = configure_base + ' --enable-PY_WRAPPER'
if tecio:
configure_base = configure_base + ' --disable-tecio'
build_dirs = ''
print( '\nPreparing build environment\n' \
'=====================================================================')
run_command(bootstrap_command, 'bootstrap.log', 'bootstrap.err', conf_environ)
# Create the commands for the different configurations and run configure
for key in modes:
if modes[key]:
print('\nRunning configure in folder ' + key + ' ', end = '')
if modes[key] == 'CODI':
if key == 'SU2_DIRECTDIFF':
configure_mode = '--enable-codi-forward'
if key == 'SU2_AD':
configure_mode = '--enable-codi-reverse'
print('using ' + modes[key])
elif modes[key] == 'ADOLC':
if key == 'SU2_DIRECTDIFF':
configure_mode = '--enable-adolc-forward'
if key == 'SU2_AD':
configure_mode = '--enable-adolc-reverse'
print('using ' + modes[key])
elif modes[key] == 'COMPLEX':
configure_mode = '--enable-complex'
print('using ' + modes[key])
else:
configure_mode = ''
print('')
print('=====================================================================')
log = os.getcwd().rstrip() + '/conf_'+ key+'.log'
err = os.getcwd().rstrip() + '/conf_'+ key+'.err'
if not os.path.exists(key):
os.mkdir(key)
os.chdir(key)
run_command(configure_base + ' ' + configure_mode, log, err, conf_environ)
os.chdir(os.pardir)
build_dirs += key + ' '
write_makefile(build_dirs)
print('\nPre-configuration Summary:\n' \
'=====================================================================\n'\
'\tConfiguration sets: '+ build_dirs + '\n')
print('\tUse "make <install>" to compile (and install) all configured binaries:\n')
if modes['SU2_BASE']:
print('\tSU2_CFD -> General solver for direct, cont. adjoint and linearized equations.\n' \
'\tSU2_DOT -> Gradient Projection Code.\n' \
'\tSU2_DEF -> Mesh Deformation Code.\n' \
'\tSU2_MSH -> Mesh Adaption Code.\n' \
'\tSU2_SOL -> Solution Export Code.\n' \
'\tSU2_GEO -> Geometry Definition Code.\n')
if modes['SU2_AD']:
print('\tSU2_CFD_AD -> Discrete Adjoint Solver and general AD support.')
print('\tSU2_DOT_AD -> Mesh sensitivity computation and general AD support.')
if modes['SU2_DIRECTDIFF']:
print('\tSU2_CFD_DIRECTDIFF -> Direct Differentation Mode.')
print('\n')
print('\tPlease be sure to add the $SU2_HOME and $SU2_RUN environment variables,\n' \
'\tand update your $PATH (and $PYTHONPATH if applicable) with $SU2_RUN.\n' \
'\n' \
'\tBased on the input to this configuration, add these lines to your .bashrc file: \n' \
'\n' \
'\texport SU2_RUN="'+argument_dict['--prefix']+'/bin"\n' \
'\texport SU2_HOME="'+os.getcwd().rstrip()+'"\n' \
'\texport PATH=$PATH:$SU2_RUN\n' \
'\texport PYTHONPATH=$PYTHONPATH:$SU2_RUN\n')
def run_command(command, log, err, env):
try:
logfile = open(log, 'w')
errfile = open(err, 'w')
print('Command: ' + command)
subprocess.check_call(command, env = env, stdout = logfile, stderr = errfile, shell=True)
print('Logfile written to ' + log)
logfile.close()
errfile.close()
except subprocess.CalledProcessError:
errfile = open(err, 'r')
print('\nThere was an error while running command \'' + command + '\'.')
print('=== Error Log ===')
print(errfile.read())
errfile.close()
sys.exit(1)
def check_output(cmd):
std, err = subprocess.Popen([cmd], stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell = True).communicate()
if err:
raise RuntimeError(err)
return std
def write_makefile(build_dirs):
print('\nCreating Makefile ...\n')
makefile = open('Makefile', 'w')
makefile.writelines(['# This file is auto-generated by preconfigure.py\n',
'SUBDIRS = '+ build_dirs + '\n',
'INSTALLDIRS = $(SUBDIRS:%=install-%)\n',
'CLEANDIRS = $(SUBDIRS:%=clean-%)\n',
'\n',
'subdirs: $(SUBDIRS)\n',
'\n',
'$(SUBDIRS):\n',
'\t$(MAKE) -C $@\n',
'\n',
'install: $(INSTALLDIRS)\n',
'$(INSTALLDIRS):\n',
'\t$(MAKE) -C $(@:install-%=%) install\n',
'\n',
'clean: $(CLEANDIRS)\n',
'$(CLEANDIRS):\n',
'\t$(MAKE) -C $(@:clean-%=%) clean\n',
'\n',
'.PHONY: subdirs $(SUBDIRS)\n',
'.PHONY: subdirs $(INSTALLDIRS)\n',
'.PHONY: subdirs $(CLEANDIRS)\n',
'.PHONY: install\n'])
makefile.close()
def header():
print('-------------------------------------------------------------------------\n'\
'| ___ _ _ ___ | \n'\
'| / __| | | |_ ) Release 6.2.0 \'Falcon\' | \n'\
'| \__ \ |_| |/ / | \n'\
'| |___/\___//___| Pre-configuration Script | \n'\
'| | \n'\
'------------------------------------------------------------------------- \n'\
'| The current SU2 release has been coordinated by the | \n'\
'| SU2 International Developers Society <www.su2devsociety.org> | \n'\
'| with selected contributions from the open-source community. | \n'\
'------------------------------------------------------------------------- \n'\
'| The main research teams contributing to the current release are: | \n'\
'| - Prof. Juan J. Alonso\'s group at Stanford University. | \n'\
'| - Prof. Piero Colonna\'s group at Delft University of Technology. | \n'\
'| - Prof. Nicolas R. Gauger\'s group at Kaiserslautern U. of Technology. | \n'\
'| - Prof. Alberto Guardone\'s group at Polytechnic University of Milan. | \n'\
'| - Prof. Rafael Palacios\' group at Imperial College London. | \n'\
'| - Prof. Vincent Terrapon\'s group at the University of Liege. | \n'\
'| - Prof. Edwin van der Weide\'s group at the University of Twente. | \n'\
'| - Lab. of New Concepts in Aeronautics at Tech. Inst. of Aeronautics. | \n'\
'------------------------------------------------------------------------- \n'\
'| Copyright 2012-2019, Francisco D. Palacios, Thomas D. Economon, | \n'\
'| Tim Albring, and the SU2 contributors. | \n'\
'| | \n'\
'| SU2 is free software; you can redistribute it and/or | \n'\
'| modify it under the terms of the GNU Lesser General Public | \n'\
'| License as published by the Free Software Foundation; either | \n'\
'| version 2.1 of the License, or (at your option) any later version. | \n'\
'| | \n'\
'| SU2 is distributed in the hope that it will be useful, | \n'\
'| but WITHOUT ANY WARRANTY; without even the implied warranty of | \n'\
'| MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | \n'\
'| Lesser General Public License for more details. | \n'\
'| | \n'\
'| You should have received a copy of the GNU Lesser General Public | \n'\
'| License along with SU2. If not, see <http://www.gnu.org/licenses/>. | \n'\
'------------------------------------------------------------------------- \n')
# -------------------------------------------------------------------
# Run Main Program
# -------------------------------------------------------------------
# this is only accessed if running from command prompt
if __name__ == '__main__':
main()
| srange/SU2 | preconfigure.py | Python | lgpl-2.1 | 28,082 | 0.012143 |
# Copyright 2009-2010 Doug Orleans. Distributed under the GNU Affero
# General Public License v3. See COPYING for details.
from google.appengine.api import urlfetch
import urllib
from xml.dom import minidom
import time
mbns = 'http://musicbrainz.org/ns/mmd-1.0#'
extns = 'http://musicbrainz.org/ns/ext-1.0#'
# Since the Musicbrainz XML Web service does rate-limiting by IP, and
# requests from multiple Google App Engine apps might all come from
# the same IP, we are likely to get rate-limited no matter how slow we
# ourselves go. So instead we have to go through a proxy at a
# different address.
def proxify(url):
return ('http://steak.place.org/servlets/mb-mirror.ss?'
+ urllib.urlencode({ 'url': url }))
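# Illustration (added comment, not part of the original module):
#   proxify('http://musicbrainz.org/ws/1/artist/?type=xml&name=Low')
# returns
#   'http://steak.place.org/servlets/mb-mirror.ss?url=http%3A%2F%2Fmusicbrainz.org%2Fws%2F1%2Fartist%2F%3Ftype%3Dxml%26name%3DLow'
# so every request is served through the mirror host instead of hitting
# musicbrainz.org directly from a shared App Engine IP.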
def xmlHttpRequest(url):
time.sleep(1)
url = proxify(url)
response = urlfetch.fetch(url, deadline=10)
if response.status_code != 200:
raise HTTPError(url, response)
return minidom.parseString(response.content)
class Resource:
@classmethod
def url(cls):
return 'http://musicbrainz.org/ws/1/' + cls.type + '/'
@classmethod
def getElement(cls, id, *inc):
fields = { 'type': 'xml', 'inc': ' '.join(inc) }
url = cls.url() + id + '?' + urllib.urlencode(fields)
doc = xmlHttpRequest(url)
return elementField(doc.documentElement, cls.type)
@classmethod
def searchElements(cls, **fields):
for key in fields:
fields[key] = fields[key].encode('utf-8')
fields['type'] = 'xml'
url = cls.url() + '?' + urllib.urlencode(fields)
doc = xmlHttpRequest(url)
return doc.getElementsByTagNameNS(mbns, cls.type)
class Artist(Resource):
type = 'artist'
def __init__(self, id=None, elt=None):
if elt == None:
elt = self.getElement(id)
self.score = elt.getAttributeNS(extns, 'score')
self.id = elt.getAttribute('id')
self.name = elementFieldValue(elt, 'name')
self.sortname = elementFieldValue(elt, 'sort-name')
self.disambiguation = elementFieldValue(elt, 'disambiguation')
def releaseGroups(self):
return ReleaseGroup.search(artistid=self.id)
@classmethod
def search(cls, **fields):
artists = cls.searchElements(**fields)
return [Artist(elt=elt) for elt in artists]
class ReleaseGroup(Resource):
type = 'release-group'
def __init__(self, id=None, elt=None):
if elt == None:
elt = self.getElement(id, 'artist')
self.score = elt.getAttributeNS(extns, 'score')
self.id = elt.getAttribute('id')
self.type = elt.getAttribute('type')
self.artist = Artist(elt=elementField(elt, 'artist'))
self.title = elementFieldValue(elt, 'title')
@classmethod
def search(cls, **fields):
rgs = cls.searchElements(**fields)
return [ReleaseGroup(elt=elt) for elt in rgs]
def elementField(elt, fieldName):
fields = elt.getElementsByTagNameNS(mbns, fieldName)
if fields:
return fields[0]
def elementFieldValue(elt, fieldName):
field = elementField(elt, fieldName)
if field:
return textContent(field)
# Node.textContent is only in DOM Level 3...
def textContent(node):
node.normalize()
return ''.join(node.data for node in node.childNodes
if node.nodeType == node.TEXT_NODE)
class HTTPError(Exception):
def __init__(self, url, response):
self.url = url
self.response = response
def __str__(self):
return 'HTTPError: ' + str(self.response.status_code)
| dougo/chugchanga-poll | musicbrainz.py | Python | agpl-3.0 | 3,581 | 0.004747 |
import os
import aiohttp
import random
import string
import asyncio
import shutil
import re
from threading import Thread
from io import BytesIO
from zipfile import ZipFile
from discord.ext import commands
from core import BotError
DEFAULT_MAJOR = "512"
DEFAULT_MINOR = "1416"
class WindowsProcessThread(Thread):
def __init__(self, proc, p_args):
super().__init__()
self._proc = proc
self._args = p_args
self.errored = False
self.error_msg = None
def run(self):
winloop = asyncio.ProactorEventLoop()
future = self._proc(winloop, *self._args)
try:
winloop.run_until_complete(future)
except BotError as err:
self.errored = True
self.error_msg = err.message
except Exception:
self.errored = True
self.error_msg = "Unknown error caught in worker thread."
winloop.close()
def validate_byond_build(byond_str):
"""
    Parse an optional BYOND version prefix out of a command argument.
Return a tuple containing (major, minor) build information if the argument
string matches the defined format of: v:{major}.{minor} {rest of code here}.
Returns None if such a tuple can't be generated.
"""
if not byond_str.startswith("v:"):
return None
chunks = byond_str.split(" ")
if not len(chunks) > 1:
return None
chunks = chunks[0].split(".")
    # Return None if the version chunk does not split into exactly major.minor.
if len(chunks) != 2:
return None
try:
major = int(chunks[0][2:])
minor = int(chunks[1])
except ValueError:
raise BotError("Error processing BYOND version request.", "validate_byond_build")
return major, minor
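# A small worked example (not part of the original cog): the requested build is
# read from a "v:{major}.{minor}" prefix, and anything without that prefix falls
# back to the default version.
def _example_version_parsing():  # illustrative helper only
    assert validate_byond_build("v:513.1536 world.log << 1 + 1") == (513, 1536)
    assert validate_byond_build("world.log << 1 + 1") is None  # no prefix -> defaults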
class DmCog(commands.Cog):
WORK_FOLDER = "cogs\\byond_eval"
DM_BOILERPLATE = "/world/loop_checks = FALSE;\n" + \
"\n/world/New() {{ dm_eval(); del(src); }}" + \
"\n{0}\n/proc/dm_eval() {{ {1} {2} }}"
def __init__(self, bot):
self.bot = bot
self._instances = []
self._safety_patterns = [r'#(\s*)?include', r'include', r'##',
r'```.*```', r'`.*`', r'Reboot']
self._safety_expressions = []
self._arg_expression = re.compile(r'(?:(?P<pre_proc>.*);;;)?(?:(?P<proc>.*);;)?(?P<to_out>.*)?')
for patt in self._safety_patterns:
self._safety_expressions.append(re.compile(patt))
def get_work_dir(self):
"""Returns the folder where BYOND versions and instances should be saved."""
cwd = os.getcwd()
return os.path.join(cwd, self.WORK_FOLDER)
def new_instance(self, length):
"""Generates a unique instance ID, one which is currently not in use."""
while True:
rand = "".join([random.choice(string.ascii_letters + string.digits) for _ in range(length)])
if rand not in self._instances:
self._instances.append(rand)
return rand
def cleanup_instance(self, instance_id, instance_dir):
"""Deletes all files associated with an instance and removes it from the list."""
if not os.path.isdir(instance_dir):
return
self._instances.remove(instance_id)
shutil.rmtree(instance_dir, ignore_errors=True)
def process_args(self, code):
"""
Generates an array of code segments to be placed into the compiled DM code.
Returned dictionary must have three keys: "pre_proc", "proc", and "to_out".
If those pieces do not exist, they are to be set as None. As to avoid key
errors further down the call stack.
"""
res = self._arg_expression.match(code)
if not res or not res.groupdict():
raise BotError("No valid code sent.", "process_args")
code_segs = {"pre_proc": None, "proc": None, "to_out": None}
res_dict = res.groupdict()
for key in code_segs:
if key in res_dict:
code_segs[key] = res_dict[key]
if (code_segs["pre_proc"] and
not code_segs["pre_proc"].endswith(";") and
not code_segs["pre_proc"].endswith("}")):
code_segs["pre_proc"] += ";"
if (code_segs["proc"] and not code_segs["proc"].endswith(";")
and not code_segs["proc"].endswith(";")):
code_segs["proc"] += ";"
if code_segs["to_out"]:
code_segs["to_out"] = code_segs["to_out"].split(";")
return code_segs
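    # Worked example (added comment, not part of the original cog): the argument
    #   "var/x = 5;;;x += 2;;x"
    # parses into pre_proc "var/x = 5", proc "x += 2" and to_out "x"; the two code
    # segments then receive a trailing ';' and to_out is split on ';' into the
    # list of expressions dumped to world.log.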
def validate_dm(self, code):
"""Validates the code given for potential exploits."""
for expr in self._safety_expressions:
if expr.search(code):
raise BotError("Disallowed/dangerous code found. Aborting.", "validate_dm")
def generate_dm(self, segments, instance_dir):
"""Generates the .dme file to be compiled."""
with open(f"{instance_dir}\\eval.dme", "w+") as f:
if not segments["pre_proc"]:
segments["pre_proc"] = ""
if segments["to_out"]:
var_dump = ""
for var in segments["to_out"]:
var_dump += f"world.log << {var};"
segments["to_out"] = var_dump
self.validate_dm(var_dump)
else:
segments["to_out"] = ""
if not segments["proc"]:
segments["proc"] = ""
output = self.DM_BOILERPLATE
output = output.format(segments["pre_proc"], segments["proc"], segments["to_out"])
f.write(output)
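    # Illustration (added comment, not part of the original cog): for the segments
    # parsed from "var/x = 5;;;x += 2;;x" the generated eval.dme reads roughly
    #   /world/loop_checks = FALSE;
    #
    #   /world/New() { dm_eval(); del(src); }
    #   var/x = 5;
    #   /proc/dm_eval() { x += 2; world.log << x; }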
async def compile_dm(self, loop, instance_dir, major, minor):
"""Executor proc to compile the .dme file provided."""
dm_path = os.path.join(self.get_work_dir(),
f"byond{major}.{minor}\\byond\\bin\\dm.exe")
if not os.path.isfile(dm_path):
raise BotError("dm.exe not found.", "compile_dm")
dme_path = os.path.join(instance_dir, "eval.dme")
if not os.path.isfile(dme_path):
raise BotError(".dme under evaluation not found.", "compile_dm")
process = await asyncio.create_subprocess_exec(*[dm_path, dme_path], loop=loop,
stderr=asyncio.subprocess.DEVNULL,
stdout=asyncio.subprocess.DEVNULL)
try:
await asyncio.wait_for(process.wait(), timeout=60.0, loop=loop)
except TimeoutError:
raise BotError("Compiler timed out.", "compile_dm")
if process.returncode != 0:
raise BotError("Error compiling or running DM.", "compile_dm")
def validate_compile(self, instance_dir):
"""Checks wether or not the compiled end result is safe to run."""
dmb_found = False
for fname in os.listdir(instance_dir):
if fname.endswith(".rsc"):
raise BotError("Resource file detected. Execution aborted.", "validate_compile")
elif fname.endswith(".dmb"):
dmb_found = True
if not dmb_found:
raise BotError("Compilation failed and no .dmb was generated.", "validate_compile")
async def run_dm(self, loop, instance_dir, major, minor):
"""Executor proc to host and run the .dmb file provided."""
dd_path = os.path.join(self.get_work_dir(),
f"byond{major}.{minor}\\byond\\bin\\dreamdaemon.exe")
if not os.path.isfile(dd_path):
raise BotError("dreadaemon.exe not found.", "run_dm")
dmb_path = os.path.join(instance_dir, "eval.dmb")
if not os.path.isfile(dmb_path):
raise BotError(".dmb under evaluation not found.", "run_dm")
p_args = [dd_path, dmb_path] + ["-invisible", "-ultrasafe", "-logself", "-log", "output.log", "-once", "-close", "-quiet"]
process = await asyncio.create_subprocess_exec(*p_args, loop=loop,
stderr=asyncio.subprocess.DEVNULL,
stdout=asyncio.subprocess.DEVNULL)
try:
await asyncio.wait_for(process.wait(), timeout=60.0, loop=loop)
except TimeoutError:
raise BotError("DreamDaemon timed out.", "run_dm")
async def run_executor(self, proc, p_args):
"""A helper for running Windows subprocesses in a separate thread."""
thread = WindowsProcessThread(proc, p_args)
thread.start()
cycles = 0
while cycles < 60:
if not thread.is_alive():
break
cycles += 1
await asyncio.sleep(1)
error = thread.errored
error_msg = thread.error_msg
thread.join()
if error:
raise BotError(error_msg, "run_executor")
def get_output(self, instance_dir):
"""Returns a string containing the first 30 lines from the test instance's log."""
log_path = os.path.join(instance_dir, "output.log")
if not os.path.isfile(log_path):
return "Error: no log file found."
with open(log_path, "r") as file:
content = file.readlines()
if len(content) < 2:
return "No contents found in the log file."
content = [x.strip() for x in content]
content = content[1:11]
content = "\n".join(content)
if len(content) > 1750:
content = content[0:1750] + "\n...Cut-off reached..."
out = "World.log output:\n```\n" + content + "\n```"
return out
def byond_found(self, major=DEFAULT_MAJOR, minor=DEFAULT_MINOR):
"""Checks whether or not the specified version is already found in the test folder."""
path = self.get_work_dir()
byond_path = os.path.join(path, f"byond{major}.{minor}")
if os.path.isdir(byond_path) and os.path.isfile(f"{byond_path}\\byond\\bin\\dm.exe"):
return True
return False
async def setup_byond(self, major=DEFAULT_MAJOR, minor=DEFAULT_MINOR):
"""Downloads and unzips the provided BYOND version."""
path = self.get_work_dir()
byond_path = os.path.join(path, f"byond{major}.{minor}")
url = f"http://www.byond.com/download/build/{major}/{major}.{minor}_byond.zip"
async with aiohttp.ClientSession() as session:
async with session.get(url) as resp:
try:
data = await resp.read()
except Exception:
raise BotError("Unable to download the BYOND zip file.", "init_byond")
if resp.status != 200:
raise BotError("Unable to download the specified BYOND version.", "init_byond")
with ZipFile(BytesIO(data)) as z:
z.extractall(byond_path)
@commands.command(aliases=["dmeval", "dme"])
@commands.cooldown(1, 5, commands.BucketType.user)
async def dm_eval(self, ctx, *, code):
"""
Evaluates given DM code by compiling and running it. Accepts a maximum
        of 4 formatted arguments: v:{byond_major}.{byond_minor} {global_code};;;{eval_code};;{vars;to;log}.
All arguments other than {vars;to;log} are optional and may simply be omitted.
So at bare minimum you simply need to write some variables/expressions to be
evaluated and printed to world.log.
"""
try:
version_tuple = validate_byond_build(code)
if not version_tuple:
version_tuple = (DEFAULT_MAJOR, DEFAULT_MINOR)
else:
code = code[(code.find(" ") + 1):]
if not self.byond_found(*version_tuple):
await ctx.send(f"Version {version_tuple[0]}.{version_tuple[1]} not cached. Downloading. (This may take a bit.)")
await self.setup_byond(*version_tuple)
except BotError as err:
await ctx.send(f"Error while setting up BYOND:\n{err}")
return
except Exception:
await ctx.send(f"Unrecognized exception while setting up BYOND.")
return
instance = self.new_instance(32)
instance_folder = os.path.join(self.get_work_dir(), f"_instances\\{instance}")
if not os.path.isdir(instance_folder):
os.makedirs(instance_folder)
try:
segs = self.process_args(code)
self.generate_dm(segs, instance_folder)
executor_args = [instance_folder, version_tuple[0], version_tuple[1]]
await self.run_executor(self.compile_dm, executor_args)
self.validate_compile(instance_folder)
await self.run_executor(self.run_dm, executor_args)
except BotError as err:
await ctx.send(f"Error compiling or running code:\n{err}")
except Exception:
await ctx.send("Unrecognized error while compiling or running code.")
else:
await ctx.send(self.get_output(instance_folder))
self.cleanup_instance(instance, instance_folder)
@commands.command(aliases=["dmversion", "dmv"])
async def dm_version(self, ctx):
"""Reports the default version of BYOND used by dm_eval."""
await ctx.send(f"The default version of BYOND used for `dm_eval` is: {DEFAULT_MAJOR}.{DEFAULT_MINOR}.")
def setup(bot):
bot.add_cog(DmCog(bot)) | Aurorastation/BOREALISbot2 | cogs/dm_eval.py | Python | agpl-3.0 | 13,511 | 0.003553 |
"""Test driver interface
:copyright: Copyright 2019 Marshall Ward, see AUTHORS for details
:license: Apache License, Version 2.0, see LICENSE for details
"""
import os
import shlex
import shutil
import subprocess
from payu.models.model import Model
config_files = [
'data',
'diag',
'input.nml'
]
class Test(Model):
def __init__(self, expt, name, config):
# payu initialisation
super(Test, self).__init__(expt, name, config)
# Model-specific configuration
self.model_type = 'test'
self.default_exec = 'test.exe'
self.config_files = config_files
| marshallward/payu | payu/models/test.py | Python | apache-2.0 | 649 | 0 |
#
# uchroma - Copyright (C) 2021 Stefanie Kondik
#
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, version 3.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
#
# pylint: disable=invalid-name
import re
import pydbus
BASE_PATH = '/org/chemlab/UChroma'
SERVICE = 'org.chemlab.UChroma'
class UChromaClient(object):
def __init__(self):
self._bus = pydbus.SessionBus()
def get_device_paths(self) -> list:
dm = self._bus.get(SERVICE)
return dm.GetDevices()
def get_device(self, identifier):
if identifier is None:
return None
use_key = False
if isinstance(identifier, str):
if identifier.startswith(BASE_PATH):
return self._bus.get(SERVICE, identifier)
if re.match(r'\w{4}:\w{4}.\d{2}', identifier):
use_key = True
elif re.match(r'\d+', identifier):
identifier = int(identifier)
else:
return None
for dev_path in self.get_device_paths():
dev = self.get_device(dev_path)
if use_key and identifier == dev.Key:
return dev
elif identifier == dev.DeviceIndex:
return dev
return None
def get_layer(self, device, layer_idx):
layers = device.CurrentRenderers
if layer_idx >= len(layers):
raise ValueError("Layer index out of range")
return self._bus.get(SERVICE, layers[layer_idx][1])
if __name__ == '__main__':
uclient = UChromaClient()
for u_dev_path in uclient.get_device_paths():
u_dev = uclient.get_device(u_dev_path)
print('[%s]: %s (%s / %s)' % \
(u_dev.Key, u_dev.Name, u_dev.SerialNumber, u_dev.FirmwareVersion))
| cyanogen/uchroma | uchroma/client/dbus_client.py | Python | lgpl-3.0 | 2,107 | 0.002373 |
# -*- coding: utf-8 -*-
from lutefiskdemo.settings import *
DEBUG = True
TEMPLATE_DEBUG = DEBUG
SITE_ID = 1
MAINTENANCE_MODE = 'DEVELOPMENT'
EMAIL_PORT = 1025
INSTALLED_APPS += (
'debug_toolbar',
)
MIDDLEWARE_CLASSES += (
'debug_toolbar.middleware.DebugToolbarMiddleware',
)
INTERNAL_IPS = (
'127.0.0.1',
)
DEBUG_TOOLBAR_CONFIG = {
'INTERCEPT_REDIRECTS': False,
}
# Local Variables:
# indent-tabs-mode: nil
# End:
# vim: ai et sw=4 ts=4
| rentalita/django-lutefiskdemo | src/python/lutefiskdemo/development.py | Python | mit | 475 | 0 |
import string
import numpy
import six
import cupy
from cupy import carray
from cupy import cuda
from cupy import util
six_range = six.moves.range
six_zip = six.moves.zip
def _get_simple_elementwise_kernel(
params, operation, name, preamble,
loop_prep='', after_loop='', options=()):
module_code = string.Template('''
${preamble}
extern "C" __global__ void ${name}(${params}) {
${loop_prep};
CUPY_FOR(i, _ind.size()) {
_ind.set(i);
${operation};
}
${after_loop};
}
''').substitute(
params=params,
operation=operation,
name=name,
preamble=preamble,
loop_prep=loop_prep,
after_loop=after_loop)
module = carray.compile_with_cache(module_code, options)
return module.get_function(name)
_typenames = {
numpy.dtype('float64'): 'double',
numpy.dtype('float32'): 'float',
numpy.dtype('float16'): 'float16',
numpy.dtype('int64'): 'long long',
numpy.dtype('int32'): 'int',
numpy.dtype('int16'): 'short',
numpy.dtype('int8'): 'signed char',
numpy.dtype('uint64'): 'unsigned long long',
numpy.dtype('uint32'): 'unsigned int',
numpy.dtype('uint16'): 'unsigned short',
numpy.dtype('uint8'): 'unsigned char',
numpy.dtype('bool'): 'bool',
}
_scalar_type = (int, float, bool) + tuple(t.type for t in _typenames.keys())
def _get_typename(dtype):
if dtype is None:
raise ValueError('dtype is None')
return _typenames[numpy.dtype(dtype)]
def _check_args(args):
dev = cuda.Device()
cp_array = cupy.ndarray
scalar_type = _scalar_type
for arg in args:
if isinstance(arg, cp_array):
if arg.data.device != dev:
raise ValueError('Array device must be same as the current '
'device: array device = %d while current = %d'
% (arg.device.id, dev.id))
elif not isinstance(arg, scalar_type):
raise TypeError('Unsupported type %s' % type(arg))
def _get_args_info(args):
ret = []
carray_Indexer = carray.Indexer
ret_append = ret.append
for a in args:
t = type(a)
if t == carray_Indexer:
dtype = None
else:
dtype = a.dtype.type
ret_append((t, dtype, a.ndim))
return tuple(ret)
def _get_kernel_params(params, args_info):
ret = []
for p, a in six_zip(params, args_info):
type, dtype, ndim = a
is_array = type is cupy.ndarray
if type is carray.Indexer:
t = 'CIndexer<%d>' % ndim
else:
t = _get_typename(dtype)
if is_array:
t = 'CArray<%s, %d>' % (t, ndim)
ret.append('%s%s %s%s' % ('const ' if p.is_const else '',
t,
'_raw_' if is_array and not p.raw else '',
p.name))
return ', '.join(ret)
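# Illustration (added comment, not part of the original source): for a kernel
# declared with in_params 'float32 x' and out_params 'float32 y', called with
# two 2-d float32 arrays plus the size indexer, this returns roughly
#   'const CArray<float, 2> _raw_x, CArray<float, 2> _raw_y, CIndexer<2> _ind'
# Non-raw array arguments are renamed with a '_raw_' prefix; the per-element
# aliases x and y are re-created inside the kernel body by
# _get_elementwise_kernel below.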
def _reduce_dims(args, params, shape):
ndim = len(shape)
if ndim <= 1:
return args, shape
cp_array = cupy.ndarray
is_array_flags = [not p.raw and isinstance(a, cp_array)
for p, a in six_zip(params, args)]
args_strides = [a._strides for a, f in six_zip(args, is_array_flags) if f]
src_shape = shape
shape = list(src_shape)
cnt = 0
for i in six_range(1, ndim):
j = i - 1
shape_i = shape[i]
shape_j = shape[j]
if shape_j == 1:
continue
for strides in args_strides:
if strides[i] * shape_i != strides[j]:
cnt += 1
axis = j
break
else:
shape[i] *= shape_j
shape[j] = 1
if shape[-1] != 1:
cnt += 1
axis = -1
if not cnt:
return args, src_shape
elif cnt == 1:
new_shape = shape[axis],
args = list(args)
for i, a in enumerate(args):
if is_array_flags[i]:
a = args[i] = a.view()
a._shape = new_shape
a._strides = a._strides[axis],
return args, new_shape
new_shape = tuple([dim for dim in shape if dim != 1])
args = list(args)
for i, a in enumerate(args):
if is_array_flags[i]:
a = args[i] = a.view()
a._shape = new_shape
a._strides = tuple(
[st for st, sh in six_zip(a._strides, shape) if sh != 1])
return args, new_shape
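# Illustration (added comment, not part of the original source): a C-contiguous
# float32 array of shape (2, 3) has byte strides (12, 4), so strides[1] *
# shape[1] == strides[0] and the two axes are merged; the kernel then indexes a
# flat view of shape (6,) with stride (4,). A non-contiguous argument (e.g. a
# transposed view) keeps the original dimensionality.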
class ParameterInfo(object):
def __init__(self, str, is_const):
self.name = None
self.dtype = None
self.ctype = None
self.raw = False
self.is_const = is_const
s = tuple(i for i in str.split() if len(i) != 0)
if len(s) < 2:
raise Exception('Syntax error: %s' % str)
t, self.name = s[-2:]
if t == 'CIndexer':
pass
elif len(t) == 1:
self.ctype = t
else:
dtype = numpy.dtype(t)
self.dtype = dtype.type
if dtype.name != t:
raise ValueError('Wrong type %s' % t)
self.ctype = _get_typename(self.dtype)
for i in s[:-2]:
if i == 'raw':
self.raw = True
else:
                raise Exception('Unknown keyword "%s"' % i)
@util.memoize()
def _get_param_info(s, is_const):
if len(s) == 0:
return ()
return tuple([ParameterInfo(i, is_const) for i in s.strip().split(',')])
@util.memoize()
def _decide_params_type(in_params, out_params, in_args_dtype, out_args_dtype):
type_dict = {}
if out_args_dtype:
assert len(out_params) == len(out_args_dtype)
for p, a in six_zip(out_params, out_args_dtype):
if a is None:
raise TypeError('Output arguments must be cupy.ndarray')
if p.dtype is not None:
if a != p.dtype:
raise TypeError(
'Type is mismatched. %s %s %s' % (p.name, a, p.dtype))
elif p.ctype in type_dict:
t = type_dict[p.ctype]
if t != a:
raise TypeError(
'Type is mismatched. %s %s %s %s' % (
p.name, a, t, p.ctype))
else:
type_dict[p.ctype] = a
assert len(in_params) == len(in_args_dtype)
unknown_ctype = []
for p, a in six_zip(in_params, in_args_dtype):
if a is None:
if p.dtype is None:
unknown_ctype.append(p.ctype)
else:
if p.dtype is not None:
if a != p.dtype:
raise TypeError(
'Type is mismatched. %s %s %s' % (p.name, a, p.dtype))
elif p.ctype in type_dict:
t = type_dict[p.ctype]
if t != a:
raise TypeError(
'Type is mismatched. %s %s %s %s' % (
p.name, a, t, p.ctype))
else:
type_dict[p.ctype] = a
in_types = tuple([type_dict[p.ctype] if p.dtype is None else p.dtype
for p in in_params])
out_types = tuple([type_dict[p.ctype] if p.dtype is None else p.dtype
for p in out_params])
return in_types, out_types, tuple(type_dict.items())
def _broadcast(args, params, use_size):
value = [a if not p.raw and isinstance(a, cupy.ndarray) else None
for p, a in six_zip(params, args)]
if use_size:
for i in value:
if i is None:
break
else:
raise ValueError("Specified 'size' can be used only "
"if all of the ndarray are 'raw'.")
else:
for i in value:
if i is not None:
break
else:
raise ValueError('Loop size is Undecided')
brod = cupy.broadcast(*value)
value = [b if a is None else a
for a, b in six_zip(brod.values, args)]
return value, brod.shape
def _get_out_args(out_args, out_types, out_shape):
if not out_args:
return [cupy.empty(out_shape, t) for t in out_types]
for a in out_args:
if not isinstance(a, cupy.ndarray):
raise TypeError(
'Output arguments type must be cupy.ndarray')
if a.shape != out_shape:
raise ValueError('Out shape is mismatched')
return out_args
def _get_out_args_with_params(out_args, out_types, out_shape, out_params):
if not out_args:
for p in out_params:
if p.raw:
raise ValueError('Output array size is Undecided')
return [cupy.empty(out_shape, t) for t in out_types]
for a, p in six_zip(out_args, out_params):
if not isinstance(a, cupy.ndarray):
raise TypeError(
'Output arguments type must be cupy.ndarray')
if a.shape != out_shape and not p.raw:
raise ValueError('Out shape is mismatched')
return out_args
@util.memoize(for_each_device=True)
def _get_elementwise_kernel(args_info, types, params, operation, name,
preamble, kwargs):
kernel_params = _get_kernel_params(params, args_info)
types_preamble = '\n'.join(
'typedef %s %s;' % (_get_typename(v), k) for k, v in types)
preamble = types_preamble + '\n' + preamble
op = []
for p, a in six_zip(params, args_info):
if not p.raw and a[0] == cupy.ndarray:
if p.is_const:
fmt = 'const {t} {n} = _raw_{n}[_ind.get()];'
else:
fmt = '{t} &{n} = _raw_{n}[_ind.get()];'
op.append(fmt.format(t=p.ctype, n=p.name))
op.append(operation)
operation = '\n'.join(op)
return _get_simple_elementwise_kernel(
kernel_params, operation, name,
preamble, **dict(kwargs))
class ElementwiseKernel(object):
"""User-defined elementwise kernel.
This class can be used to define an elementwise kernel with or without
broadcasting.
The kernel is compiled at an invocation of the
:meth:`~ElementwiseKernel.__call__` method,
which is cached for each device.
The compiled binary is also cached into a file under the
``$HOME/.cupy/kernel_cache/`` directory with a hashed file name. The cached
binary is reused by other processes.
Args:
in_params (str): Input argument list.
out_params (str): Output argument list.
operation (str): The body in the loop written in CUDA-C/C++.
name (str): Name of the kernel function. It should be set for
readability of the performance profiling.
reduce_dims (bool): If False, the shapes of array arguments are
kept within the kernel invocation. The shapes are reduced
(i.e., the arrays are reshaped without copy to the minimum
            ndims) by default. This may make the kernel faster by reducing
            index calculations.
options (list): Options passed to the nvcc command.
preamble (str): Fragment of the CUDA-C/C++ code that is inserted at the
top of the cu file.
loop_prep (str): Fragment of the CUDA-C/C++ code that is inserted at
the top of the kernel function definition and above the ``for``
loop.
after_loop (str): Fragment of the CUDA-C/C++ code that is inserted at
the bottom of the kernel function definition.
"""
def __init__(self, in_params, out_params, operation,
name='kernel', reduce_dims=True, preamble='', **kwargs):
self.in_params = _get_param_info(in_params, True)
self.out_params = _get_param_info(out_params, False)
self.nin = len(self.in_params)
self.nout = len(self.out_params)
self.nargs = self.nin + self.nout
param_rest = _get_param_info('CIndexer _ind', False)
self.params = self.in_params + self.out_params + param_rest
self.operation = operation
self.name = name
self.reduce_dims = reduce_dims
self.preamble = preamble
self.kwargs = frozenset(kwargs.items())
names = [p.name for p in self.in_params + self.out_params]
if 'i' in names:
raise ValueError("Can not use 'i' as a parameter name")
def __call__(self, *args, **kwargs):
"""Compiles and invokes the elementwise kernel.
The compilation runs only if the kernel is not cached. Note that the
kernels with different argument dtypes or ndims are not compatible. It
        means that a single ElementwiseKernel object may be compiled into
multiple kernel binaries.
Args:
            args: Arguments of the kernel.
size (int): Range size of the indices. If specified, the variable
``n`` is set to this value. Otherwise, the result of
broadcasting is used to determine the value of ``n``.
Returns:
Arrays are returned according to the ``out_params`` argument of the
``__init__`` method.
"""
size = kwargs.pop('size', None)
if kwargs:
raise TypeError('Wrong arguments %s' % kwargs)
n_args = len(args)
if n_args != self.nin and n_args != self.nargs:
raise TypeError('Wrong number of arguments for %s' % self.name)
_check_args(args)
values, shape = _broadcast(args, self.params, size is not None)
in_args = values[:self.nin]
out_args = values[self.nin:]
cp_array = cupy.ndarray
in_ndarray_types = tuple(
[a.dtype.type if isinstance(a, cp_array) else None
for a in in_args])
out_ndarray_types = tuple(
[a.dtype.type if isinstance(a, cp_array) else None
for a in out_args])
in_types, out_types, types = _decide_params_type(
self.in_params, self.out_params,
in_ndarray_types, out_ndarray_types)
out_args = _get_out_args_with_params(
out_args, out_types, shape, self.out_params)
if self.nout == 1:
ret = out_args[0]
else:
ret = tuple(out_args)
if size is not None:
shape = size,
if 0 in shape:
return ret
inout_args = [x if isinstance(x, cp_array) else t(x)
for x, t in six_zip(in_args, in_types)]
inout_args += out_args
if self.reduce_dims:
inout_args, shape = _reduce_dims(
inout_args, self.params, shape)
indexer = carray.Indexer(shape)
inout_args.append(indexer)
args_info = _get_args_info(inout_args)
kern = _get_elementwise_kernel(
args_info, types, self.params, self.operation,
self.name, self.preamble, self.kwargs)
kern.linear_launch(indexer.size, inout_args)
return ret
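# A minimal usage sketch for the class above, kept as a comment so importing
# this module does not require a CUDA device. It assumes two float32 cupy
# arrays x and y of the same shape already exist; the names are illustrative:
#
#     squared_diff = ElementwiseKernel(
#         'float32 x, float32 y',   # in_params
#         'float32 z',              # out_params
#         'z = (x - y) * (x - y)',  # operation (CUDA-C body of the loop)
#         'squared_diff')           # kernel name
#     z = squared_diff(x, y)        # compiled on first call, then cached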
@util.memoize(for_each_device=True)
def _get_ufunc_kernel(in_types, out_types, routine, args_info, out_raw_types,
params, name, preamble):
kernel_params = _get_kernel_params(params, args_info)
types = []
op = []
for i, x in enumerate(in_types):
types.append('typedef %s in%d_type;' % (_get_typename(x), i))
if args_info[i][0] is cupy.ndarray:
op.append(
'const in{0}_type in{0} = _raw_in{0}[_ind.get()];'.format(i))
for i, x in enumerate(out_types):
types.append('typedef %s out%d_type;' % (_get_typename(x), i))
op.append('{1} &out{0} = _raw_out{0}[_ind.get()];'.format(
i, _get_typename(out_raw_types[i])))
op.append(routine)
operation = '\n'.join(op)
types.append(preamble)
preamble = '\n'.join(types)
return _get_simple_elementwise_kernel(
kernel_params, operation, name, preamble)
def _guess_routine_from_in_types(ops, in_types):
for op in ops:
for dst, src in six_zip(op[0], in_types):
if not numpy.can_cast(src, dst):
break
else:
return op
return None
def _guess_routine_from_dtype(ops, dtype):
for op in ops:
for t in op[1]:
if t != dtype:
break
else:
return op
return None
def _guess_routine(name, cache, ops, in_args, dtype):
if dtype is None:
key = tuple([numpy.dtype(type(i)).type
if isinstance(i, (int, float, bool)) else i.dtype.type
for i in in_args])
else:
key = dtype
op = cache.get(key, ())
    if op == ():
if dtype is None:
op = _guess_routine_from_in_types(ops, key)
else:
op = _guess_routine_from_dtype(ops, key)
cache[key] = op
if op:
return op
raise TypeError('Wrong type of arguments for %s' % name)
class ufunc(object):
"""Universal function.
Attributes:
name (str): The name of the universal function.
nin (int): Number of input arguments.
nout (int): Number of output arguments.
nargs (int): Number of all arguments.
"""
def __init__(self, name, nin, nout, ops, preamble='', doc=''):
self.name = name
self.nin = nin
self.nout = nout
self.nargs = nin + nout
self._ops = ops
self._preamble = preamble
self.__doc__ = doc
_in_params = tuple(
ParameterInfo('T in%d' % i, True)
for i in six_range(nin))
_out_params = tuple(
ParameterInfo('T out%d' % i, False)
for i in six_range(nout))
self._params = _in_params + _out_params + (
ParameterInfo('CIndexer _ind', False),)
self._routine_cache = {}
def __repr__(self):
return "<ufunc '%s'>" % self.name
@property
def types(self):
"""A list of type signatures.
Each type signature is represented by type character codes of inputs
and outputs separated by '->'.
"""
types = []
for in_types, out_types, _ in self._ops:
in_str = ''.join([numpy.dtype(t).char for t in in_types])
out_str = ''.join([numpy.dtype(t).char for t in out_types])
types.append('%s->%s' % (in_str, out_str))
return types
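    # For example, a two-argument arithmetic ufunc registered with ops such as
    # ('ff->f', 'dd->d') would report types == ['ff->f', 'dd->d'] here
    # (illustrative values; the actual list depends on self._ops).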
def __call__(self, *args, **kwargs):
"""Applies the universal function to arguments elementwise.
Args:
args: Input arguments. Each of them can be a cupy.ndarray object or
a scalar. The output arguments can be omitted or be specified
by the ``out`` argument.
            out (cupy.ndarray): Output array. It outputs to new arrays by
                default.
dtype: Data type specifier.
Returns:
Output array or a tuple of output arrays.
"""
out = kwargs.pop('out', None)
dtype = kwargs.pop('dtype', None)
if dtype is not None:
dtype = numpy.dtype(dtype).type
if kwargs:
raise TypeError('Wrong arguments %s' % kwargs)
n_args = len(args)
if n_args != self.nin and n_args != self.nargs:
raise TypeError('Wrong number of arguments for %s' % self.name)
if out is None:
in_args = args[:self.nin]
out_args = args[self.nin:]
else:
if self.nout != 1:
raise ValueError("Cannot use 'out' in %s" % self.name)
if n_args != self.nin:
raise ValueError("Cannot specify 'out' as both "
"a positional and keyword argument")
in_args = args
out_args = out,
args += out_args
_check_args(args)
broad = cupy.broadcast(*args)
shape = broad.shape
in_types, out_types, routine = _guess_routine(
self.name, self._routine_cache, self._ops, in_args, dtype)
out_args = _get_out_args(out_args, out_types, shape)
if self.nout == 1:
ret = out_args[0]
else:
ret = tuple(out_args)
if 0 in shape:
return ret
inout_args = [x if isinstance(x, cupy.ndarray) else t(x)
for x, t in six_zip(broad.values, in_types)]
inout_args.extend(out_args)
inout_args, shape = _reduce_dims(inout_args, self._params, shape)
indexer = carray.Indexer(shape)
inout_args.append(indexer)
args_info = _get_args_info(inout_args)
out_raw_types = tuple([x.dtype.type for x in out_args])
kern = _get_ufunc_kernel(
in_types, out_types, routine,
args_info, out_raw_types,
self._params, self.name, self._preamble)
kern.linear_launch(indexer.size, inout_args)
return ret
def create_ufunc(name, ops, routine=None, preamble='', doc=''):
_ops = []
for t in ops:
if not isinstance(t, tuple):
typ = t
rt = routine
else:
typ, rt = t
types = typ.split('->')
if len(types) == 1:
in_types = out_types = tuple(types)
else:
in_types, out_types = map(tuple, types)
in_types = tuple([numpy.dtype(t).type for t in in_types])
out_types = tuple([numpy.dtype(t).type for t in out_types])
_ops.append((in_types, out_types, rt))
return ufunc(name, len(_ops[0][0]), len(_ops[0][1]), _ops, preamble, doc)
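# Illustrative sketch (comment only, not executed): create_ufunc builds a ufunc
# from numpy-style type-character signatures plus a CUDA-C routine. A
# hypothetical single-input ufunc could look like:
#
#     my_square = create_ufunc(
#         'cupy_my_square',
#         ('f->f', 'd->d'),      # float32 and float64 signatures
#         'out0 = in0 * in0')    # shared routine for all signatures
#
# The cupy_copy, cupy_copy_where and cupy_divmod definitions below are real
# instances of this pattern.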
_id = 'out0 = in0'
copy = create_ufunc(
'cupy_copy',
('?->?', 'b->b', 'B->B', 'h->h', 'H->H', 'i->i', 'I->I', 'l->l', 'L->L',
'q->q', 'Q->Q', 'e->e', 'f->f', 'd->d'),
_id)
copy_where = create_ufunc(
'cupy_copy_where',
('??->?', 'b?->b', 'B?->B', 'h?->h', 'H?->H', 'i?->i', 'I?->I', 'l?->l',
'L?->L', 'q?->q', 'Q?->Q', 'e?->e', 'f?->f', 'd?->d'),
'if (in1) out0 = in0')
_divmod = create_ufunc(
'cupy_divmod',
('bb->b', 'BB->B', 'hh->h', 'HH->H', 'ii->i', 'II->I', 'll->l', 'LL->L',
'qq->q', 'QQ->Q', 'ee->e', 'ff->f', 'dd->d'),
'out0_type a = _floor_divide(in0, in1); out0 = a; out1 = in0 - a * in1')
| sou81821/chainer | cupy/elementwise.py | Python | mit | 22,222 | 0 |
"""
Tests for the module that encompasses fourth species counterpoint.
"""
import unittest
from foox.species.fourth import (Genome, create_population, is_parallel,
make_fitness_function, make_generate_function, make_halt_function,
MAX_REWARD, REWARD_SUSPENSION)
from foox.species.utils import is_suspension
# The cantus firmus to use in the test suite.
CANTUS_FIRMUS = [5, 7, 6, 5, 8, 7, 9, 8, 7, 6, 5]
class TestCreatePopulation(unittest.TestCase):
"""
Ensures the create_population function works as expected.
"""
def test_returns_valid_genomes(self):
"""
Checks the genomes returned by the create_population function are
of the correct type.
"""
result = create_population(1, CANTUS_FIRMUS)
self.assertEqual(Genome, type(result[0]))
def test_returns_correct_number_of_genomes(self):
"""
Ensures the correct number of genomes are returned by the function.
"""
result = create_population(100, CANTUS_FIRMUS)
self.assertEqual(100, len(result))
def test_uses_only_valid_intervals(self):
"""
Tests that only valid consonant intervals are used.
"""
valid_intervals = [2, 4, 5, 7, 9, 11]
result = create_population(20, CANTUS_FIRMUS)
for genome in result:
for i in range(len(genome.chromosome)):
contrapunctus_note = genome.chromosome[i]
cantus_firmus_note = CANTUS_FIRMUS[i]
interval = contrapunctus_note - cantus_firmus_note
self.assertIn(interval, valid_intervals)
def test_solutions_have_correct_number_of_notes(self):
"""
Ensures that all solutions have the expected number of notes.
"""
result = create_population(20, CANTUS_FIRMUS)
expected_length = len(CANTUS_FIRMUS)
for genome in result:
self.assertEqual(expected_length, len(genome.chromosome))
class TestFitnessFunction(unittest.TestCase):
"""
Ensures that the fitness function works as expected.
"""
def test_make_fitness_function_returns_callable(self):
"""
Ensures the make_fitness_function returns a callable.
"""
result = make_fitness_function(CANTUS_FIRMUS)
self.assertTrue(callable(result))
def test_fitness_function_returns_float(self):
"""
Makes sure the generated fitness function returns a fitness score as a
float.
"""
fitness_function = make_fitness_function(CANTUS_FIRMUS)
genome = Genome([1, 2, 3])
result = fitness_function(genome)
        self.assertEqual(float, type(result))
def test_fitness_function_sets_fitness_on_genome(self):
"""
Ensures the fitness score is set in the genome's fitness attribute and
is the same as the returned fitness score.
"""
fitness_function = make_fitness_function(CANTUS_FIRMUS)
genome = Genome([1, 2, 3])
self.assertEqual(None, genome.fitness)
result = fitness_function(genome)
self.assertNotEqual(None, genome.fitness)
self.assertEqual(result, genome.fitness)
def test_fitness_function_uses_cached_genome_fitness(self):
"""
Ensures the fitness function bails if there is already a score set for
the genome.
"""
fitness_function = make_fitness_function(CANTUS_FIRMUS)
genome = Genome([1, 2, 3])
genome.fitness = 12345
result = fitness_function(genome)
self.assertEqual(12345, result)
class TestHalt(unittest.TestCase):
"""
Ensure the halting function works as expected.
"""
def test_halt_expected(self):
"""
Ensure the function returns true if we're in a halting state.
"""
halt = make_halt_function([6, 5])
g1 = Genome([6, 5])
g1.fitness = MAX_REWARD
population = [g1, ]
result = halt(population, 1)
self.assertTrue(result)
def test_halt_checks_suspension_count(self):
"""
        If the solution contains suspensions, the halt function should ensure
        that the MAX_REWARD is incremented by the number of suspensions
        (rewarded because they're part of a valid stepwise motion).
"""
halt = make_halt_function([9, 8, 7, 6, 5])
g1 = Genome([11, 10, 9, 8, 7])
        # only one out of two "correct" dissonances
g1.fitness = MAX_REWARD + REWARD_SUSPENSION
population = [g1, ]
result = halt(population, 1)
self.assertFalse(result)
# Try again
# two out of two "correct" dissonances
g1.fitness = MAX_REWARD + (REWARD_SUSPENSION * 2)
population = [g1, ]
result = halt(population, 1)
self.assertTrue(result)
def test_halt_not(self):
"""
Ensures if the fittest genome has fitness < MAX_REWARD then halt
doesn't succeed.
"""
halt = make_halt_function([3, 2, 1])
g1 = Genome([1, 2, 3])
g1.fitness = MAX_REWARD - 0.1
g2 = Genome([1, 2, 3])
g2.fitness = 3
g3 = Genome([1, 2, 3])
g3.fitness = 2
# Any fittest solution with fitness < MAX_REWARD means no halt.
population = [g1, g2, g3]
result = halt(population, 1)
self.assertFalse(result)
class TestGenome(unittest.TestCase):
"""
Ensures that the Genome class is overridden as expected.
"""
def test_mutate_is_implemented(self):
"""
Ensures that we have a mutate method implemented.
"""
genome = Genome([1, 2, 3])
self.assertNotEqual(NotImplemented, genome.mutate(2, 0.2, [1, 2, 3]))
def test_mutate_bounded_by_arg_values(self):
"""
        A rather contrived test, but it proves that both the mutation_range and
        mutation_rate are used correctly in the context provided by a cantus
        firmus.
"""
cantus_firmus = [1, 1, 1, 1, 1]
# mutate every time.
mutation_rate = 1
# will always mutate to thirds above the cf note.
mutation_range = 2
genome = Genome([5, 6, 7, 8, 9])
genome.mutate(mutation_range, mutation_rate, cantus_firmus)
self.assertEqual([3, 3, 3, 3, 3], genome.chromosome)
| LoganRickert/foox | test/species/test_fourth.py | Python | mit | 6,328 | 0.000474 |
from __future__ import print_function
import numpy as np
import matplotlib.pyplot as plt
from astropy.io import fits
from astropy.io import ascii
import astropy.coordinates as coord
import astropy.units as u
from astropy.time import Time
from astropy.time import TimeDelta
import os
'''
Read in 1m observation metadata to figure out:
- which stars were imaged when
- whether they were in eclipse, or not
- some kind of image preview or quality flag (NOT IMPLEMENTED YET)
- make a plot of this info
To work, this program needs to be saved somewhere that can see imagedir and reffile.
It assumes there are date-formatted subdirectories in imagedir (e.g. 150212) with FITS
files saved in them. Some of those FITS files might be for targets we don't care about.
***IMPORTANT NOTE***
You will need to manually inspect the outfile and edit it before running imagereduce.py:
- replace each filter entry with a short (6 chars or fewer) string with no spaces
- ensure each filename starts with '1' (some may be truncated)
'''
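# A hypothetical outfile row (values are illustrative only), matching the
# space-separated columns written near the end of this script
# (KIC, observation JD, eclipse flag, filter name, file name):
#   8430105 2457065.78941 Pri johnson_V 150212.0032.fits
# Per the note above, the filter entry should be shortened by hand to six
# characters or fewer before running imagereduce.py.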
#imagedir = '/mnt/mrawls/1m_obs/'
imagedir = '/virgo/mrawls/1mphot/'
reffile = 'RGEB_info_alpha.txt'
outfile = 'imginventory_list3.txt'
# Get the paths to the directories in imagedir which are 2014 or 2015 date format
dirs = [x for x in os.listdir(imagedir) if x[0:2] == '14' or x[0:2] == '15']
fulldirs = [imagedir+x+'/' for x in dirs]
# Read in reference data for the targets
refdata = ascii.read(reffile)
KICs = refdata['col1']
Porbs = refdata['col2']
BJD0s = refdata['col3']
RAs = refdata['col7']
Decs = refdata['col8']
# Create astropy Time objects for the zeropoints and orbital periods
Porbs_time = []; BJD0s_time = []
for Porb, BJD0 in zip(Porbs, BJD0s):
Porbs_time.append(TimeDelta(Porb, format='jd')) # duration of one orbit
BJD0s_time.append(Time(BJD0+2400000.0, format='jd', scale='utc')) # time of primary eclipse
# Eclipse timing information
pwid = refdata['col4']
swid = refdata['col5']
sep = refdata['col6']
# Find the files that are FITS images
# Save the date, time, RA, Dec, and filter from the header, as well as the filename
# Keep going if a file can't be opened for any reason
dateobs = []; UTobs = []
RAobs = []; Decobs = []
filtnameobs = []; filenamesave = []
for dir in fulldirs:
filesindir = os.listdir(dir)
for filename in filesindir:
# Keep only fits files that are not guiding images
        if filename[-4:] in ('fits', 'FITS') and 'g.' not in filename and 'flat' not in filename:
fullfile = dir+filename
#print(fullfile)
try:
hdu = fits.open(fullfile, ignore_missing_end = True)
except:
print('Error accessing {0}'.format(fullfile))
continue
else:
dateobs.append(hdu[0].header['date-obs'])
UTobs.append(hdu[0].header['UT'])
RAobs.append(hdu[0].header['RA'])
Decobs.append(hdu[0].header['Dec'])
filtnameobs.append(hdu[0].header['filtname'][0:17])
filenamesave.append(fullfile)
# Put RA and Dec values into less annoying formats
print('Done reading image files')
RAs = coord.Angle(RAs, unit=u.hour)
RAobs = coord.Angle(RAobs, unit=u.hour)
Decs = coord.Angle(Decs, unit=u.degree)
Decobs = coord.Angle(Decobs, unit=u.degree)
# Identify which catalog RA and Dec value are closest to the observed ones
# If the closest RA and closest Dec have the same index, assign the appropriate KIC
# Otherwise, assume the file doesn't contain a target of interest
print('Identifying which observations belong to KIC objects...')
KICobs = []
for RA, Dec in zip(RAobs, Decobs):
idx1 = min(range(len(RAs)), key=lambda i: abs(RAs[i] - RA))
idx2 = min(range(len(Decs)), key=lambda i: abs(Decs[i] - Dec))
if idx1 == idx2:
KICobs.append(KICs[idx1])
else:
KICobs.append('None')
# Keep only the good observations that have assigned KICs
# Consolidate the time and date info into a single object
# Keep track of the filters and filenames
KICgoods = []; datetimes = []; RAgoods = []; Decgoods = []; filtgoods = []; filenamegoods = []
for KIC, date, UT, RA, Dec, filtname, file in zip(KICobs, dateobs, UTobs, RAobs, Decobs, filtnameobs, filenamesave):
if KIC != 'None':
KICgoods.append(KIC)
datetimes.append(Time(str(date)+'T'+str(UT), format='isot', scale='utc'))
RAgoods.append(RA)
Decgoods.append(Dec)
filtgoods.append(filtname)
filenamegoods.append(file)
print('Done')
# Create a new list that contains a list of observation times for each object
# Keep track of the corresponding filters and filenames
# Initialize a parallel list that assumes all observations are not in eclipse, for now
observations = [[] for x in xrange(len(KICs))]
filtrefs = [[] for x in xrange(len(KICs))]
filenamerefs = [[] for x in xrange(len(KICs))]
isEclipse = [[] for x in xrange(len(KICs))]
for idx, (KIC, Porb, BJD0) in enumerate(zip(KICs, Porbs, BJD0s)): # loop over systems
for KIC_obs, datetime_obs, filt_ref, file_ref in zip(KICgoods, datetimes, filtgoods, filenamegoods): # loop over observations
if KIC_obs == KIC:
observations[idx].append(datetime_obs)
filtrefs[idx].append(filt_ref)
filenamerefs[idx].append(file_ref)
isEclipse[idx].append('No ')
# Define start and end of observation windows, with a 10-day buffer for plotting purposes
obs_tstart = min(datetimes) - TimeDelta(10, format='jd')
obs_tend = max(datetimes) + TimeDelta(10, format='jd')
# Calculate eclipse start and end points that fall within the observation window
# (This is blatantly stolen/adapted from 'eclipsefinder.py')
# These eclipse durations do NOT include any extra time for ensuring 100% of ingress/egress
print('Calculating eclipse times...')
print('New BJD0 values immediately before the obs window (KIC, newBJD0, start, end):')
pri_eclipse_mid = [[] for x in xrange(len(KICs))]
sec_eclipse_mid = [[] for x in xrange(len(KICs))]
pri_eclipse_start = [[] for x in xrange(len(KICs))]
pri_eclipse_end = [[] for x in xrange(len(KICs))]
sec_eclipse_start = [[] for x in xrange(len(KICs))]
sec_eclipse_end = [[] for x in xrange(len(KICs))]
for j in range(0,len(KICs)): # j is the *object* index here
# Find the most recent bjd0 time right BEFORE the observation window of interest
newbjd0_float = np.floor((obs_tstart.jd - BJD0s_time[j].jd)/Porbs_time[j].value) * Porbs_time[j].value + BJD0s_time[j].jd
newbjd0 = Time(newbjd0_float, format='jd', scale='utc')
print(KICs[j], newbjd0_float, obs_tstart.jd, obs_tend.jd)
for i in range(0,len(observations[j])): # i is the *observation* index here
# Save eclipse midpoints
pri_eclipse_mid[j].append(newbjd0 + i*Porbs_time[j])
sec_eclipse_mid[j].append(newbjd0 + i*Porbs_time[j] + sep[j]*Porbs_time[j])
# Save primary eclipse start & end times
pri_eclipse_start[j].append(pri_eclipse_mid[j][i] - pwid[j]*Porbs_time[j]/2)
pri_eclipse_end[j].append(pri_eclipse_mid[j][i] + pwid[j]*Porbs_time[j]/2)
# Save secondary eclipse start & end times
sec_eclipse_start[j].append(sec_eclipse_mid[j][i] - swid[j]*Porbs_time[j]/2)
sec_eclipse_end[j].append(sec_eclipse_mid[j][i] + swid[j]*Porbs_time[j]/2)
print('Done')
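# Worked example of the epoch arithmetic above (hypothetical numbers): with
# Porb = 10 d, BJD0 = 2455000.0 and obs_tstart.jd = 2457065.3,
#   newbjd0 = floor((2457065.3 - 2455000.0) / 10) * 10 + 2455000.0 = 2457060.0,
# i.e. the last mid-eclipse epoch at or before the window start. With
# pwid = 0.04, each primary eclipse then spans +/- 0.04 * 10 / 2 = 0.2 d
# around its midpoint (2457059.8 to 2457060.2 for the first one).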
# Make a plot as a function of time
# Color-code points by whether they're in eclipse or not (red = primary, blue = secondary)
# Finally, save all the observation info to file
plt.figure(1, figsize=(17,12))
plt.yticks(range(0,len(KICs)), ['%.0f' % a for a in KICs])
plt.axis([obs_tstart.plot_date, obs_tend.plot_date, -1, len(KICs)])
print('Preparing plot and writing to outfile (be patient)...')
outfilelist = open(outfile, 'w')
for idx, KIC in enumerate(KICs): # loop over systems
print(KIC, '...')
for jdx, (obs, filt, file) in enumerate(zip(observations[idx], filtrefs[idx], filenamerefs[idx])): # loop over observations
plt.plot_date(obs.plot_date, idx, marker='o', mec=None, mfc='0.75') # plot all observations
for start, end in zip(pri_eclipse_start[idx], pri_eclipse_end[idx]): # PRIMARY ECLIPSE
if start.jd <= obs.jd <= end.jd:
isEclipse[idx][jdx] = 'Pri' # change value in isEclipse if appropriate
plt.plot_date(obs.plot_date, idx, marker='o', mec=None, mfc='r')
for start, end in zip(sec_eclipse_start[idx], sec_eclipse_end[idx]): # SECONDARY ECLIPSE
if start.jd <= obs.jd <= end.jd:
isEclipse[idx][jdx] = 'Sec' # change value in isEclipse if appropriate
plt.plot_date(obs.plot_date, idx, marker='o', mec=None, mfc='b')
print(KIC, obs.jd, isEclipse[idx][jdx], filt, file[-15:], file=outfilelist)
outfilelist.close()
plt.gcf().autofmt_xdate() # for slanty dates on the Time x-axis
# Option to show or save figure
#plt.show()
plt.savefig('imginventory_plot.png')
| mrawls/APO-1m-phot | imginventory.py | Python | mit | 9,017 | 0.007541 |
"""Support for RFXtrx devices."""
import binascii
from collections import OrderedDict
import logging
import RFXtrx as rfxtrxmod
import voluptuous as vol
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_NAME,
ATTR_STATE,
CONF_DEVICE,
CONF_DEVICES,
CONF_HOST,
CONF_PORT,
EVENT_HOMEASSISTANT_START,
EVENT_HOMEASSISTANT_STOP,
POWER_WATT,
TEMP_CELSIUS,
UNIT_PERCENTAGE,
UV_INDEX,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import slugify
DOMAIN = "rfxtrx"
DEFAULT_SIGNAL_REPETITIONS = 1
ATTR_AUTOMATIC_ADD = "automatic_add"
ATTR_DEVICE = "device"
ATTR_DEBUG = "debug"
ATTR_FIRE_EVENT = "fire_event"
ATTR_DATA_TYPE = "data_type"
ATTR_DUMMY = "dummy"
CONF_DATA_BITS = "data_bits"
CONF_AUTOMATIC_ADD = "automatic_add"
CONF_DATA_TYPE = "data_type"
CONF_SIGNAL_REPETITIONS = "signal_repetitions"
CONF_FIRE_EVENT = "fire_event"
CONF_DUMMY = "dummy"
CONF_DEBUG = "debug"
CONF_OFF_DELAY = "off_delay"
EVENT_BUTTON_PRESSED = "button_pressed"
DATA_TYPES = OrderedDict(
[
("Temperature", TEMP_CELSIUS),
("Temperature2", TEMP_CELSIUS),
("Humidity", UNIT_PERCENTAGE),
("Barometer", ""),
("Wind direction", ""),
("Rain rate", ""),
("Energy usage", POWER_WATT),
("Total usage", POWER_WATT),
("Sound", ""),
("Sensor Status", ""),
("Counter value", ""),
("UV", UV_INDEX),
("Humidity status", ""),
("Forecast", ""),
("Forecast numeric", ""),
("Rain total", ""),
("Wind average speed", ""),
("Wind gust", ""),
("Chill", ""),
("Total usage", ""),
("Count", ""),
("Current Ch. 1", ""),
("Current Ch. 2", ""),
("Current Ch. 3", ""),
("Energy usage", ""),
("Voltage", ""),
("Current", ""),
("Battery numeric", ""),
("Rssi numeric", ""),
]
)
RECEIVED_EVT_SUBSCRIBERS = []
RFX_DEVICES = {}
_LOGGER = logging.getLogger(__name__)
DATA_RFXOBJECT = "rfxobject"
BASE_SCHEMA = vol.Schema(
{
vol.Optional(CONF_DEBUG, default=False): cv.boolean,
vol.Optional(CONF_DUMMY, default=False): cv.boolean,
}
)
DEVICE_SCHEMA = BASE_SCHEMA.extend({vol.Required(CONF_DEVICE): cv.string})
PORT_SCHEMA = BASE_SCHEMA.extend(
{vol.Required(CONF_PORT): cv.port, vol.Optional(CONF_HOST): cv.string}
)
CONFIG_SCHEMA = vol.Schema(
{DOMAIN: vol.Any(DEVICE_SCHEMA, PORT_SCHEMA)}, extra=vol.ALLOW_EXTRA
)
def setup(hass, config):
"""Set up the RFXtrx component."""
# Declare the Handle event
def handle_receive(event):
"""Handle received messages from RFXtrx gateway."""
# Log RFXCOM event
if not event.device.id_string:
return
_LOGGER.debug(
"Receive RFXCOM event from "
"(Device_id: %s Class: %s Sub: %s, Pkt_id: %s)",
slugify(event.device.id_string.lower()),
event.device.__class__.__name__,
event.device.subtype,
"".join(f"{x:02x}" for x in event.data),
)
# Callback to HA registered components.
for subscriber in RECEIVED_EVT_SUBSCRIBERS:
subscriber(event)
device = config[DOMAIN].get(ATTR_DEVICE)
host = config[DOMAIN].get(CONF_HOST)
port = config[DOMAIN].get(CONF_PORT)
debug = config[DOMAIN][ATTR_DEBUG]
dummy_connection = config[DOMAIN][ATTR_DUMMY]
if dummy_connection:
rfx_object = rfxtrxmod.Connect(
device, None, debug=debug, transport_protocol=rfxtrxmod.DummyTransport2,
)
elif port is not None:
# If port is set then we create a TCP connection
rfx_object = rfxtrxmod.Connect(
(host, port),
None,
debug=debug,
transport_protocol=rfxtrxmod.PyNetworkTransport,
)
else:
rfx_object = rfxtrxmod.Connect(device, None, debug=debug)
def _start_rfxtrx(event):
rfx_object.event_callback = handle_receive
hass.bus.listen_once(EVENT_HOMEASSISTANT_START, _start_rfxtrx)
def _shutdown_rfxtrx(event):
"""Close connection with RFXtrx."""
rfx_object.close_connection()
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, _shutdown_rfxtrx)
hass.data[DATA_RFXOBJECT] = rfx_object
return True
def get_rfx_object(packetid):
"""Return the RFXObject with the packetid."""
try:
binarypacket = bytearray.fromhex(packetid)
except ValueError:
return None
pkt = rfxtrxmod.lowlevel.parse(binarypacket)
if pkt is None:
return None
if isinstance(pkt, rfxtrxmod.lowlevel.SensorPacket):
obj = rfxtrxmod.SensorEvent(pkt)
elif isinstance(pkt, rfxtrxmod.lowlevel.Status):
obj = rfxtrxmod.StatusEvent(pkt)
else:
obj = rfxtrxmod.ControlEvent(pkt)
return obj
def get_pt2262_deviceid(device_id, nb_data_bits):
"""Extract and return the address bits from a Lighting4/PT2262 packet."""
if nb_data_bits is None:
return
try:
data = bytearray.fromhex(device_id)
except ValueError:
return None
mask = 0xFF & ~((1 << nb_data_bits) - 1)
data[len(data) - 1] &= mask
return binascii.hexlify(data)
def get_pt2262_cmd(device_id, data_bits):
"""Extract and return the data bits from a Lighting4/PT2262 packet."""
try:
data = bytearray.fromhex(device_id)
except ValueError:
return None
mask = 0xFF & ((1 << data_bits) - 1)
return hex(data[-1] & mask)
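# Worked example for the two helpers above (hypothetical packet id): with
# device_id = "deadbe" and 4 data bits,
#   get_pt2262_deviceid("deadbe", 4) masks off the low nibble of the last byte
#     (0xBE & 0xF0 = 0xB0) and returns b'deadb0' (the address part), while
#   get_pt2262_cmd("deadbe", 4) keeps only that nibble
#     (0xBE & 0x0F = 0x0E) and returns '0xe' (the command part).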
def get_pt2262_device(device_id):
"""Look for the device which id matches the given device_id parameter."""
for device in RFX_DEVICES.values():
if (
hasattr(device, "is_lighting4")
and device.masked_id is not None
and device.masked_id == get_pt2262_deviceid(device_id, device.data_bits)
):
_LOGGER.debug(
"rfxtrx: found matching device %s for %s", device_id, device.masked_id,
)
return device
return None
def find_possible_pt2262_device(device_id):
"""Look for the device which id matches the given device_id parameter."""
for dev_id, device in RFX_DEVICES.items():
if hasattr(device, "is_lighting4") and len(dev_id) == len(device_id):
size = None
for i, (char1, char2) in enumerate(zip(dev_id, device_id)):
if char1 != char2:
break
size = i
if size is not None:
size = len(dev_id) - size - 1
_LOGGER.info(
"rfxtrx: found possible device %s for %s "
"with the following configuration:\n"
"data_bits=%d\n"
"command_on=0x%s\n"
"command_off=0x%s\n",
device_id,
dev_id,
size * 4,
dev_id[-size:],
device_id[-size:],
)
return device
return None
def get_devices_from_config(config, device):
"""Read rfxtrx configuration."""
signal_repetitions = config[CONF_SIGNAL_REPETITIONS]
devices = []
for packet_id, entity_info in config[CONF_DEVICES].items():
event = get_rfx_object(packet_id)
if event is None:
_LOGGER.error("Invalid device: %s", packet_id)
continue
device_id = slugify(event.device.id_string.lower())
if device_id in RFX_DEVICES:
continue
_LOGGER.debug("Add %s rfxtrx", entity_info[ATTR_NAME])
        # Check if it must fire an event
fire_event = entity_info[ATTR_FIRE_EVENT]
datas = {ATTR_STATE: False, ATTR_FIRE_EVENT: fire_event}
new_device = device(entity_info[ATTR_NAME], event, datas, signal_repetitions)
RFX_DEVICES[device_id] = new_device
devices.append(new_device)
return devices
def get_new_device(event, config, device):
"""Add entity if not exist and the automatic_add is True."""
device_id = slugify(event.device.id_string.lower())
if device_id in RFX_DEVICES:
return
if not config[ATTR_AUTOMATIC_ADD]:
return
pkt_id = "".join(f"{x:02x}" for x in event.data)
_LOGGER.debug(
"Automatic add %s rfxtrx device (Class: %s Sub: %s Packet_id: %s)",
device_id,
event.device.__class__.__name__,
event.device.subtype,
pkt_id,
)
datas = {ATTR_STATE: False, ATTR_FIRE_EVENT: False}
signal_repetitions = config[CONF_SIGNAL_REPETITIONS]
new_device = device(pkt_id, event, datas, signal_repetitions)
RFX_DEVICES[device_id] = new_device
return new_device
def apply_received_command(event):
"""Apply command from rfxtrx."""
device_id = slugify(event.device.id_string.lower())
    # Check if the entity exists or was previously added automatically
if device_id not in RFX_DEVICES:
return
_LOGGER.debug(
"Device_id: %s device_update. Command: %s", device_id, event.values["Command"],
)
if event.values["Command"] in [
"On",
"Off",
"Up",
"Down",
"Stop",
"Open (inline relay)",
"Close (inline relay)",
"Stop (inline relay)",
]:
# Update the rfxtrx device state
command = event.values["Command"]
if command in [
"On",
"Up",
"Stop",
"Open (inline relay)",
"Stop (inline relay)",
]:
is_on = True
elif command in ["Off", "Down", "Close (inline relay)"]:
is_on = False
RFX_DEVICES[device_id].update_state(is_on)
elif (
hasattr(RFX_DEVICES[device_id], "brightness")
and event.values["Command"] == "Set level"
):
_brightness = event.values["Dim level"] * 255 // 100
# Update the rfxtrx device state
is_on = _brightness > 0
RFX_DEVICES[device_id].update_state(is_on, _brightness)
# Fire event
if RFX_DEVICES[device_id].should_fire_event:
RFX_DEVICES[device_id].hass.bus.fire(
EVENT_BUTTON_PRESSED,
{
ATTR_ENTITY_ID: RFX_DEVICES[device_id].entity_id,
ATTR_STATE: event.values["Command"].lower(),
},
)
_LOGGER.debug(
"Rfxtrx fired event: (event_type: %s, %s: %s, %s: %s)",
EVENT_BUTTON_PRESSED,
ATTR_ENTITY_ID,
RFX_DEVICES[device_id].entity_id,
ATTR_STATE,
event.values["Command"].lower(),
)
class RfxtrxDevice(Entity):
"""Represents a Rfxtrx device.
Contains the common logic for Rfxtrx lights and switches.
"""
def __init__(self, name, event, datas, signal_repetitions):
"""Initialize the device."""
self.signal_repetitions = signal_repetitions
self._name = name
self._event = event
self._state = datas[ATTR_STATE]
self._should_fire_event = datas[ATTR_FIRE_EVENT]
self._brightness = 0
self._unique_id = f"{slugify(self._event.device.type_string.lower())}_{slugify(self._event.device.id_string.lower())}"
self.added_to_hass = False
async def async_added_to_hass(self):
"""Subscribe RFXtrx events."""
self.added_to_hass = True
@property
def should_poll(self):
"""No polling needed for a RFXtrx switch."""
return False
@property
def name(self):
"""Return the name of the device if any."""
return self._name
@property
def should_fire_event(self):
"""Return is the device must fire event."""
return self._should_fire_event
@property
def is_on(self):
"""Return true if device is on."""
return self._state
@property
def assumed_state(self):
"""Return true if unable to access real state of entity."""
return True
@property
def unique_id(self):
"""Return unique identifier of remote device."""
return self._unique_id
def turn_off(self, **kwargs):
"""Turn the device off."""
self._send_command("turn_off")
def update_state(self, state, brightness=0):
"""Update det state of the device."""
self._state = state
self._brightness = brightness
if self.added_to_hass:
self.schedule_update_ha_state()
def _send_command(self, command, brightness=0):
if not self._event:
return
rfx_object = self.hass.data[DATA_RFXOBJECT]
if command == "turn_on":
for _ in range(self.signal_repetitions):
self._event.device.send_on(rfx_object.transport)
self._state = True
elif command == "dim":
for _ in range(self.signal_repetitions):
self._event.device.send_dim(rfx_object.transport, brightness)
self._state = True
elif command == "turn_off":
for _ in range(self.signal_repetitions):
self._event.device.send_off(rfx_object.transport)
self._state = False
self._brightness = 0
elif command == "roll_up":
for _ in range(self.signal_repetitions):
self._event.device.send_open(rfx_object.transport)
self._state = True
elif command == "roll_down":
for _ in range(self.signal_repetitions):
self._event.device.send_close(rfx_object.transport)
self._state = False
elif command == "stop_roll":
for _ in range(self.signal_repetitions):
self._event.device.send_stop(rfx_object.transport)
self._state = True
if self.added_to_hass:
self.schedule_update_ha_state()
| nkgilley/home-assistant | homeassistant/components/rfxtrx/__init__.py | Python | apache-2.0 | 13,930 | 0.000431 |
class Where:
string = ''
| pannellr/3132GroupProject | modules/database/where.py | Python | unlicense | 34 | 0.029412 |
#-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2019, Anaconda, Inc., and Bokeh Contributors.
# All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
'''
'''
#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
log = logging.getLogger(__name__)
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Standard library imports
import json
# External imports
# Bokeh imports
from ..util.sampledata import external_path
#-----------------------------------------------------------------------------
# Globals and constants
#-----------------------------------------------------------------------------
__all__ = (
'data',
)
#-----------------------------------------------------------------------------
# General API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Dev API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Private API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
data = json.load(open(external_path('us_cities.json')))
| timsnyder/bokeh | bokeh/sampledata/us_cities.py | Python | bsd-3-clause | 1,884 | 0.010085 |
"""Provide functionality to interact with Cast devices on the network."""
from __future__ import annotations
import asyncio
from contextlib import suppress
from datetime import datetime, timedelta
import functools as ft
import json
import logging
from urllib.parse import quote
import pychromecast
from pychromecast.controllers.homeassistant import HomeAssistantController
from pychromecast.controllers.multizone import MultizoneManager
from pychromecast.controllers.plex import PlexController
from pychromecast.controllers.receiver import VOLUME_CONTROL_TYPE_FIXED
from pychromecast.quick_play import quick_play
from pychromecast.socket_client import (
CONNECTION_STATUS_CONNECTED,
CONNECTION_STATUS_DISCONNECTED,
)
import voluptuous as vol
from homeassistant.auth.models import RefreshToken
from homeassistant.components import media_source, zeroconf
from homeassistant.components.http.auth import async_sign_path
from homeassistant.components.media_player import MediaPlayerEntity
from homeassistant.components.media_player.const import (
ATTR_MEDIA_EXTRA,
MEDIA_TYPE_MOVIE,
MEDIA_TYPE_MUSIC,
MEDIA_TYPE_TVSHOW,
SUPPORT_BROWSE_MEDIA,
SUPPORT_NEXT_TRACK,
SUPPORT_PAUSE,
SUPPORT_PLAY,
SUPPORT_PLAY_MEDIA,
SUPPORT_PREVIOUS_TRACK,
SUPPORT_SEEK,
SUPPORT_STOP,
SUPPORT_TURN_OFF,
SUPPORT_TURN_ON,
SUPPORT_VOLUME_MUTE,
SUPPORT_VOLUME_SET,
)
from homeassistant.components.plex.const import PLEX_URI_SCHEME
from homeassistant.components.plex.services import lookup_plex_media
from homeassistant.const import (
EVENT_HOMEASSISTANT_STOP,
STATE_IDLE,
STATE_OFF,
STATE_PAUSED,
STATE_PLAYING,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.network import NoURLAvailableError, get_url
import homeassistant.util.dt as dt_util
from homeassistant.util.logging import async_create_catching_coro
from .const import (
ADDED_CAST_DEVICES_KEY,
CAST_MULTIZONE_MANAGER_KEY,
CONF_IGNORE_CEC,
CONF_UUID,
DOMAIN as CAST_DOMAIN,
SIGNAL_CAST_DISCOVERED,
SIGNAL_CAST_REMOVED,
SIGNAL_HASS_CAST_SHOW_VIEW,
)
from .discovery import setup_internal_discovery
from .helpers import CastStatusListener, ChromecastInfo, ChromeCastZeroconf
_LOGGER = logging.getLogger(__name__)
CAST_SPLASH = "https://www.home-assistant.io/images/cast/splash.png"
SUPPORT_CAST = (
SUPPORT_PAUSE
| SUPPORT_PLAY
| SUPPORT_PLAY_MEDIA
| SUPPORT_STOP
| SUPPORT_TURN_OFF
| SUPPORT_TURN_ON
)
ENTITY_SCHEMA = vol.All(
vol.Schema(
{
vol.Optional(CONF_UUID): cv.string,
vol.Optional(CONF_IGNORE_CEC): vol.All(cv.ensure_list, [cv.string]),
}
),
)
@callback
def _async_create_cast_device(hass: HomeAssistant, info: ChromecastInfo):
"""Create a CastDevice Entity from the chromecast object.
Returns None if the cast device has already been added.
"""
_LOGGER.debug("_async_create_cast_device: %s", info)
if info.uuid is None:
_LOGGER.error("_async_create_cast_device uuid none: %s", info)
return None
# Found a cast with UUID
added_casts = hass.data[ADDED_CAST_DEVICES_KEY]
if info.uuid in added_casts:
# Already added this one, the entity will take care of moved hosts
# itself
return None
# -> New cast device
added_casts.add(info.uuid)
if info.is_dynamic_group:
# This is a dynamic group, do not add it but connect to the service.
group = DynamicCastGroup(hass, info)
group.async_setup()
return None
return CastDevice(info)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up Cast from a config entry."""
hass.data.setdefault(ADDED_CAST_DEVICES_KEY, set())
# Import CEC IGNORE attributes
pychromecast.IGNORE_CEC += config_entry.data.get(CONF_IGNORE_CEC) or []
wanted_uuids = config_entry.data.get(CONF_UUID) or None
@callback
def async_cast_discovered(discover: ChromecastInfo) -> None:
"""Handle discovery of a new chromecast."""
# If wanted_uuids is set, we're only accepting specific cast devices identified
# by UUID
if wanted_uuids is not None and discover.uuid not in wanted_uuids:
# UUID not matching, ignore.
return
cast_device = _async_create_cast_device(hass, discover)
if cast_device is not None:
async_add_entities([cast_device])
async_dispatcher_connect(hass, SIGNAL_CAST_DISCOVERED, async_cast_discovered)
ChromeCastZeroconf.set_zeroconf(await zeroconf.async_get_instance(hass))
hass.async_add_executor_job(setup_internal_discovery, hass, config_entry)
class CastDevice(MediaPlayerEntity):
"""Representation of a Cast device on the network.
This class is the holder of the pychromecast.Chromecast object and its
socket client. It therefore handles all reconnects and audio group changing
"elected leader" itself.
"""
_attr_should_poll = False
_attr_media_image_remotely_accessible = True
def __init__(self, cast_info: ChromecastInfo) -> None:
"""Initialize the cast device."""
self._cast_info = cast_info
self.services = cast_info.services
self._chromecast: pychromecast.Chromecast | None = None
self.cast_status = None
self.media_status = None
self.media_status_received = None
self.mz_media_status: dict[str, pychromecast.controllers.media.MediaStatus] = {}
self.mz_media_status_received: dict[str, datetime] = {}
self.mz_mgr = None
self._attr_available = False
self._status_listener: CastStatusListener | None = None
self._hass_cast_controller: HomeAssistantController | None = None
self._add_remove_handler = None
self._cast_view_remove_handler = None
self._attr_unique_id = cast_info.uuid
self._attr_name = cast_info.friendly_name
if cast_info.model_name != "Google Cast Group":
self._attr_device_info = {
"name": str(cast_info.friendly_name),
"identifiers": {(CAST_DOMAIN, str(cast_info.uuid).replace("-", ""))},
"model": cast_info.model_name,
"manufacturer": str(cast_info.manufacturer),
}
async def async_added_to_hass(self):
"""Create chromecast object when added to hass."""
self._add_remove_handler = async_dispatcher_connect(
self.hass, SIGNAL_CAST_DISCOVERED, self._async_cast_discovered
)
self.hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, self._async_stop)
self.async_set_cast_info(self._cast_info)
# asyncio.create_task is used to avoid delaying startup wrapup if the device
# is discovered already during startup but then fails to respond
asyncio.create_task(
async_create_catching_coro(self.async_connect_to_chromecast())
)
self._cast_view_remove_handler = async_dispatcher_connect(
self.hass, SIGNAL_HASS_CAST_SHOW_VIEW, self._handle_signal_show_view
)
async def async_will_remove_from_hass(self) -> None:
"""Disconnect Chromecast object when removed."""
await self._async_disconnect()
if self._add_remove_handler:
self._add_remove_handler()
self._add_remove_handler = None
if self._cast_view_remove_handler:
self._cast_view_remove_handler()
self._cast_view_remove_handler = None
def async_set_cast_info(self, cast_info):
"""Set the cast information."""
self._cast_info = cast_info
async def async_connect_to_chromecast(self):
"""Set up the chromecast object."""
_LOGGER.debug(
"[%s %s] Connecting to cast device by service %s",
self.entity_id,
self._cast_info.friendly_name,
self.services,
)
chromecast = await self.hass.async_add_executor_job(
pychromecast.get_chromecast_from_cast_info,
pychromecast.discovery.CastInfo(
self.services,
self._cast_info.uuid,
self._cast_info.model_name,
self._cast_info.friendly_name,
None,
None,
),
ChromeCastZeroconf.get_zeroconf(),
)
self._chromecast = chromecast
if CAST_MULTIZONE_MANAGER_KEY not in self.hass.data:
self.hass.data[CAST_MULTIZONE_MANAGER_KEY] = MultizoneManager()
self.mz_mgr = self.hass.data[CAST_MULTIZONE_MANAGER_KEY]
self._status_listener = CastStatusListener(self, chromecast, self.mz_mgr)
self._attr_available = False
self.cast_status = chromecast.status
self.media_status = chromecast.media_controller.status
self._chromecast.start()
self.async_write_ha_state()
async def _async_disconnect(self):
"""Disconnect Chromecast object if it is set."""
if self._chromecast is None:
# Can't disconnect if not connected.
return
_LOGGER.debug(
"[%s %s] Disconnecting from chromecast socket",
self.entity_id,
self._cast_info.friendly_name,
)
self._attr_available = False
self.async_write_ha_state()
await self.hass.async_add_executor_job(self._chromecast.disconnect)
self._invalidate()
self.async_write_ha_state()
def _invalidate(self):
"""Invalidate some attributes."""
self._chromecast = None
self.cast_status = None
self.media_status = None
self.media_status_received = None
self.mz_media_status = {}
self.mz_media_status_received = {}
self.mz_mgr = None
self._hass_cast_controller = None
if self._status_listener is not None:
self._status_listener.invalidate()
self._status_listener = None
# ========== Callbacks ==========
def new_cast_status(self, cast_status):
"""Handle updates of the cast status."""
self.cast_status = cast_status
self._attr_volume_level = cast_status.volume_level if cast_status else None
self._attr_is_volume_muted = (
cast_status.volume_muted if self.cast_status else None
)
self.schedule_update_ha_state()
def new_media_status(self, media_status):
"""Handle updates of the media status."""
if (
media_status
and media_status.player_is_idle
and media_status.idle_reason == "ERROR"
):
external_url = None
internal_url = None
tts_base_url = None
url_description = ""
if "tts" in self.hass.config.components:
with suppress(KeyError): # base_url not configured
tts_base_url = self.hass.components.tts.get_base_url(self.hass)
with suppress(NoURLAvailableError): # external_url not configured
external_url = get_url(self.hass, allow_internal=False)
with suppress(NoURLAvailableError): # internal_url not configured
internal_url = get_url(self.hass, allow_external=False)
if media_status.content_id:
if tts_base_url and media_status.content_id.startswith(tts_base_url):
url_description = f" from tts.base_url ({tts_base_url})"
if external_url and media_status.content_id.startswith(external_url):
url_description = f" from external_url ({external_url})"
if internal_url and media_status.content_id.startswith(internal_url):
url_description = f" from internal_url ({internal_url})"
_LOGGER.error(
"Failed to cast media %s%s. Please make sure the URL is: "
"Reachable from the cast device and either a publicly resolvable "
"hostname or an IP address",
media_status.content_id,
url_description,
)
self.media_status = media_status
self.media_status_received = dt_util.utcnow()
self.schedule_update_ha_state()
def new_connection_status(self, connection_status):
"""Handle updates of connection status."""
_LOGGER.debug(
"[%s %s] Received cast device connection status: %s",
self.entity_id,
self._cast_info.friendly_name,
connection_status.status,
)
if connection_status.status == CONNECTION_STATUS_DISCONNECTED:
self._attr_available = False
self._invalidate()
self.schedule_update_ha_state()
return
new_available = connection_status.status == CONNECTION_STATUS_CONNECTED
if new_available != self.available:
# Connection status callbacks happen often when disconnected.
# Only update state when availability changed to put less pressure
# on state machine.
_LOGGER.debug(
"[%s %s] Cast device availability changed: %s",
self.entity_id,
self._cast_info.friendly_name,
connection_status.status,
)
self._attr_available = new_available
self.schedule_update_ha_state()
def multizone_new_media_status(self, group_uuid, media_status):
"""Handle updates of audio group media status."""
_LOGGER.debug(
"[%s %s] Multizone %s media status: %s",
self.entity_id,
self._cast_info.friendly_name,
group_uuid,
media_status,
)
self.mz_media_status[group_uuid] = media_status
self.mz_media_status_received[group_uuid] = dt_util.utcnow()
self.schedule_update_ha_state()
# ========== Service Calls ==========
def _media_controller(self):
"""
Return media controller.
First try from our own cast, then groups which our cast is a member in.
"""
media_status = self.media_status
media_controller = self._chromecast.media_controller
if media_status is None or media_status.player_state == "UNKNOWN":
groups = self.mz_media_status
for k, val in groups.items():
if val and val.player_state != "UNKNOWN":
media_controller = self.mz_mgr.get_multizone_mediacontroller(k)
break
return media_controller
def turn_on(self):
"""Turn on the cast device."""
if not self._chromecast.is_idle:
# Already turned on
return
if self._chromecast.app_id is not None:
# Quit the previous app before starting splash screen
self._chromecast.quit_app()
        # The only way we can turn the Chromecast on is by launching an app
self._chromecast.play_media(CAST_SPLASH, pychromecast.STREAM_TYPE_BUFFERED)
def turn_off(self):
"""Turn off the cast device."""
self._chromecast.quit_app()
def mute_volume(self, mute):
"""Mute the volume."""
self._chromecast.set_volume_muted(mute)
def set_volume_level(self, volume):
"""Set volume level, range 0..1."""
self._chromecast.set_volume(volume)
def media_play(self):
"""Send play command."""
media_controller = self._media_controller()
media_controller.play()
def media_pause(self):
"""Send pause command."""
media_controller = self._media_controller()
media_controller.pause()
def media_stop(self):
"""Send stop command."""
media_controller = self._media_controller()
media_controller.stop()
def media_previous_track(self):
"""Send previous track command."""
media_controller = self._media_controller()
media_controller.queue_prev()
def media_next_track(self):
"""Send next track command."""
media_controller = self._media_controller()
media_controller.queue_next()
def media_seek(self, position):
"""Seek the media to a specific location."""
media_controller = self._media_controller()
media_controller.seek(position)
async def async_browse_media(self, media_content_type=None, media_content_id=None):
"""Implement the websocket media browsing helper."""
result = await media_source.async_browse_media(self.hass, media_content_id)
return result
async def async_play_media(self, media_type, media_id, **kwargs):
"""Play a piece of media."""
# Handle media_source
if media_source.is_media_source_id(media_id):
sourced_media = await media_source.async_resolve_media(self.hass, media_id)
media_type = sourced_media.mime_type
media_id = sourced_media.url
# If media ID is a relative URL, we serve it from HA.
# Create a signed path.
if media_id[0] == "/":
# Sign URL with Home Assistant Cast User
config_entry_id = self.registry_entry.config_entry_id
config_entry = self.hass.config_entries.async_get_entry(config_entry_id)
user_id = config_entry.data["user_id"]
user = await self.hass.auth.async_get_user(user_id)
if user.refresh_tokens:
refresh_token: RefreshToken = list(user.refresh_tokens.values())[0]
media_id = async_sign_path(
self.hass,
refresh_token.id,
quote(media_id),
timedelta(seconds=media_source.DEFAULT_EXPIRY_TIME),
)
# prepend external URL
hass_url = get_url(self.hass, prefer_external=True)
media_id = f"{hass_url}{media_id}"
await self.hass.async_add_executor_job(
ft.partial(self.play_media, media_type, media_id, **kwargs)
)
def play_media(self, media_type, media_id, **kwargs):
"""Play media from a URL."""
extra = kwargs.get(ATTR_MEDIA_EXTRA, {})
metadata = extra.get("metadata")
# We do not want this to be forwarded to a group
if media_type == CAST_DOMAIN:
try:
app_data = json.loads(media_id)
if metadata is not None:
app_data["metadata"] = extra.get("metadata")
except json.JSONDecodeError:
_LOGGER.error("Invalid JSON in media_content_id")
raise
# Special handling for passed `app_id` parameter. This will only launch
# an arbitrary cast app, generally for UX.
if "app_id" in app_data:
app_id = app_data.pop("app_id")
_LOGGER.info("Starting Cast app by ID %s", app_id)
self._chromecast.start_app(app_id)
if app_data:
_LOGGER.warning(
"Extra keys %s were ignored. Please use app_name to cast media",
app_data.keys(),
)
return
app_name = app_data.pop("app_name")
try:
quick_play(self._chromecast, app_name, app_data)
except NotImplementedError:
_LOGGER.error("App %s not supported", app_name)
# Handle plex
elif media_id and media_id.startswith(PLEX_URI_SCHEME):
media_id = media_id[len(PLEX_URI_SCHEME) :]
media, _ = lookup_plex_media(self.hass, media_type, media_id)
if media is None:
return
controller = PlexController()
self._chromecast.register_handler(controller)
controller.play_media(media)
else:
self._chromecast.media_controller.play_media(
media_id, media_type, **kwargs.get(ATTR_MEDIA_EXTRA, {})
)
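    # A hedged sketch of the media_player.play_media payload this method
    # expects for the `cast` media type (values are illustrative; the exact
    # app_data keys depend on the pychromecast quick_play app being targeted):
    #
    #     media_content_type: cast
    #     media_content_id: '{"app_name": "youtube", "media_id": "dQw4w9WgXcQ"}'
    #
    # Passing {"app_id": "<id>"} instead just launches that cast app without
    # queueing any media.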
def _media_status(self):
"""
Return media status.
First try from our own cast, then groups which our cast is a member in.
"""
media_status = self.media_status
media_status_received = self.media_status_received
if media_status is None or media_status.player_state == "UNKNOWN":
groups = self.mz_media_status
for k, val in groups.items():
if val and val.player_state != "UNKNOWN":
media_status = val
media_status_received = self.mz_media_status_received[k]
break
return (media_status, media_status_received)
@property
def state(self):
"""Return the state of the player."""
if (media_status := self._media_status()[0]) is None:
return None
if media_status.player_is_playing:
return STATE_PLAYING
if media_status.player_is_paused:
return STATE_PAUSED
if media_status.player_is_idle:
return STATE_IDLE
if self._chromecast is not None and self._chromecast.is_idle:
return STATE_OFF
return None
@property
def media_content_id(self):
"""Content ID of current playing media."""
media_status = self._media_status()[0]
return media_status.content_id if media_status else None
@property
def media_content_type(self):
"""Content type of current playing media."""
if (media_status := self._media_status()[0]) is None:
return None
if media_status.media_is_tvshow:
return MEDIA_TYPE_TVSHOW
if media_status.media_is_movie:
return MEDIA_TYPE_MOVIE
if media_status.media_is_musictrack:
return MEDIA_TYPE_MUSIC
return None
@property
def media_duration(self):
"""Duration of current playing media in seconds."""
media_status = self._media_status()[0]
return media_status.duration if media_status else None
@property
def media_image_url(self):
"""Image url of current playing media."""
if (media_status := self._media_status()[0]) is None:
return None
images = media_status.images
return images[0].url if images and images[0].url else None
@property
def media_title(self):
"""Title of current playing media."""
media_status = self._media_status()[0]
return media_status.title if media_status else None
@property
def media_artist(self):
"""Artist of current playing media (Music track only)."""
media_status = self._media_status()[0]
return media_status.artist if media_status else None
@property
def media_album_name(self):
"""Album of current playing media (Music track only)."""
media_status = self._media_status()[0]
return media_status.album_name if media_status else None
@property
def media_album_artist(self):
"""Album artist of current playing media (Music track only)."""
media_status = self._media_status()[0]
return media_status.album_artist if media_status else None
@property
def media_track(self):
"""Track number of current playing media (Music track only)."""
media_status = self._media_status()[0]
return media_status.track if media_status else None
@property
def media_series_title(self):
"""Return the title of the series of current playing media."""
media_status = self._media_status()[0]
return media_status.series_title if media_status else None
@property
def media_season(self):
"""Season of current playing media (TV Show only)."""
media_status = self._media_status()[0]
return media_status.season if media_status else None
@property
def media_episode(self):
"""Episode of current playing media (TV Show only)."""
media_status = self._media_status()[0]
return media_status.episode if media_status else None
@property
def app_id(self):
"""Return the ID of the current running app."""
return self._chromecast.app_id if self._chromecast else None
@property
def app_name(self):
"""Name of the current running app."""
return self._chromecast.app_display_name if self._chromecast else None
@property
def supported_features(self):
"""Flag media player features that are supported."""
support = SUPPORT_CAST
media_status = self._media_status()[0]
if (
self.cast_status
and self.cast_status.volume_control_type != VOLUME_CONTROL_TYPE_FIXED
):
support |= SUPPORT_VOLUME_MUTE | SUPPORT_VOLUME_SET
if media_status:
if media_status.supports_queue_next:
support |= SUPPORT_PREVIOUS_TRACK | SUPPORT_NEXT_TRACK
if media_status.supports_seek:
support |= SUPPORT_SEEK
if "media_source" in self.hass.config.components:
support |= SUPPORT_BROWSE_MEDIA
return support
@property
def media_position(self):
"""Position of current playing media in seconds."""
media_status = self._media_status()[0]
if media_status is None or not (
media_status.player_is_playing
or media_status.player_is_paused
or media_status.player_is_idle
):
return None
return media_status.current_time
@property
def media_position_updated_at(self):
"""When was the position of the current playing media valid.
Returns value from homeassistant.util.dt.utcnow().
"""
        media_status_received = self._media_status()[1]
        return media_status_received
async def _async_cast_discovered(self, discover: ChromecastInfo):
"""Handle discovery of new Chromecast."""
if self._cast_info.uuid != discover.uuid:
# Discovered is not our device.
return
_LOGGER.debug("Discovered chromecast with same UUID: %s", discover)
self.async_set_cast_info(discover)
async def _async_stop(self, event):
"""Disconnect socket on Home Assistant stop."""
await self._async_disconnect()
def _handle_signal_show_view(
self,
controller: HomeAssistantController,
entity_id: str,
view_path: str,
url_path: str | None,
):
"""Handle a show view signal."""
if entity_id != self.entity_id or self._chromecast is None:
return
if self._hass_cast_controller is None:
self._hass_cast_controller = controller
self._chromecast.register_handler(controller)
self._hass_cast_controller.show_lovelace_view(view_path, url_path)
class DynamicCastGroup:
"""Representation of a Cast device on the network - for dynamic cast groups."""
def __init__(self, hass, cast_info: ChromecastInfo):
"""Initialize the cast device."""
self.hass = hass
self._cast_info = cast_info
self.services = cast_info.services
self._chromecast: pychromecast.Chromecast | None = None
self.mz_mgr = None
self._status_listener: CastStatusListener | None = None
self._add_remove_handler = None
self._del_remove_handler = None
def async_setup(self):
"""Create chromecast object."""
self._add_remove_handler = async_dispatcher_connect(
self.hass, SIGNAL_CAST_DISCOVERED, self._async_cast_discovered
)
self._del_remove_handler = async_dispatcher_connect(
self.hass, SIGNAL_CAST_REMOVED, self._async_cast_removed
)
self.hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, self._async_stop)
self.async_set_cast_info(self._cast_info)
self.hass.async_create_task(
async_create_catching_coro(self.async_connect_to_chromecast())
)
async def async_tear_down(self) -> None:
"""Disconnect Chromecast object."""
await self._async_disconnect()
if self._cast_info.uuid is not None:
# Remove the entity from the added casts so that it can dynamically
# be re-added again.
self.hass.data[ADDED_CAST_DEVICES_KEY].remove(self._cast_info.uuid)
if self._add_remove_handler:
self._add_remove_handler()
self._add_remove_handler = None
if self._del_remove_handler:
self._del_remove_handler()
self._del_remove_handler = None
def async_set_cast_info(self, cast_info):
"""Set the cast information and set up the chromecast object."""
self._cast_info = cast_info
async def async_connect_to_chromecast(self):
"""Set the cast information and set up the chromecast object."""
_LOGGER.debug(
"[%s %s] Connecting to cast device by service %s",
"Dynamic group",
self._cast_info.friendly_name,
self.services,
)
chromecast = await self.hass.async_add_executor_job(
pychromecast.get_chromecast_from_cast_info,
pychromecast.discovery.CastInfo(
self.services,
self._cast_info.uuid,
self._cast_info.model_name,
self._cast_info.friendly_name,
None,
None,
),
ChromeCastZeroconf.get_zeroconf(),
)
self._chromecast = chromecast
if CAST_MULTIZONE_MANAGER_KEY not in self.hass.data:
self.hass.data[CAST_MULTIZONE_MANAGER_KEY] = MultizoneManager()
self.mz_mgr = self.hass.data[CAST_MULTIZONE_MANAGER_KEY]
self._status_listener = CastStatusListener(self, chromecast, self.mz_mgr, True)
self._chromecast.start()
async def _async_disconnect(self):
"""Disconnect Chromecast object if it is set."""
if self._chromecast is None:
# Can't disconnect if not connected.
return
_LOGGER.debug(
"[%s %s] Disconnecting from chromecast socket",
"Dynamic group",
self._cast_info.friendly_name,
)
await self.hass.async_add_executor_job(self._chromecast.disconnect)
self._invalidate()
def _invalidate(self):
"""Invalidate some attributes."""
self._chromecast = None
self.mz_mgr = None
if self._status_listener is not None:
self._status_listener.invalidate()
self._status_listener = None
async def _async_cast_discovered(self, discover: ChromecastInfo):
"""Handle discovery of new Chromecast."""
if self._cast_info.uuid != discover.uuid:
# Discovered is not our device.
return
_LOGGER.debug("Discovered dynamic group with same UUID: %s", discover)
self.async_set_cast_info(discover)
async def _async_cast_removed(self, discover: ChromecastInfo):
"""Handle removal of Chromecast."""
if self._cast_info.uuid != discover.uuid:
# Removed is not our device.
return
if not discover.services:
# Clean up the dynamic group
_LOGGER.debug("Clean up dynamic group: %s", discover)
await self.async_tear_down()
async def _async_stop(self, event):
"""Disconnect socket on Home Assistant stop."""
await self._async_disconnect()
| lukas-hetzenecker/home-assistant | homeassistant/components/cast/media_player.py | Python | apache-2.0 | 31,771 | 0.000913 |
# -*- coding: utf-8 -*-
"""
this script is crap but I
don't feel like fixing it.
"""
import shutil
import os
import sys
import time
import tempfile
from bencode import *
base_dir = tempfile.gettempdir() + "\\ut-itunes-import"
item_list = []
file_count = 0
file_types = ['.mp3',]
if len(sys.argv) < 2 or "--help" in str(sys.argv[1]).lower():
print """ Usage: python import.py [Path_to_resume.dat] [Path_to_Add_to_iTunes_folder] [Label(s) (optional)]
Optional arguments: [Label] only import files with specified label(s)"""
sys.exit()
if not os.path.isfile(str(sys.argv[1]).replace("\\","\\\\")) \
or not os.path.isdir(sys.argv[2]):
raise AssertionError("""Path does not exist. Please check your
resume.dat and Add to iTunes folder paths
are correct.""")
else:
RESUME_DAT = sys.argv[1]
ADD_TO_ITUNES_FOLDER = sys.argv[2]
try:
# Labels don't do anything right now, sorry
CUSTOM_LABELS = sys.argv[3]
except:
pass
try:
META_INFO_FILE = open(RESUME_DAT, 'rb')
META_INFO_CONTENT = bdecode(META_INFO_FILE.read())
except Exception, e:
raise Exception("Could not find resume.dat file! Message: %s" % str(e))
try:
for torrent in META_INFO_CONTENT.keys():
item_list.append(torrent)
THIS_TORRENTS_FILE_LIST = []
if torrent == 'rec' or torrent == '.fileguard':
item_list.remove(torrent)
else:
if META_INFO_CONTENT[torrent]['labels'] == [] and META_INFO_CONTENT[torrent]['completed_on'] > 0:
print "[uTorrent metadata] Name: %s " % str(torrent)
print "[uTorrent metadata] Label(s): %s" % str(META_INFO_CONTENT[torrent]['labels'])
print "[uTorrent metadata] Path: %s" % str(META_INFO_CONTENT[torrent]['path'])
print "[uTorrent metadata] Completed: %s" % str(META_INFO_CONTENT[torrent]['completed_on'])
                FINISHED_FOLDER_PATH = str(base_dir + "\\" + torrent.replace(".torrent", ""))
print "Source: %s" % META_INFO_CONTENT[torrent]['path']
print "Destination %s" % FINISHED_FOLDER_PATH
print "Starting copy folder..."
if not os.path.isdir(FINISHED_FOLDER_PATH):
try:
print "Copying the folder to %s..." % str(base_dir)
shutil.copytree(META_INFO_CONTENT[torrent]['path'], FINISHED_FOLDER_PATH)
print "Copy finished."
except Exception, e:
raise Exception("""Error: Something went wrong when copying the %s
directory to %s! Message: %s"""
% (META_INFO_CONTENT[torrent]['path'], FINISHED_FOLDER_PATH, str(e)))
else:
print "Destination directory already exists. Skipping copy..."
                print "Scanning for file types %s..." % str(file_types)
any_mp3s_in_here = False
for media_file in os.listdir(FINISHED_FOLDER_PATH):
for filetype in file_types:
if media_file[-4:] == filetype:
ADD_TO_ITUNES_SOURCE_FILE = str(FINISHED_FOLDER_PATH + "\\" + media_file)
THIS_TORRENTS_FILE_LIST.append(ADD_TO_ITUNES_SOURCE_FILE)
any_mp3s_in_here = True
file_count += 1
print "Found %s %s files..." % (str(file_count), str(file_types))
if not THIS_TORRENTS_FILE_LIST == []:
print str(THIS_TORRENTS_FILE_LIST)
if not file_count > 0:
print "Skipping copy..."
else:
                    print "Copying files to %s" % str(ADD_TO_ITUNES_FOLDER)
for file in THIS_TORRENTS_FILE_LIST:
try:
print "Copying: %s..." % file
shutil.copy(file, ADD_TO_ITUNES_FOLDER)
except Exception, e:
raise Exception("""Error: There was an issue copying the %s
file to the Add To iTunes directory! Message: %s"""
% (file, str(e)))
print "Success."
if THIS_TORRENTS_FILE_LIST == []:
print "KEEPING MOVED DIRECTORY INTACT SINCE THERE WERE NO MUSIC FILES MOVED..."
else:
try:
print "Cleaning up..."
shutil.rmtree(FINISHED_FOLDER_PATH)
except Exception, e:
                        raise Exception("""Error: Could not delete the folder %s! Message: %s"""
                                        % (FINISHED_FOLDER_PATH, str(e)))
print "Success."
print "---"
except Exception, e:
print "Error: Something went wrong. Message: %s" % str(e)
finally:
print "Closing resume.dat..."
META_INFO_FILE.close()
print "Closed."
print "Cleaning up leftover files..."
try:
shutil.rmtree(base_dir)
except:
pass
print "All done."
| smithbr/ut-itunes-import | import.py | Python | mit | 5,276 | 0.005497 |
from .named import NamedExtensionManager
class DriverManager(NamedExtensionManager):
"""Load a single plugin with a given name from the namespace.
:param namespace: The namespace for the entry points.
:type namespace: str
:param name: The name of the driver to load.
:type name: str
:param invoke_on_load: Boolean controlling whether to invoke the
object returned by the entry point after the driver is loaded.
:type invoke_on_load: bool
:param invoke_args: Positional arguments to pass when invoking
the object returned by the entry point. Only used if invoke_on_load
is True.
:type invoke_args: tuple
:param invoke_kwds: Named arguments to pass when invoking
the object returned by the entry point. Only used if invoke_on_load
is True.
:type invoke_kwds: dict
    :param on_load_failure_callback: Callback function that will be called
        when an entrypoint cannot be loaded. The arguments provided when an
        entrypoint fails to load are (manager, entrypoint, exception).
:type on_load_failure_callback: function
:param verify_requirements: Use setuptools to enforce the
dependencies of the plugin(s) being loaded. Defaults to False.
:type verify_requirements: bool
"""
def __init__(self, namespace, name,
invoke_on_load=False, invoke_args=(), invoke_kwds={},
on_load_failure_callback=None,
verify_requirements=False):
super(DriverManager, self).__init__(
namespace=namespace,
names=[name],
invoke_on_load=invoke_on_load,
invoke_args=invoke_args,
invoke_kwds=invoke_kwds,
on_load_failure_callback=on_load_failure_callback,
verify_requirements=verify_requirements,
)
@classmethod
def make_test_instance(cls, extension, namespace='TESTING',
propagate_map_exceptions=False,
on_load_failure_callback=None,
verify_requirements=False):
"""Construct a test DriverManager
Test instances are passed a list of extensions to work from rather
than loading them from entry points.
:param extension: Pre-configured Extension instance
:type extension: :class:`~stevedore.extension.Extension`
:param namespace: The namespace for the manager; used only for
identification since the extensions are passed in.
:type namespace: str
:param propagate_map_exceptions: Boolean controlling whether exceptions
are propagated up through the map call or whether they are logged
and then ignored
:type propagate_map_exceptions: bool
        :param on_load_failure_callback: Callback function that will be
            called when an entrypoint cannot be loaded. The arguments
            provided when an entrypoint fails to load are
            (manager, entrypoint, exception)
:type on_load_failure_callback: function
:param verify_requirements: Use setuptools to enforce the
dependencies of the plugin(s) being loaded. Defaults to False.
:type verify_requirements: bool
:return: The manager instance, initialized for testing
"""
o = super(DriverManager, cls).make_test_instance(
[extension], namespace=namespace,
propagate_map_exceptions=propagate_map_exceptions,
on_load_failure_callback=on_load_failure_callback,
verify_requirements=verify_requirements)
return o
def _init_plugins(self, extensions):
super(DriverManager, self)._init_plugins(extensions)
if not self.extensions:
name = self._names[0]
raise RuntimeError('No %r driver found, looking for %r' %
(self.namespace, name))
if len(self.extensions) > 1:
discovered_drivers = ','.join(e.entry_point_target
for e in self.extensions)
raise RuntimeError('Multiple %r drivers found: %s' %
(self.namespace, discovered_drivers))
def __call__(self, func, *args, **kwds):
"""Invokes func() for the single loaded extension.
The signature for func() should be::
def func(ext, *args, **kwds):
pass
The first argument to func(), 'ext', is the
:class:`~stevedore.extension.Extension` instance.
Exceptions raised from within func() are logged and ignored.
:param func: Callable to invoke for each extension.
:param args: Variable arguments to pass to func()
:param kwds: Keyword arguments to pass to func()
        :returns: The single value returned from func() for the loaded
            extension, or None if the call produced no result
"""
results = self.map(func, *args, **kwds)
if results:
return results[0]
@property
def driver(self):
"""Returns the driver being used by this manager.
"""
ext = self.extensions[0]
return ext.obj if ext.obj else ext.plugin
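# --- Usage sketch (added for illustration; not part of the upstream module) ---
# Shows the typical pattern for loading a single driver by name. The namespace
# 'example.formats' and driver name 'json' are hypothetical; real values depend
# on the entry points declared by installed plugins.
if __name__ == "__main__":
    mgr = DriverManager(
        namespace='example.formats',
        name='json',
        invoke_on_load=True,
    )
    # The loaded plugin object (or the plugin class, if invoke_on_load is False).
    print(mgr.driver)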
| devs1991/test_edx_docmode | venv/lib/python2.7/site-packages/stevedore/driver.py | Python | agpl-3.0 | 5,248 | 0 |
import webbrowser
import os
import re
# Styles and scripting for the page
main_page_head = '''
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Fresh Tomatoes!</title>
<!-- Bootstrap 3 -->
<link rel="stylesheet" href="https://netdna.bootstrapcdn.com/bootstrap/3.1.0/css/bootstrap.min.css">
<link rel="stylesheet" href="https://netdna.bootstrapcdn.com/bootstrap/3.1.0/css/bootstrap-theme.min.css">
<script src="http://code.jquery.com/jquery-1.10.1.min.js"></script>
<script src="https://netdna.bootstrapcdn.com/bootstrap/3.1.0/js/bootstrap.min.js"></script>
<style type="text/css" media="screen">
body {
padding-top: 80px;
background-color: #9e9e9e;
}
#trailer .modal-dialog {
margin-top: 200px;
width: 640px;
height: 480px;
}
.hanging-close {
position: absolute;
top: -12px;
right: -12px;
z-index: 9001;
}
#trailer-video {
width: 100%;
height: 100%;
}
.movie-tile {
margin-bottom: 20px;
padding-top: 20px;
}
.movie-tile:hover {
background-color: #EEE;
cursor: pointer;
}
.scale-media {
padding-bottom: 56.25%;
position: relative;
}
.scale-media iframe {
border: none;
height: 100%;
position: absolute;
width: 100%;
left: 0;
top: 0;
background-color: white;
}
/* Nathan added 08/30/16 */
.navbar-inverse .navbar-brand {
color: antiquewhite;
}
.navbar-brand,
.navbar-nav li a {
line-height: 65px;
height: 65px;
padding-top: 0;
}
.navbar-inverse {
background-color: darkslategray;
background-image: none;
border-color: gray;
}
.navbar-brand > img {
width:50px;
height:50px;
}
</style>
<script type="text/javascript" charset="utf-8">
// Pause the video when the modal is closed
$(document).on('click', '.hanging-close, .modal-backdrop, .modal', function (event) {
// Remove the src so the player itself gets removed, as this is the only
// reliable way to ensure the video stops playing in IE
$("#trailer-video-container").empty();
});
// Start playing the video whenever the trailer modal is opened
$(document).on('click', '.movie-tile', function (event) {
var trailerYouTubeId = $(this).attr('data-trailer-youtube-id')
var sourceUrl = 'http://www.youtube.com/embed/' + trailerYouTubeId + '?autoplay=1&html5=1';
$("#trailer-video-container").empty().append($("<iframe></iframe>", {
'id': 'trailer-video',
'type': 'text-html',
'src': sourceUrl,
'frameborder': 0
}));
});
// Animate in the movies when the page loads
$(document).ready(function () {
$('.movie-tile').hide().first().show("fast", function showNext() {
$(this).next("div").show("fast", showNext);
});
});
</script>
</head>
'''
# The main page layout and title bar
main_page_content = '''
<body>
<!-- Trailer Video Modal -->
<div class="modal" id="trailer">
<div class="modal-dialog">
<div class="modal-content">
<a href="#" class="hanging-close" data-dismiss="modal" aria-hidden="true">
<img src="https://lh5.ggpht.com/v4-628SilF0HtHuHdu5EzxD7WRqOrrTIDi_MhEG6_qkNtUK5Wg7KPkofp_VJoF7RS2LhxwEFCO1ICHZlc-o_=s0#w=24&h=24"/>
</a>
<div class="scale-media" id="trailer-video-container">
</div>
</div>
</div>
</div>
<!-- Main Page Content -->
<div class="container">
<div class="navbar navbar-inverse navbar-fixed-top" role="navigation">
<div class="container">
<div class="navbar-header">
<a class="navbar-brand" href="#">
<img alt="Fresh Tomatoes Logo" src="Tomato-78.png">
Fresh Tomatoes! Trailers
</a>
</div>
</div>
</div>
</div>
<div class="container">
{movie_tiles}
</div>
</body>
</html>
'''
# A single movie entry html template
movie_tile_content = '''
<div class="col-md-6 col-lg-4 movie-tile text-center" data-trailer-youtube-id="{trailer_youtube_id}" data-toggle="modal" data-target="#trailer">
<img src="{poster_image_url}" width="220" height="342">
<h2>{movie_title}</h2>
</div>
'''
def create_movie_tiles_content(movies):
# The HTML content for this section of the page
content = ''
for movie in movies:
# Extract the youtube ID from the url
youtube_id_match = re.search(
r'(?<=v=)[^&#]+', movie.trailer_youtube_url)
youtube_id_match = youtube_id_match or re.search(
r'(?<=be/)[^&#]+', movie.trailer_youtube_url)
trailer_youtube_id = (youtube_id_match.group(0) if youtube_id_match
else None)
# Append the tile for the movie with its content filled in
content += movie_tile_content.format(
movie_title=movie.title,
poster_image_url=movie.poster_image_url,
trailer_youtube_id=trailer_youtube_id
)
return content
def open_movies_page(movies):
# Create or overwrite the output file
output_file = open('fresh_tomatoes.html', 'w')
# Replace the movie tiles placeholder generated content
rendered_content = main_page_content.format(
movie_tiles=create_movie_tiles_content(movies))
# Output the file
output_file.write(main_page_head + rendered_content)
output_file.close()
# open the output file in the browser (in a new tab, if possible)
url = os.path.abspath(output_file.name)
webbrowser.open('file://' + url, new=2)
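# --- Usage sketch (illustrative only; not part of the original module) ---
# open_movies_page() expects objects exposing .title, .poster_image_url and
# .trailer_youtube_url. The Movie class and the sample data below are
# assumptions for demonstration; the real project would supply its own class.
if __name__ == '__main__':
    class Movie(object):
        def __init__(self, title, poster_image_url, trailer_youtube_url):
            self.title = title
            self.poster_image_url = poster_image_url
            self.trailer_youtube_url = trailer_youtube_url

    sample_movies = [
        # Both trailer URL forms handled by the regexes above:
        Movie('Example One', 'http://example.com/poster1.jpg',
              'https://www.youtube.com/watch?v=dQw4w9WgXcQ'),
        Movie('Example Two', 'http://example.com/poster2.jpg',
              'https://youtu.be/dQw4w9WgXcQ'),
    ]
    open_movies_page(sample_movies)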
| nathandh/udacity-fullstack-MovieTrailerWebsite | fresh_tomatoes.py | Python | mit | 6,106 | 0.003767 |
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
import os
import glob
import sys
#VERSION="2.1dev4"
VERSION="2.6dev5"
# Taken from kennethreitz/requests/setup.py
package_directory = os.path.realpath(os.path.dirname(__file__))
def get_file_contents(file_path):
"""Get the context of the file using full path name."""
content = ""
try:
full_path = os.path.join(package_directory, file_path)
content = open(full_path, 'r').read()
except:
print >> sys.stderr, "### could not open file %r" % file_path
return content
setup(
name='privacyIDEA',
version=VERSION,
description='privacyIDEA: identity, multifactor authentication (OTP), '
'authorization, audit',
author='privacyidea.org',
license='AGPLv3',
author_email='cornelius@privacyidea.org',
url='http://www.privacyidea.org',
keywords='OTP, two factor authentication, management, security',
packages=find_packages(),
scripts=['pi-manage.py',
'tools/privacyidea-convert-token',
'tools/privacyidea-create-pwidresolver-user',
'tools/privacyidea-create-sqlidresolver-user',
'tools/privacyidea-pip-update',
'tools/privacyidea-create-certificate',
'tools/privacyidea-fix-access-rights',
'tools/privacyidea-create-ad-users',
'tools/privacyidea-fetchssh.sh',
'tools/privacyidea-create-userdb.sh'
],
extras_require={
'dev': ["Sphinx>=1.3.1",
"sphinxcontrib-httpdomain>=1.3.0"],
'test': ["coverage>=3.7.1",
"mock>=1.0.1",
"nose>=1.3.4",
"responses>=0.4.0",
"six>=1.8.0"],
},
install_requires=["Flask>=0.10.1",
"Flask-Cache>=0.13.1",
"Flask-Migrate>=1.2.0",
"Flask-SQLAlchemy>=2.0",
"Flask-Script>=2.0.5",
"Jinja2>=2.7.3",
"Mako>=0.9.1",
"MarkupSafe>=0.23",
"MySQL-python>=1.2.5",
"Pillow>=2.6.1",
"PyJWT>=1.3.0",
"PyYAML>=3.11",
"Pygments>=2.0.2",
"SQLAlchemy>=1.0.5",
"Werkzeug>=0.10.4",
"alembic>=0.6.7",
"argparse>=1.2.1",
"bcrypt>=1.1.0",
"beautifulsoup4>=4.3.2",
"cffi>=0.8.6",
"configobj>=5.0.6",
"docutils>=0.12",
"funcparserlib>=0.3.6",
"itsdangerous>=0.24",
"ldap3>=0.9.8.4",
"netaddr>=0.7.12",
"passlib>=1.6.2",
"pyasn1>=0.1.7",
"pyOpenSSL>=0.15.1",
"pycparser>=2.10",
"pycrypto>=2.6.1",
"pyrad>=2.0",
"pyusb>=1.0.0b2",
"qrcode>=5.1",
"requests>=2.7.0",
"sqlsoup>=0.9.0",
"wsgiref>=0.1.2"
],
include_package_data=True,
data_files=[('etc/privacyidea/',
['deploy/apache/privacyideaapp.wsgi',
'deploy/privacyidea/dictionary',
'deploy/privacyidea/enckey',
'deploy/privacyidea/private.pem',
'deploy/privacyidea/public.pem']),
('share/man/man1',
["tools/privacyidea-convert-token.1",
"tools/privacyidea-create-pwidresolver-user.1",
"tools/privacyidea-create-sqlidresolver-user.1",
"tools/privacyidea-pip-update.1",
"tools/privacyidea-create-certificate.1",
"tools/privacyidea-fix-access-rights.1"
]),
('lib/privacyidea/authmodules/FreeRADIUS',
["authmodules/FreeRADIUS/LICENSE",
"authmodules/FreeRADIUS/privacyidea_radius.pm"]),
('lib/privacyidea/authmodules/OTRS',
["authmodules/OTRS/privacyIDEA.pm"]),
('lib/privacyidea/migrations',
["migrations/alembic.ini",
"migrations/env.py",
"migrations/README",
"migrations/script.py.mako"]),
('lib/privacyidea/migrations/versions',
["migrations/versions/2551ee982544_.py",
"migrations/versions/4f32a4e1bf33_.py",
"migrations/versions/2181294eed0b_.py",
"migrations/versions/e5cbeb7c177_.py",
"migrations/versions/4d9178fa8336_.py",
"migrations/versions/20969b4cbf06_.py"])
],
classifiers=["Framework :: Flask",
"License :: OSI Approved :: "
"GNU Affero General Public License v3",
"Programming Language :: Python",
"Development Status :: 5 - Production/Stable",
"Topic :: Internet",
"Topic :: Security",
"Topic :: System ::"
" Systems Administration :: Authentication/Directory"
],
#message_extractors={'privacyidea': [
# ('**.py', 'python', None),
# ('static/**.html', 'html', {'input_encoding': 'utf-8'})]},
zip_safe=False,
long_description=get_file_contents('README.md')
)
| woddx/privacyidea | setup.py | Python | agpl-3.0 | 5,648 | 0.001593 |
# coding: utf-8
"""
Environmental Exposures API
API for environmental exposure models for NIH Data Translator program
OpenAPI spec version: 1.0.0
Contact: stealey@renci.org
Generated by: https://github.com/swagger-api/swagger-codegen.git
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from pprint import pformat
from six import iteritems
import re
class DateRange(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self, start_date=None, end_date=None):
"""
DateRange - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'start_date': 'datetime',
'end_date': 'datetime'
}
self.attribute_map = {
'start_date': 'start_date',
'end_date': 'end_date'
}
self._start_date = start_date
self._end_date = end_date
@property
def start_date(self):
"""
Gets the start_date of this DateRange.
:return: The start_date of this DateRange.
:rtype: datetime
"""
return self._start_date
@start_date.setter
def start_date(self, start_date):
"""
Sets the start_date of this DateRange.
:param start_date: The start_date of this DateRange.
:type: datetime
"""
self._start_date = start_date
@property
def end_date(self):
"""
Gets the end_date of this DateRange.
:return: The end_date of this DateRange.
:rtype: datetime
"""
return self._end_date
@end_date.setter
def end_date(self, end_date):
"""
Sets the end_date of this DateRange.
:param end_date: The end_date of this DateRange.
:type: datetime
"""
self._end_date = end_date
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
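# --- Usage sketch (illustrative only; not part of the generated model) ---
# Demonstrates constructing the model and serialising it with to_dict().
# The concrete dates are arbitrary example values.
if __name__ == "__main__":
    from datetime import datetime

    example_range = DateRange(start_date=datetime(2016, 1, 1),
                              end_date=datetime(2016, 12, 31))
    print(example_range.to_dict())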
| mjstealey/exposures-api | python-client/exposures_api/models/date_range.py | Python | mit | 4,058 | 0.000246 |
""" Python Character Mapping Codec cp437 generated from 'VENDORS/MICSFT/PC/CP437.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_map)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_map)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='cp437',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
### Decoding Map
decoding_map = codecs.make_identity_dict(range(256))
decoding_map.update({
0x0080: 0x00c7, # LATIN CAPITAL LETTER C WITH CEDILLA
0x0081: 0x00fc, # LATIN SMALL LETTER U WITH DIAERESIS
0x0082: 0x00e9, # LATIN SMALL LETTER E WITH ACUTE
0x0083: 0x00e2, # LATIN SMALL LETTER A WITH CIRCUMFLEX
0x0084: 0x00e4, # LATIN SMALL LETTER A WITH DIAERESIS
0x0085: 0x00e0, # LATIN SMALL LETTER A WITH GRAVE
0x0086: 0x00e5, # LATIN SMALL LETTER A WITH RING ABOVE
0x0087: 0x00e7, # LATIN SMALL LETTER C WITH CEDILLA
0x0088: 0x00ea, # LATIN SMALL LETTER E WITH CIRCUMFLEX
0x0089: 0x00eb, # LATIN SMALL LETTER E WITH DIAERESIS
0x008a: 0x00e8, # LATIN SMALL LETTER E WITH GRAVE
0x008b: 0x00ef, # LATIN SMALL LETTER I WITH DIAERESIS
0x008c: 0x00ee, # LATIN SMALL LETTER I WITH CIRCUMFLEX
0x008d: 0x00ec, # LATIN SMALL LETTER I WITH GRAVE
0x008e: 0x00c4, # LATIN CAPITAL LETTER A WITH DIAERESIS
0x008f: 0x00c5, # LATIN CAPITAL LETTER A WITH RING ABOVE
0x0090: 0x00c9, # LATIN CAPITAL LETTER E WITH ACUTE
0x0091: 0x00e6, # LATIN SMALL LIGATURE AE
0x0092: 0x00c6, # LATIN CAPITAL LIGATURE AE
0x0093: 0x00f4, # LATIN SMALL LETTER O WITH CIRCUMFLEX
0x0094: 0x00f6, # LATIN SMALL LETTER O WITH DIAERESIS
0x0095: 0x00f2, # LATIN SMALL LETTER O WITH GRAVE
0x0096: 0x00fb, # LATIN SMALL LETTER U WITH CIRCUMFLEX
0x0097: 0x00f9, # LATIN SMALL LETTER U WITH GRAVE
0x0098: 0x00ff, # LATIN SMALL LETTER Y WITH DIAERESIS
0x0099: 0x00d6, # LATIN CAPITAL LETTER O WITH DIAERESIS
0x009a: 0x00dc, # LATIN CAPITAL LETTER U WITH DIAERESIS
0x009b: 0x00a2, # CENT SIGN
0x009c: 0x00a3, # POUND SIGN
0x009d: 0x00a5, # YEN SIGN
0x009e: 0x20a7, # PESETA SIGN
0x009f: 0x0192, # LATIN SMALL LETTER F WITH HOOK
0x00a0: 0x00e1, # LATIN SMALL LETTER A WITH ACUTE
0x00a1: 0x00ed, # LATIN SMALL LETTER I WITH ACUTE
0x00a2: 0x00f3, # LATIN SMALL LETTER O WITH ACUTE
0x00a3: 0x00fa, # LATIN SMALL LETTER U WITH ACUTE
0x00a4: 0x00f1, # LATIN SMALL LETTER N WITH TILDE
0x00a5: 0x00d1, # LATIN CAPITAL LETTER N WITH TILDE
0x00a6: 0x00aa, # FEMININE ORDINAL INDICATOR
0x00a7: 0x00ba, # MASCULINE ORDINAL INDICATOR
0x00a8: 0x00bf, # INVERTED QUESTION MARK
0x00a9: 0x2310, # REVERSED NOT SIGN
0x00aa: 0x00ac, # NOT SIGN
0x00ab: 0x00bd, # VULGAR FRACTION ONE HALF
0x00ac: 0x00bc, # VULGAR FRACTION ONE QUARTER
0x00ad: 0x00a1, # INVERTED EXCLAMATION MARK
0x00ae: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00af: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00b0: 0x2591, # LIGHT SHADE
0x00b1: 0x2592, # MEDIUM SHADE
0x00b2: 0x2593, # DARK SHADE
0x00b3: 0x2502, # BOX DRAWINGS LIGHT VERTICAL
0x00b4: 0x2524, # BOX DRAWINGS LIGHT VERTICAL AND LEFT
0x00b5: 0x2561, # BOX DRAWINGS VERTICAL SINGLE AND LEFT DOUBLE
0x00b6: 0x2562, # BOX DRAWINGS VERTICAL DOUBLE AND LEFT SINGLE
0x00b7: 0x2556, # BOX DRAWINGS DOWN DOUBLE AND LEFT SINGLE
0x00b8: 0x2555, # BOX DRAWINGS DOWN SINGLE AND LEFT DOUBLE
0x00b9: 0x2563, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT
0x00ba: 0x2551, # BOX DRAWINGS DOUBLE VERTICAL
0x00bb: 0x2557, # BOX DRAWINGS DOUBLE DOWN AND LEFT
0x00bc: 0x255d, # BOX DRAWINGS DOUBLE UP AND LEFT
0x00bd: 0x255c, # BOX DRAWINGS UP DOUBLE AND LEFT SINGLE
0x00be: 0x255b, # BOX DRAWINGS UP SINGLE AND LEFT DOUBLE
0x00bf: 0x2510, # BOX DRAWINGS LIGHT DOWN AND LEFT
0x00c0: 0x2514, # BOX DRAWINGS LIGHT UP AND RIGHT
0x00c1: 0x2534, # BOX DRAWINGS LIGHT UP AND HORIZONTAL
0x00c2: 0x252c, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
0x00c3: 0x251c, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT
0x00c4: 0x2500, # BOX DRAWINGS LIGHT HORIZONTAL
0x00c5: 0x253c, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
0x00c6: 0x255e, # BOX DRAWINGS VERTICAL SINGLE AND RIGHT DOUBLE
0x00c7: 0x255f, # BOX DRAWINGS VERTICAL DOUBLE AND RIGHT SINGLE
0x00c8: 0x255a, # BOX DRAWINGS DOUBLE UP AND RIGHT
0x00c9: 0x2554, # BOX DRAWINGS DOUBLE DOWN AND RIGHT
0x00ca: 0x2569, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL
0x00cb: 0x2566, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
0x00cc: 0x2560, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
0x00cd: 0x2550, # BOX DRAWINGS DOUBLE HORIZONTAL
0x00ce: 0x256c, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
0x00cf: 0x2567, # BOX DRAWINGS UP SINGLE AND HORIZONTAL DOUBLE
0x00d0: 0x2568, # BOX DRAWINGS UP DOUBLE AND HORIZONTAL SINGLE
0x00d1: 0x2564, # BOX DRAWINGS DOWN SINGLE AND HORIZONTAL DOUBLE
0x00d2: 0x2565, # BOX DRAWINGS DOWN DOUBLE AND HORIZONTAL SINGLE
0x00d3: 0x2559, # BOX DRAWINGS UP DOUBLE AND RIGHT SINGLE
0x00d4: 0x2558, # BOX DRAWINGS UP SINGLE AND RIGHT DOUBLE
0x00d5: 0x2552, # BOX DRAWINGS DOWN SINGLE AND RIGHT DOUBLE
0x00d6: 0x2553, # BOX DRAWINGS DOWN DOUBLE AND RIGHT SINGLE
0x00d7: 0x256b, # BOX DRAWINGS VERTICAL DOUBLE AND HORIZONTAL SINGLE
0x00d8: 0x256a, # BOX DRAWINGS VERTICAL SINGLE AND HORIZONTAL DOUBLE
0x00d9: 0x2518, # BOX DRAWINGS LIGHT UP AND LEFT
0x00da: 0x250c, # BOX DRAWINGS LIGHT DOWN AND RIGHT
0x00db: 0x2588, # FULL BLOCK
0x00dc: 0x2584, # LOWER HALF BLOCK
0x00dd: 0x258c, # LEFT HALF BLOCK
0x00de: 0x2590, # RIGHT HALF BLOCK
0x00df: 0x2580, # UPPER HALF BLOCK
0x00e0: 0x03b1, # GREEK SMALL LETTER ALPHA
0x00e1: 0x00df, # LATIN SMALL LETTER SHARP S
0x00e2: 0x0393, # GREEK CAPITAL LETTER GAMMA
0x00e3: 0x03c0, # GREEK SMALL LETTER PI
0x00e4: 0x03a3, # GREEK CAPITAL LETTER SIGMA
0x00e5: 0x03c3, # GREEK SMALL LETTER SIGMA
0x00e6: 0x00b5, # MICRO SIGN
0x00e7: 0x03c4, # GREEK SMALL LETTER TAU
0x00e8: 0x03a6, # GREEK CAPITAL LETTER PHI
0x00e9: 0x0398, # GREEK CAPITAL LETTER THETA
0x00ea: 0x03a9, # GREEK CAPITAL LETTER OMEGA
0x00eb: 0x03b4, # GREEK SMALL LETTER DELTA
0x00ec: 0x221e, # INFINITY
0x00ed: 0x03c6, # GREEK SMALL LETTER PHI
0x00ee: 0x03b5, # GREEK SMALL LETTER EPSILON
0x00ef: 0x2229, # INTERSECTION
0x00f0: 0x2261, # IDENTICAL TO
0x00f1: 0x00b1, # PLUS-MINUS SIGN
0x00f2: 0x2265, # GREATER-THAN OR EQUAL TO
0x00f3: 0x2264, # LESS-THAN OR EQUAL TO
0x00f4: 0x2320, # TOP HALF INTEGRAL
0x00f5: 0x2321, # BOTTOM HALF INTEGRAL
0x00f6: 0x00f7, # DIVISION SIGN
0x00f7: 0x2248, # ALMOST EQUAL TO
0x00f8: 0x00b0, # DEGREE SIGN
0x00f9: 0x2219, # BULLET OPERATOR
0x00fa: 0x00b7, # MIDDLE DOT
0x00fb: 0x221a, # SQUARE ROOT
0x00fc: 0x207f, # SUPERSCRIPT LATIN SMALL LETTER N
0x00fd: 0x00b2, # SUPERSCRIPT TWO
0x00fe: 0x25a0, # BLACK SQUARE
0x00ff: 0x00a0, # NO-BREAK SPACE
})
### Decoding Table
decoding_table = (
'\x00' # 0x0000 -> NULL
'\x01' # 0x0001 -> START OF HEADING
'\x02' # 0x0002 -> START OF TEXT
'\x03' # 0x0003 -> END OF TEXT
'\x04' # 0x0004 -> END OF TRANSMISSION
'\x05' # 0x0005 -> ENQUIRY
'\x06' # 0x0006 -> ACKNOWLEDGE
'\x07' # 0x0007 -> BELL
'\x08' # 0x0008 -> BACKSPACE
'\t' # 0x0009 -> HORIZONTAL TABULATION
'\n' # 0x000a -> LINE FEED
'\x0b' # 0x000b -> VERTICAL TABULATION
'\x0c' # 0x000c -> FORM FEED
'\r' # 0x000d -> CARRIAGE RETURN
'\x0e' # 0x000e -> SHIFT OUT
'\x0f' # 0x000f -> SHIFT IN
'\x10' # 0x0010 -> DATA LINK ESCAPE
'\x11' # 0x0011 -> DEVICE CONTROL ONE
'\x12' # 0x0012 -> DEVICE CONTROL TWO
'\x13' # 0x0013 -> DEVICE CONTROL THREE
'\x14' # 0x0014 -> DEVICE CONTROL FOUR
'\x15' # 0x0015 -> NEGATIVE ACKNOWLEDGE
'\x16' # 0x0016 -> SYNCHRONOUS IDLE
'\x17' # 0x0017 -> END OF TRANSMISSION BLOCK
'\x18' # 0x0018 -> CANCEL
'\x19' # 0x0019 -> END OF MEDIUM
'\x1a' # 0x001a -> SUBSTITUTE
'\x1b' # 0x001b -> ESCAPE
'\x1c' # 0x001c -> FILE SEPARATOR
'\x1d' # 0x001d -> GROUP SEPARATOR
'\x1e' # 0x001e -> RECORD SEPARATOR
'\x1f' # 0x001f -> UNIT SEPARATOR
' ' # 0x0020 -> SPACE
'!' # 0x0021 -> EXCLAMATION MARK
'"' # 0x0022 -> QUOTATION MARK
'#' # 0x0023 -> NUMBER SIGN
'$' # 0x0024 -> DOLLAR SIGN
'%' # 0x0025 -> PERCENT SIGN
'&' # 0x0026 -> AMPERSAND
"'" # 0x0027 -> APOSTROPHE
'(' # 0x0028 -> LEFT PARENTHESIS
')' # 0x0029 -> RIGHT PARENTHESIS
'*' # 0x002a -> ASTERISK
'+' # 0x002b -> PLUS SIGN
',' # 0x002c -> COMMA
'-' # 0x002d -> HYPHEN-MINUS
'.' # 0x002e -> FULL STOP
'/' # 0x002f -> SOLIDUS
'0' # 0x0030 -> DIGIT ZERO
'1' # 0x0031 -> DIGIT ONE
'2' # 0x0032 -> DIGIT TWO
'3' # 0x0033 -> DIGIT THREE
'4' # 0x0034 -> DIGIT FOUR
'5' # 0x0035 -> DIGIT FIVE
'6' # 0x0036 -> DIGIT SIX
'7' # 0x0037 -> DIGIT SEVEN
'8' # 0x0038 -> DIGIT EIGHT
'9' # 0x0039 -> DIGIT NINE
':' # 0x003a -> COLON
';' # 0x003b -> SEMICOLON
'<' # 0x003c -> LESS-THAN SIGN
'=' # 0x003d -> EQUALS SIGN
'>' # 0x003e -> GREATER-THAN SIGN
'?' # 0x003f -> QUESTION MARK
'@' # 0x0040 -> COMMERCIAL AT
'A' # 0x0041 -> LATIN CAPITAL LETTER A
'B' # 0x0042 -> LATIN CAPITAL LETTER B
'C' # 0x0043 -> LATIN CAPITAL LETTER C
'D' # 0x0044 -> LATIN CAPITAL LETTER D
'E' # 0x0045 -> LATIN CAPITAL LETTER E
'F' # 0x0046 -> LATIN CAPITAL LETTER F
'G' # 0x0047 -> LATIN CAPITAL LETTER G
'H' # 0x0048 -> LATIN CAPITAL LETTER H
'I' # 0x0049 -> LATIN CAPITAL LETTER I
'J' # 0x004a -> LATIN CAPITAL LETTER J
'K' # 0x004b -> LATIN CAPITAL LETTER K
'L' # 0x004c -> LATIN CAPITAL LETTER L
'M' # 0x004d -> LATIN CAPITAL LETTER M
'N' # 0x004e -> LATIN CAPITAL LETTER N
'O' # 0x004f -> LATIN CAPITAL LETTER O
'P' # 0x0050 -> LATIN CAPITAL LETTER P
'Q' # 0x0051 -> LATIN CAPITAL LETTER Q
'R' # 0x0052 -> LATIN CAPITAL LETTER R
'S' # 0x0053 -> LATIN CAPITAL LETTER S
'T' # 0x0054 -> LATIN CAPITAL LETTER T
'U' # 0x0055 -> LATIN CAPITAL LETTER U
'V' # 0x0056 -> LATIN CAPITAL LETTER V
'W' # 0x0057 -> LATIN CAPITAL LETTER W
'X' # 0x0058 -> LATIN CAPITAL LETTER X
'Y' # 0x0059 -> LATIN CAPITAL LETTER Y
'Z' # 0x005a -> LATIN CAPITAL LETTER Z
'[' # 0x005b -> LEFT SQUARE BRACKET
'\\' # 0x005c -> REVERSE SOLIDUS
']' # 0x005d -> RIGHT SQUARE BRACKET
'^' # 0x005e -> CIRCUMFLEX ACCENT
'_' # 0x005f -> LOW LINE
'`' # 0x0060 -> GRAVE ACCENT
'a' # 0x0061 -> LATIN SMALL LETTER A
'b' # 0x0062 -> LATIN SMALL LETTER B
'c' # 0x0063 -> LATIN SMALL LETTER C
'd' # 0x0064 -> LATIN SMALL LETTER D
'e' # 0x0065 -> LATIN SMALL LETTER E
'f' # 0x0066 -> LATIN SMALL LETTER F
'g' # 0x0067 -> LATIN SMALL LETTER G
'h' # 0x0068 -> LATIN SMALL LETTER H
'i' # 0x0069 -> LATIN SMALL LETTER I
'j' # 0x006a -> LATIN SMALL LETTER J
'k' # 0x006b -> LATIN SMALL LETTER K
'l' # 0x006c -> LATIN SMALL LETTER L
'm' # 0x006d -> LATIN SMALL LETTER M
'n' # 0x006e -> LATIN SMALL LETTER N
'o' # 0x006f -> LATIN SMALL LETTER O
'p' # 0x0070 -> LATIN SMALL LETTER P
'q' # 0x0071 -> LATIN SMALL LETTER Q
'r' # 0x0072 -> LATIN SMALL LETTER R
's' # 0x0073 -> LATIN SMALL LETTER S
't' # 0x0074 -> LATIN SMALL LETTER T
'u' # 0x0075 -> LATIN SMALL LETTER U
'v' # 0x0076 -> LATIN SMALL LETTER V
'w' # 0x0077 -> LATIN SMALL LETTER W
'x' # 0x0078 -> LATIN SMALL LETTER X
'y' # 0x0079 -> LATIN SMALL LETTER Y
'z' # 0x007a -> LATIN SMALL LETTER Z
'{' # 0x007b -> LEFT CURLY BRACKET
'|' # 0x007c -> VERTICAL LINE
'}' # 0x007d -> RIGHT CURLY BRACKET
'~' # 0x007e -> TILDE
'\x7f' # 0x007f -> DELETE
'\xc7' # 0x0080 -> LATIN CAPITAL LETTER C WITH CEDILLA
'\xfc' # 0x0081 -> LATIN SMALL LETTER U WITH DIAERESIS
'\xe9' # 0x0082 -> LATIN SMALL LETTER E WITH ACUTE
'\xe2' # 0x0083 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
'\xe4' # 0x0084 -> LATIN SMALL LETTER A WITH DIAERESIS
'\xe0' # 0x0085 -> LATIN SMALL LETTER A WITH GRAVE
'\xe5' # 0x0086 -> LATIN SMALL LETTER A WITH RING ABOVE
'\xe7' # 0x0087 -> LATIN SMALL LETTER C WITH CEDILLA
'\xea' # 0x0088 -> LATIN SMALL LETTER E WITH CIRCUMFLEX
'\xeb' # 0x0089 -> LATIN SMALL LETTER E WITH DIAERESIS
'\xe8' # 0x008a -> LATIN SMALL LETTER E WITH GRAVE
'\xef' # 0x008b -> LATIN SMALL LETTER I WITH DIAERESIS
'\xee' # 0x008c -> LATIN SMALL LETTER I WITH CIRCUMFLEX
'\xec' # 0x008d -> LATIN SMALL LETTER I WITH GRAVE
'\xc4' # 0x008e -> LATIN CAPITAL LETTER A WITH DIAERESIS
'\xc5' # 0x008f -> LATIN CAPITAL LETTER A WITH RING ABOVE
'\xc9' # 0x0090 -> LATIN CAPITAL LETTER E WITH ACUTE
'\xe6' # 0x0091 -> LATIN SMALL LIGATURE AE
'\xc6' # 0x0092 -> LATIN CAPITAL LIGATURE AE
'\xf4' # 0x0093 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
'\xf6' # 0x0094 -> LATIN SMALL LETTER O WITH DIAERESIS
'\xf2' # 0x0095 -> LATIN SMALL LETTER O WITH GRAVE
'\xfb' # 0x0096 -> LATIN SMALL LETTER U WITH CIRCUMFLEX
'\xf9' # 0x0097 -> LATIN SMALL LETTER U WITH GRAVE
'\xff' # 0x0098 -> LATIN SMALL LETTER Y WITH DIAERESIS
'\xd6' # 0x0099 -> LATIN CAPITAL LETTER O WITH DIAERESIS
'\xdc' # 0x009a -> LATIN CAPITAL LETTER U WITH DIAERESIS
'\xa2' # 0x009b -> CENT SIGN
'\xa3' # 0x009c -> POUND SIGN
'\xa5' # 0x009d -> YEN SIGN
'\u20a7' # 0x009e -> PESETA SIGN
'\u0192' # 0x009f -> LATIN SMALL LETTER F WITH HOOK
'\xe1' # 0x00a0 -> LATIN SMALL LETTER A WITH ACUTE
'\xed' # 0x00a1 -> LATIN SMALL LETTER I WITH ACUTE
'\xf3' # 0x00a2 -> LATIN SMALL LETTER O WITH ACUTE
'\xfa' # 0x00a3 -> LATIN SMALL LETTER U WITH ACUTE
'\xf1' # 0x00a4 -> LATIN SMALL LETTER N WITH TILDE
'\xd1' # 0x00a5 -> LATIN CAPITAL LETTER N WITH TILDE
'\xaa' # 0x00a6 -> FEMININE ORDINAL INDICATOR
'\xba' # 0x00a7 -> MASCULINE ORDINAL INDICATOR
'\xbf' # 0x00a8 -> INVERTED QUESTION MARK
'\u2310' # 0x00a9 -> REVERSED NOT SIGN
'\xac' # 0x00aa -> NOT SIGN
'\xbd' # 0x00ab -> VULGAR FRACTION ONE HALF
'\xbc' # 0x00ac -> VULGAR FRACTION ONE QUARTER
'\xa1' # 0x00ad -> INVERTED EXCLAMATION MARK
'\xab' # 0x00ae -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
'\xbb' # 0x00af -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
'\u2591' # 0x00b0 -> LIGHT SHADE
'\u2592' # 0x00b1 -> MEDIUM SHADE
'\u2593' # 0x00b2 -> DARK SHADE
'\u2502' # 0x00b3 -> BOX DRAWINGS LIGHT VERTICAL
'\u2524' # 0x00b4 -> BOX DRAWINGS LIGHT VERTICAL AND LEFT
'\u2561' # 0x00b5 -> BOX DRAWINGS VERTICAL SINGLE AND LEFT DOUBLE
'\u2562' # 0x00b6 -> BOX DRAWINGS VERTICAL DOUBLE AND LEFT SINGLE
'\u2556' # 0x00b7 -> BOX DRAWINGS DOWN DOUBLE AND LEFT SINGLE
'\u2555' # 0x00b8 -> BOX DRAWINGS DOWN SINGLE AND LEFT DOUBLE
'\u2563' # 0x00b9 -> BOX DRAWINGS DOUBLE VERTICAL AND LEFT
'\u2551' # 0x00ba -> BOX DRAWINGS DOUBLE VERTICAL
'\u2557' # 0x00bb -> BOX DRAWINGS DOUBLE DOWN AND LEFT
'\u255d' # 0x00bc -> BOX DRAWINGS DOUBLE UP AND LEFT
'\u255c' # 0x00bd -> BOX DRAWINGS UP DOUBLE AND LEFT SINGLE
'\u255b' # 0x00be -> BOX DRAWINGS UP SINGLE AND LEFT DOUBLE
'\u2510' # 0x00bf -> BOX DRAWINGS LIGHT DOWN AND LEFT
'\u2514' # 0x00c0 -> BOX DRAWINGS LIGHT UP AND RIGHT
'\u2534' # 0x00c1 -> BOX DRAWINGS LIGHT UP AND HORIZONTAL
'\u252c' # 0x00c2 -> BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
'\u251c' # 0x00c3 -> BOX DRAWINGS LIGHT VERTICAL AND RIGHT
'\u2500' # 0x00c4 -> BOX DRAWINGS LIGHT HORIZONTAL
'\u253c' # 0x00c5 -> BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
'\u255e' # 0x00c6 -> BOX DRAWINGS VERTICAL SINGLE AND RIGHT DOUBLE
'\u255f' # 0x00c7 -> BOX DRAWINGS VERTICAL DOUBLE AND RIGHT SINGLE
'\u255a' # 0x00c8 -> BOX DRAWINGS DOUBLE UP AND RIGHT
'\u2554' # 0x00c9 -> BOX DRAWINGS DOUBLE DOWN AND RIGHT
'\u2569' # 0x00ca -> BOX DRAWINGS DOUBLE UP AND HORIZONTAL
'\u2566' # 0x00cb -> BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
'\u2560' # 0x00cc -> BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
'\u2550' # 0x00cd -> BOX DRAWINGS DOUBLE HORIZONTAL
'\u256c' # 0x00ce -> BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
'\u2567' # 0x00cf -> BOX DRAWINGS UP SINGLE AND HORIZONTAL DOUBLE
'\u2568' # 0x00d0 -> BOX DRAWINGS UP DOUBLE AND HORIZONTAL SINGLE
'\u2564' # 0x00d1 -> BOX DRAWINGS DOWN SINGLE AND HORIZONTAL DOUBLE
'\u2565' # 0x00d2 -> BOX DRAWINGS DOWN DOUBLE AND HORIZONTAL SINGLE
'\u2559' # 0x00d3 -> BOX DRAWINGS UP DOUBLE AND RIGHT SINGLE
'\u2558' # 0x00d4 -> BOX DRAWINGS UP SINGLE AND RIGHT DOUBLE
'\u2552' # 0x00d5 -> BOX DRAWINGS DOWN SINGLE AND RIGHT DOUBLE
'\u2553' # 0x00d6 -> BOX DRAWINGS DOWN DOUBLE AND RIGHT SINGLE
'\u256b' # 0x00d7 -> BOX DRAWINGS VERTICAL DOUBLE AND HORIZONTAL SINGLE
'\u256a' # 0x00d8 -> BOX DRAWINGS VERTICAL SINGLE AND HORIZONTAL DOUBLE
'\u2518' # 0x00d9 -> BOX DRAWINGS LIGHT UP AND LEFT
'\u250c' # 0x00da -> BOX DRAWINGS LIGHT DOWN AND RIGHT
'\u2588' # 0x00db -> FULL BLOCK
'\u2584' # 0x00dc -> LOWER HALF BLOCK
'\u258c' # 0x00dd -> LEFT HALF BLOCK
'\u2590' # 0x00de -> RIGHT HALF BLOCK
'\u2580' # 0x00df -> UPPER HALF BLOCK
'\u03b1' # 0x00e0 -> GREEK SMALL LETTER ALPHA
'\xdf' # 0x00e1 -> LATIN SMALL LETTER SHARP S
'\u0393' # 0x00e2 -> GREEK CAPITAL LETTER GAMMA
'\u03c0' # 0x00e3 -> GREEK SMALL LETTER PI
'\u03a3' # 0x00e4 -> GREEK CAPITAL LETTER SIGMA
'\u03c3' # 0x00e5 -> GREEK SMALL LETTER SIGMA
'\xb5' # 0x00e6 -> MICRO SIGN
'\u03c4' # 0x00e7 -> GREEK SMALL LETTER TAU
'\u03a6' # 0x00e8 -> GREEK CAPITAL LETTER PHI
'\u0398' # 0x00e9 -> GREEK CAPITAL LETTER THETA
'\u03a9' # 0x00ea -> GREEK CAPITAL LETTER OMEGA
'\u03b4' # 0x00eb -> GREEK SMALL LETTER DELTA
'\u221e' # 0x00ec -> INFINITY
'\u03c6' # 0x00ed -> GREEK SMALL LETTER PHI
'\u03b5' # 0x00ee -> GREEK SMALL LETTER EPSILON
'\u2229' # 0x00ef -> INTERSECTION
'\u2261' # 0x00f0 -> IDENTICAL TO
'\xb1' # 0x00f1 -> PLUS-MINUS SIGN
'\u2265' # 0x00f2 -> GREATER-THAN OR EQUAL TO
'\u2264' # 0x00f3 -> LESS-THAN OR EQUAL TO
'\u2320' # 0x00f4 -> TOP HALF INTEGRAL
'\u2321' # 0x00f5 -> BOTTOM HALF INTEGRAL
'\xf7' # 0x00f6 -> DIVISION SIGN
'\u2248' # 0x00f7 -> ALMOST EQUAL TO
'\xb0' # 0x00f8 -> DEGREE SIGN
'\u2219' # 0x00f9 -> BULLET OPERATOR
'\xb7' # 0x00fa -> MIDDLE DOT
'\u221a' # 0x00fb -> SQUARE ROOT
'\u207f' # 0x00fc -> SUPERSCRIPT LATIN SMALL LETTER N
'\xb2' # 0x00fd -> SUPERSCRIPT TWO
'\u25a0' # 0x00fe -> BLACK SQUARE
'\xa0' # 0x00ff -> NO-BREAK SPACE
)
### Encoding Map
encoding_map = {
0x0000: 0x0000, # NULL
0x0001: 0x0001, # START OF HEADING
0x0002: 0x0002, # START OF TEXT
0x0003: 0x0003, # END OF TEXT
0x0004: 0x0004, # END OF TRANSMISSION
0x0005: 0x0005, # ENQUIRY
0x0006: 0x0006, # ACKNOWLEDGE
0x0007: 0x0007, # BELL
0x0008: 0x0008, # BACKSPACE
0x0009: 0x0009, # HORIZONTAL TABULATION
0x000a: 0x000a, # LINE FEED
0x000b: 0x000b, # VERTICAL TABULATION
0x000c: 0x000c, # FORM FEED
0x000d: 0x000d, # CARRIAGE RETURN
0x000e: 0x000e, # SHIFT OUT
0x000f: 0x000f, # SHIFT IN
0x0010: 0x0010, # DATA LINK ESCAPE
0x0011: 0x0011, # DEVICE CONTROL ONE
0x0012: 0x0012, # DEVICE CONTROL TWO
0x0013: 0x0013, # DEVICE CONTROL THREE
0x0014: 0x0014, # DEVICE CONTROL FOUR
0x0015: 0x0015, # NEGATIVE ACKNOWLEDGE
0x0016: 0x0016, # SYNCHRONOUS IDLE
0x0017: 0x0017, # END OF TRANSMISSION BLOCK
0x0018: 0x0018, # CANCEL
0x0019: 0x0019, # END OF MEDIUM
0x001a: 0x001a, # SUBSTITUTE
0x001b: 0x001b, # ESCAPE
0x001c: 0x001c, # FILE SEPARATOR
0x001d: 0x001d, # GROUP SEPARATOR
0x001e: 0x001e, # RECORD SEPARATOR
0x001f: 0x001f, # UNIT SEPARATOR
0x0020: 0x0020, # SPACE
0x0021: 0x0021, # EXCLAMATION MARK
0x0022: 0x0022, # QUOTATION MARK
0x0023: 0x0023, # NUMBER SIGN
0x0024: 0x0024, # DOLLAR SIGN
0x0025: 0x0025, # PERCENT SIGN
0x0026: 0x0026, # AMPERSAND
0x0027: 0x0027, # APOSTROPHE
0x0028: 0x0028, # LEFT PARENTHESIS
0x0029: 0x0029, # RIGHT PARENTHESIS
0x002a: 0x002a, # ASTERISK
0x002b: 0x002b, # PLUS SIGN
0x002c: 0x002c, # COMMA
0x002d: 0x002d, # HYPHEN-MINUS
0x002e: 0x002e, # FULL STOP
0x002f: 0x002f, # SOLIDUS
0x0030: 0x0030, # DIGIT ZERO
0x0031: 0x0031, # DIGIT ONE
0x0032: 0x0032, # DIGIT TWO
0x0033: 0x0033, # DIGIT THREE
0x0034: 0x0034, # DIGIT FOUR
0x0035: 0x0035, # DIGIT FIVE
0x0036: 0x0036, # DIGIT SIX
0x0037: 0x0037, # DIGIT SEVEN
0x0038: 0x0038, # DIGIT EIGHT
0x0039: 0x0039, # DIGIT NINE
0x003a: 0x003a, # COLON
0x003b: 0x003b, # SEMICOLON
0x003c: 0x003c, # LESS-THAN SIGN
0x003d: 0x003d, # EQUALS SIGN
0x003e: 0x003e, # GREATER-THAN SIGN
0x003f: 0x003f, # QUESTION MARK
0x0040: 0x0040, # COMMERCIAL AT
0x0041: 0x0041, # LATIN CAPITAL LETTER A
0x0042: 0x0042, # LATIN CAPITAL LETTER B
0x0043: 0x0043, # LATIN CAPITAL LETTER C
0x0044: 0x0044, # LATIN CAPITAL LETTER D
0x0045: 0x0045, # LATIN CAPITAL LETTER E
0x0046: 0x0046, # LATIN CAPITAL LETTER F
0x0047: 0x0047, # LATIN CAPITAL LETTER G
0x0048: 0x0048, # LATIN CAPITAL LETTER H
0x0049: 0x0049, # LATIN CAPITAL LETTER I
0x004a: 0x004a, # LATIN CAPITAL LETTER J
0x004b: 0x004b, # LATIN CAPITAL LETTER K
0x004c: 0x004c, # LATIN CAPITAL LETTER L
0x004d: 0x004d, # LATIN CAPITAL LETTER M
0x004e: 0x004e, # LATIN CAPITAL LETTER N
0x004f: 0x004f, # LATIN CAPITAL LETTER O
0x0050: 0x0050, # LATIN CAPITAL LETTER P
0x0051: 0x0051, # LATIN CAPITAL LETTER Q
0x0052: 0x0052, # LATIN CAPITAL LETTER R
0x0053: 0x0053, # LATIN CAPITAL LETTER S
0x0054: 0x0054, # LATIN CAPITAL LETTER T
0x0055: 0x0055, # LATIN CAPITAL LETTER U
0x0056: 0x0056, # LATIN CAPITAL LETTER V
0x0057: 0x0057, # LATIN CAPITAL LETTER W
0x0058: 0x0058, # LATIN CAPITAL LETTER X
0x0059: 0x0059, # LATIN CAPITAL LETTER Y
0x005a: 0x005a, # LATIN CAPITAL LETTER Z
0x005b: 0x005b, # LEFT SQUARE BRACKET
0x005c: 0x005c, # REVERSE SOLIDUS
0x005d: 0x005d, # RIGHT SQUARE BRACKET
0x005e: 0x005e, # CIRCUMFLEX ACCENT
0x005f: 0x005f, # LOW LINE
0x0060: 0x0060, # GRAVE ACCENT
0x0061: 0x0061, # LATIN SMALL LETTER A
0x0062: 0x0062, # LATIN SMALL LETTER B
0x0063: 0x0063, # LATIN SMALL LETTER C
0x0064: 0x0064, # LATIN SMALL LETTER D
0x0065: 0x0065, # LATIN SMALL LETTER E
0x0066: 0x0066, # LATIN SMALL LETTER F
0x0067: 0x0067, # LATIN SMALL LETTER G
0x0068: 0x0068, # LATIN SMALL LETTER H
0x0069: 0x0069, # LATIN SMALL LETTER I
0x006a: 0x006a, # LATIN SMALL LETTER J
0x006b: 0x006b, # LATIN SMALL LETTER K
0x006c: 0x006c, # LATIN SMALL LETTER L
0x006d: 0x006d, # LATIN SMALL LETTER M
0x006e: 0x006e, # LATIN SMALL LETTER N
0x006f: 0x006f, # LATIN SMALL LETTER O
0x0070: 0x0070, # LATIN SMALL LETTER P
0x0071: 0x0071, # LATIN SMALL LETTER Q
0x0072: 0x0072, # LATIN SMALL LETTER R
0x0073: 0x0073, # LATIN SMALL LETTER S
0x0074: 0x0074, # LATIN SMALL LETTER T
0x0075: 0x0075, # LATIN SMALL LETTER U
0x0076: 0x0076, # LATIN SMALL LETTER V
0x0077: 0x0077, # LATIN SMALL LETTER W
0x0078: 0x0078, # LATIN SMALL LETTER X
0x0079: 0x0079, # LATIN SMALL LETTER Y
0x007a: 0x007a, # LATIN SMALL LETTER Z
0x007b: 0x007b, # LEFT CURLY BRACKET
0x007c: 0x007c, # VERTICAL LINE
0x007d: 0x007d, # RIGHT CURLY BRACKET
0x007e: 0x007e, # TILDE
0x007f: 0x007f, # DELETE
0x00a0: 0x00ff, # NO-BREAK SPACE
0x00a1: 0x00ad, # INVERTED EXCLAMATION MARK
0x00a2: 0x009b, # CENT SIGN
0x00a3: 0x009c, # POUND SIGN
0x00a5: 0x009d, # YEN SIGN
0x00aa: 0x00a6, # FEMININE ORDINAL INDICATOR
0x00ab: 0x00ae, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00ac: 0x00aa, # NOT SIGN
0x00b0: 0x00f8, # DEGREE SIGN
0x00b1: 0x00f1, # PLUS-MINUS SIGN
0x00b2: 0x00fd, # SUPERSCRIPT TWO
0x00b5: 0x00e6, # MICRO SIGN
0x00b7: 0x00fa, # MIDDLE DOT
0x00ba: 0x00a7, # MASCULINE ORDINAL INDICATOR
0x00bb: 0x00af, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00bc: 0x00ac, # VULGAR FRACTION ONE QUARTER
0x00bd: 0x00ab, # VULGAR FRACTION ONE HALF
0x00bf: 0x00a8, # INVERTED QUESTION MARK
0x00c4: 0x008e, # LATIN CAPITAL LETTER A WITH DIAERESIS
0x00c5: 0x008f, # LATIN CAPITAL LETTER A WITH RING ABOVE
0x00c6: 0x0092, # LATIN CAPITAL LIGATURE AE
0x00c7: 0x0080, # LATIN CAPITAL LETTER C WITH CEDILLA
0x00c9: 0x0090, # LATIN CAPITAL LETTER E WITH ACUTE
0x00d1: 0x00a5, # LATIN CAPITAL LETTER N WITH TILDE
0x00d6: 0x0099, # LATIN CAPITAL LETTER O WITH DIAERESIS
0x00dc: 0x009a, # LATIN CAPITAL LETTER U WITH DIAERESIS
0x00df: 0x00e1, # LATIN SMALL LETTER SHARP S
0x00e0: 0x0085, # LATIN SMALL LETTER A WITH GRAVE
0x00e1: 0x00a0, # LATIN SMALL LETTER A WITH ACUTE
0x00e2: 0x0083, # LATIN SMALL LETTER A WITH CIRCUMFLEX
0x00e4: 0x0084, # LATIN SMALL LETTER A WITH DIAERESIS
0x00e5: 0x0086, # LATIN SMALL LETTER A WITH RING ABOVE
0x00e6: 0x0091, # LATIN SMALL LIGATURE AE
0x00e7: 0x0087, # LATIN SMALL LETTER C WITH CEDILLA
0x00e8: 0x008a, # LATIN SMALL LETTER E WITH GRAVE
0x00e9: 0x0082, # LATIN SMALL LETTER E WITH ACUTE
0x00ea: 0x0088, # LATIN SMALL LETTER E WITH CIRCUMFLEX
0x00eb: 0x0089, # LATIN SMALL LETTER E WITH DIAERESIS
0x00ec: 0x008d, # LATIN SMALL LETTER I WITH GRAVE
0x00ed: 0x00a1, # LATIN SMALL LETTER I WITH ACUTE
0x00ee: 0x008c, # LATIN SMALL LETTER I WITH CIRCUMFLEX
0x00ef: 0x008b, # LATIN SMALL LETTER I WITH DIAERESIS
0x00f1: 0x00a4, # LATIN SMALL LETTER N WITH TILDE
0x00f2: 0x0095, # LATIN SMALL LETTER O WITH GRAVE
0x00f3: 0x00a2, # LATIN SMALL LETTER O WITH ACUTE
0x00f4: 0x0093, # LATIN SMALL LETTER O WITH CIRCUMFLEX
0x00f6: 0x0094, # LATIN SMALL LETTER O WITH DIAERESIS
0x00f7: 0x00f6, # DIVISION SIGN
0x00f9: 0x0097, # LATIN SMALL LETTER U WITH GRAVE
0x00fa: 0x00a3, # LATIN SMALL LETTER U WITH ACUTE
0x00fb: 0x0096, # LATIN SMALL LETTER U WITH CIRCUMFLEX
0x00fc: 0x0081, # LATIN SMALL LETTER U WITH DIAERESIS
0x00ff: 0x0098, # LATIN SMALL LETTER Y WITH DIAERESIS
0x0192: 0x009f, # LATIN SMALL LETTER F WITH HOOK
0x0393: 0x00e2, # GREEK CAPITAL LETTER GAMMA
0x0398: 0x00e9, # GREEK CAPITAL LETTER THETA
0x03a3: 0x00e4, # GREEK CAPITAL LETTER SIGMA
0x03a6: 0x00e8, # GREEK CAPITAL LETTER PHI
0x03a9: 0x00ea, # GREEK CAPITAL LETTER OMEGA
0x03b1: 0x00e0, # GREEK SMALL LETTER ALPHA
0x03b4: 0x00eb, # GREEK SMALL LETTER DELTA
0x03b5: 0x00ee, # GREEK SMALL LETTER EPSILON
0x03c0: 0x00e3, # GREEK SMALL LETTER PI
0x03c3: 0x00e5, # GREEK SMALL LETTER SIGMA
0x03c4: 0x00e7, # GREEK SMALL LETTER TAU
0x03c6: 0x00ed, # GREEK SMALL LETTER PHI
0x207f: 0x00fc, # SUPERSCRIPT LATIN SMALL LETTER N
0x20a7: 0x009e, # PESETA SIGN
0x2219: 0x00f9, # BULLET OPERATOR
0x221a: 0x00fb, # SQUARE ROOT
0x221e: 0x00ec, # INFINITY
0x2229: 0x00ef, # INTERSECTION
0x2248: 0x00f7, # ALMOST EQUAL TO
0x2261: 0x00f0, # IDENTICAL TO
0x2264: 0x00f3, # LESS-THAN OR EQUAL TO
0x2265: 0x00f2, # GREATER-THAN OR EQUAL TO
0x2310: 0x00a9, # REVERSED NOT SIGN
0x2320: 0x00f4, # TOP HALF INTEGRAL
0x2321: 0x00f5, # BOTTOM HALF INTEGRAL
0x2500: 0x00c4, # BOX DRAWINGS LIGHT HORIZONTAL
0x2502: 0x00b3, # BOX DRAWINGS LIGHT VERTICAL
0x250c: 0x00da, # BOX DRAWINGS LIGHT DOWN AND RIGHT
0x2510: 0x00bf, # BOX DRAWINGS LIGHT DOWN AND LEFT
0x2514: 0x00c0, # BOX DRAWINGS LIGHT UP AND RIGHT
0x2518: 0x00d9, # BOX DRAWINGS LIGHT UP AND LEFT
0x251c: 0x00c3, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT
0x2524: 0x00b4, # BOX DRAWINGS LIGHT VERTICAL AND LEFT
0x252c: 0x00c2, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
0x2534: 0x00c1, # BOX DRAWINGS LIGHT UP AND HORIZONTAL
0x253c: 0x00c5, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
0x2550: 0x00cd, # BOX DRAWINGS DOUBLE HORIZONTAL
0x2551: 0x00ba, # BOX DRAWINGS DOUBLE VERTICAL
0x2552: 0x00d5, # BOX DRAWINGS DOWN SINGLE AND RIGHT DOUBLE
0x2553: 0x00d6, # BOX DRAWINGS DOWN DOUBLE AND RIGHT SINGLE
0x2554: 0x00c9, # BOX DRAWINGS DOUBLE DOWN AND RIGHT
0x2555: 0x00b8, # BOX DRAWINGS DOWN SINGLE AND LEFT DOUBLE
0x2556: 0x00b7, # BOX DRAWINGS DOWN DOUBLE AND LEFT SINGLE
0x2557: 0x00bb, # BOX DRAWINGS DOUBLE DOWN AND LEFT
0x2558: 0x00d4, # BOX DRAWINGS UP SINGLE AND RIGHT DOUBLE
0x2559: 0x00d3, # BOX DRAWINGS UP DOUBLE AND RIGHT SINGLE
0x255a: 0x00c8, # BOX DRAWINGS DOUBLE UP AND RIGHT
0x255b: 0x00be, # BOX DRAWINGS UP SINGLE AND LEFT DOUBLE
0x255c: 0x00bd, # BOX DRAWINGS UP DOUBLE AND LEFT SINGLE
0x255d: 0x00bc, # BOX DRAWINGS DOUBLE UP AND LEFT
0x255e: 0x00c6, # BOX DRAWINGS VERTICAL SINGLE AND RIGHT DOUBLE
0x255f: 0x00c7, # BOX DRAWINGS VERTICAL DOUBLE AND RIGHT SINGLE
0x2560: 0x00cc, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
0x2561: 0x00b5, # BOX DRAWINGS VERTICAL SINGLE AND LEFT DOUBLE
0x2562: 0x00b6, # BOX DRAWINGS VERTICAL DOUBLE AND LEFT SINGLE
0x2563: 0x00b9, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT
0x2564: 0x00d1, # BOX DRAWINGS DOWN SINGLE AND HORIZONTAL DOUBLE
0x2565: 0x00d2, # BOX DRAWINGS DOWN DOUBLE AND HORIZONTAL SINGLE
0x2566: 0x00cb, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
0x2567: 0x00cf, # BOX DRAWINGS UP SINGLE AND HORIZONTAL DOUBLE
0x2568: 0x00d0, # BOX DRAWINGS UP DOUBLE AND HORIZONTAL SINGLE
0x2569: 0x00ca, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL
0x256a: 0x00d8, # BOX DRAWINGS VERTICAL SINGLE AND HORIZONTAL DOUBLE
0x256b: 0x00d7, # BOX DRAWINGS VERTICAL DOUBLE AND HORIZONTAL SINGLE
0x256c: 0x00ce, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
0x2580: 0x00df, # UPPER HALF BLOCK
0x2584: 0x00dc, # LOWER HALF BLOCK
0x2588: 0x00db, # FULL BLOCK
0x258c: 0x00dd, # LEFT HALF BLOCK
0x2590: 0x00de, # RIGHT HALF BLOCK
0x2591: 0x00b0, # LIGHT SHADE
0x2592: 0x00b1, # MEDIUM SHADE
0x2593: 0x00b2, # DARK SHADE
0x25a0: 0x00fe, # BLACK SQUARE
}
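# The map above drives the codec's charmap encoder: each Unicode code point is
# translated to its single CP437 byte, so e.g. u'\u00f7' (DIVISION SIGN)
# encodes to 0xf6 via the entry listed earlier.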
| Microvellum/Fluid-Designer | win64-vc/2.78/python/lib/encodings/cp437.py | Python | gpl-3.0 | 34,564 | 0.019355 |
import pytest
import ray
from ray import tune
pytest.importorskip("horovod")
try:
from ray.tune.integration.horovod import (
DistributedTrainableCreator, _train_simple, _train_validate_session)
except ImportError:
pass # This shouldn't be reached - the test should be skipped.
@pytest.fixture
def ray_start_2_cpus():
address_info = ray.init(num_cpus=2)
yield address_info
# The code after the yield will run as teardown code.
ray.shutdown()
@pytest.fixture
def ray_start_4_cpus():
address_info = ray.init(num_cpus=4)
yield address_info
# The code after the yield will run as teardown code.
ray.shutdown()
@pytest.fixture
def ray_connect_cluster():
try:
address_info = ray.init(address="auto")
except Exception as e:
pytest.skip(str(e))
yield address_info
# The code after the yield will run as teardown code.
ray.shutdown()
def test_single_step(ray_start_2_cpus):
trainable_cls = DistributedTrainableCreator(
_train_simple, num_hosts=1, num_slots=2)
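    # DistributedTrainableCreator wraps the plain training function in a Tune
    # Trainable that launches num_hosts * num_slots Horovod workers (2 here,
    # matching the 2-CPU Ray fixture).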
trainer = trainable_cls()
trainer.train()
trainer.stop()
def test_step_after_completion(ray_start_2_cpus):
trainable_cls = DistributedTrainableCreator(
_train_simple, num_hosts=1, num_slots=2)
trainer = trainable_cls(config={"epochs": 1})
with pytest.raises(RuntimeError):
for i in range(10):
trainer.train()
def test_validation(ray_start_2_cpus):
def bad_func(a, b, c):
return 1
t_cls = DistributedTrainableCreator(bad_func, num_slots=2)
with pytest.raises(ValueError):
t_cls()
def test_set_global(ray_start_2_cpus):
trainable_cls = DistributedTrainableCreator(_train_simple, num_slots=2)
trainable = trainable_cls()
result = trainable.train()
trainable.stop()
assert result["rank"] == 0
@pytest.mark.parametrize("enabled_checkpoint", [True, False])
def test_simple_tune(ray_start_4_cpus, enabled_checkpoint):
trainable_cls = DistributedTrainableCreator(_train_simple, num_slots=2)
analysis = tune.run(
trainable_cls,
config={"enable_checkpoint": enabled_checkpoint},
num_samples=2,
stop={"training_iteration": 2})
assert analysis.trials[0].last_result["training_iteration"] == 2
assert analysis.trials[0].has_checkpoint() == enabled_checkpoint
@pytest.mark.parametrize("use_gpu", [True, False])
def test_resource_tune(ray_connect_cluster, use_gpu):
if use_gpu and ray.cluster_resources().get("GPU", 0) == 0:
pytest.skip("No GPU available.")
trainable_cls = DistributedTrainableCreator(
_train_simple, num_slots=2, use_gpu=use_gpu)
analysis = tune.run(
trainable_cls, num_samples=2, stop={"training_iteration": 2})
assert analysis.trials[0].last_result["training_iteration"] == 2
def test_validate_session(ray_start_2_cpus):
trainable_cls = DistributedTrainableCreator(_train_validate_session)
tune.run(trainable_cls)
if __name__ == "__main__":
import pytest
import sys
sys.exit(pytest.main(["-v", __file__]))
| pcmoritz/ray-1 | python/ray/tune/tests/test_horovod.py | Python | apache-2.0 | 3,092 | 0 |
# Reference: C++ snippet from the Qt QPainterPath documentation that this
# item re-creates in Python below.
#QLinearGradient myGradient;
#QPen myPen;
#QPolygonF myPolygon;
#QPainterPath myPath;
#myPath.addPolygon(myPolygon);
#QPainter painter(this);
#painter.setBrush(myGradient);
#painter.setPen(myPen);
#painter.drawPath(myPath);
import math
from PyQt5 import QtCore, QtGui, QtWidgets
class ArrowItem(QtWidgets.QGraphicsItem):
def definePath(self):
        polygonArrow = QtGui.QPolygonF()
        polygonArrow.append(QtCore.QPointF(0.0, 5.0))
        polygonArrow.append(QtCore.QPointF(60.0, 5.0))
        polygonArrow.append(QtCore.QPointF(60.0, 10.0))
        polygonArrow.append(QtCore.QPointF(80.0, 0.0))
        polygonArrow.append(QtCore.QPointF(60.0, -10.0))
        polygonArrow.append(QtCore.QPointF(60.0, -5.0))
        polygonArrow.append(QtCore.QPointF(0.0, -5.0))
        polygonArrow.append(QtCore.QPointF(0.0, 5.0))
        arrowPath = QtGui.QPainterPath()
        arrowPath.addPolygon(polygonArrow)
return arrowPath
    def boundingRect(self):
        """
        overloading of the qt bounding rectangle
        """
        # Must enclose everything paint() draws (the arrow spans x 0..80,
        # y -10..10), plus a small margin, or Qt will not repaint correctly.
        return QtCore.QRectF(-1, -11, 82, 22)
def paint(self, painter,option,widget):
"""
overloading of the paint method
"""
painter.setPen(QtGui.QPen(QtGui.QColor(79, 106, 25)))
painter.setBrush(QtGui.QColor(122, 163, 39))
painter.drawPath(self.definePath())
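# Minimal usage sketch (not part of the original module; assumes a running
# QApplication):
#
#   app = QtWidgets.QApplication([])
#   scene = QtWidgets.QGraphicsScene()
#   scene.addItem(ArrowItem())
#   view = QtWidgets.QGraphicsView(scene)
#   view.show()
#   app.exec_()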
| chiamingyen/PythonCAD_py3 | Interface/Entity/arrowitem.py | Python | gpl-2.0 | 1,388 | 0.029539 |
#coding:utf-8
from nadmin.sites import site
from nadmin.views import BaseAdminPlugin, CommAdminView
class MobilePlugin(BaseAdminPlugin):
def _test_mobile(self):
try:
return self.request.META['HTTP_USER_AGENT'].find('Android') >= 0 or \
self.request.META['HTTP_USER_AGENT'].find('iPhone') >= 0
except Exception:
return False
def init_request(self, *args, **kwargs):
return self._test_mobile()
def get_context(self, context):
#context['base_template'] = 'nadmin/base_mobile.html'
context['is_mob'] = True
return context
# Media
# def get_media(self, media):
# return media + self.vendor('nadmin.mobile.css', )
def block_extrahead(self, context, nodes):
nodes.append('<script>window.__admin_ismobile__ = true;</script>')
site.register_plugin(MobilePlugin, CommAdminView)
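# Registering against CommAdminView makes the plugin available to every admin
# view derived from it; in this xadmin-style plugin scheme, init_request()
# returning True (a mobile user agent was detected) is what switches the
# plugin on for a given request.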
| A425/django-nadmin | nadmin/plugins/mobile.py | Python | mit | 904 | 0.004425 |
import biopsy
import bifa_server
import base64, threading , os, socket
from sslUserCheck import CheckUserEngine
from soaplib.wsgi_soap import WSGISoapApp
from soaplib.wsgi_soap import SoapServiceBase
from soaplib.service import soapmethod
from soaplib.client import make_service_client
from soaplib.serializers.primitive import String, Integer, Array, Boolean, Float
from soaplib.serializers.binary import Attachment
from soaplib.serializers.clazz import ClassSerializer
# This does not need to be changed for local Windows testing
LOCALID = 'wsbc.warwick.ac.uk'
from tempfile import mkstemp
import os
global server
global cu
global portNo
def localIp():
if os.name == 'nt' :
return '127.0.0.1'
else :
name = socket.gethostname()
if name == 'wsbc.cov.warwick.ac.uk' :
return 'wsbc.warwick.ac.uk' #return external ethernet name
else :
pt = socket.gethostbyname(name)
print pt
return pt
#class userDataClass :
# username = String
# password = String
# OK = Boolean
# Used as part of the soap interface
class Version(ClassSerializer):
class types:
majorVersion = Integer
minorVersion = Integer
class BiFaWSGISoapApp(WSGISoapApp, SoapServiceBase):
'''
This object is a VERY simple extension of the base WSGISoapApp.
It subclasses both WSGISoapApp, and SoapServiceBase, so that
an object can simply subclass this single object, and it will
be both a wsgi application and a soap service. This is convenient
if you want to only expose some functionality, and don't need
complex handler mapping, and all of the functionality can be put
in a single class.
'''
    def onWsdl(self, environ, wsdl):
        '''
        This is called when a wsdl is requested
        @param the wsgi environment
        @param the wsdl string
        '''
        client = make_service_client('http://%s' % (localIp()), BiFa())
        return client.server.wsdl('')
def __init__(self):
self.cookie = ""
self.state = -9
WSGISoapApp.__init__(self)
SoapServiceBase.__init__(self)
def getHandler(self, environ):
global userCheckedEvent
global checkUserEvent
auth = environ.get("HTTP_AUTHORIZATION")
if auth == None :
raise Exception("Requests must include HTTP authorization")
if auth == '' :
raise Exception("Requests must include HTTP authorization")
if auth[0:6]=="Basic " :
auth = auth[6:]
else :
raise Exception("Requests must include HTTP basic authorization")
auth = base64.decodestring(auth)
user, sep, password = auth.partition(':')
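        # Standard HTTP Basic authentication: the client sends
        # "Authorization: Basic " + base64("user:password"), which decodes
        # back to the colon-separated pair split apart here.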
biopsy.UserAdmin.user = user
self.cookie,self.state = cu.check(user, password)
if self.cookie == "" :
print "No cookie"
raise Exception("Invalid user")
return self
# Soap types that are created in the bifa_server
from bifa_server import BiFaHit,PssmInfoData
class BiFa(BiFaWSGISoapApp):
@soapmethod(String, Float, Integer, Array(String), Boolean, String, Float, String, Boolean, Array(String), _returns=Array(BiFaHit))
def BiFaHits(self,sequence,threshold,algorithm,phyloSequences,useConsensusSequences,matrixSpecies,phyloThreshold,matrixNameMatch,useCumulativeLikelihoods,pssmSets):
if not biopsy.UserAdmin.isAllowed :
raise Exception("Invalid user")
if pssmSets != None :
pssms = pssmSets[0]
else :
pssms = ""
hits=bifa_server.bifa_hits(sequence, threshold, algorithm, phyloSequences,
useConsensusSequences, matrixSpecies, phyloThreshold, matrixNameMatch,
useCumulativeLikelihoods, pssmSets)
return hits
@soapmethod(String, Float, String, Integer, Boolean, Array(String), Boolean, String, Float, String, Boolean, Boolean, Array(String), _returns=String)
def BiFaAnalyser(self, sequence, threshold, title, algorithm, showLabels, phyloSequences, useConsensusSequences, matrixSpecies, phyloThreshold, matrixNameMatch, useOldAlgorithm, useCumulativeLikelihoods, pssmSets) :
if not biopsy.UserAdmin.isAllowed :
raise Exception("Invalid user")
if pssmSets != None :
pssms = pssmSets[0]
else :
pssms = ""
ps = ""
if phyloSequences != None :
i = 1
for seq in phyloSequences :
ps += "> Seq %i\n" % i
ps += seq
ps += "\n"
i += 1
str = "> RefSeq\n%s\n,%s,%f,%i,%i,%i,%s,%f,%s,%i,%i,%s\n" % (sequence, ps, threshold, algorithm, showLabels, useConsensusSequences, matrixSpecies, phyloThreshold, matrixNameMatch, useOldAlgorithm, useCumulativeLikelihoods, pssms )
cu.log2(str)
temp=bifa_server.bifa_tool(sequence, threshold, title, algorithm, showLabels, phyloSequences, useConsensusSequences, matrixSpecies, phyloThreshold, matrixNameMatch, useOldAlgorithm, useCumulativeLikelihoods, pssmSets)
output_svg_file="".join([temp, ".svg"])
if os.path.isfile(output_svg_file):
f1=open(output_svg_file, 'r')
svg_string=f1.readlines()
f1.close()
os.remove(output_svg_file)
return "".join(svg_string)
else:
return "no file"
@soapmethod(String, _returns= String)
def returningString(self, key):
# If no password was provided then the cookie is used to validate the client
# otherwise it is a real cookie
p1, sep, p2 = key.partition(':')
if key == "connection_test" :
if self.state == -1 :
rv = "keyCheck:" + self.cookie
else :
rv = "established:" + self.cookie
return rv
elif key == "connection_info":
if self.state == -1 :
rv = "keyCheck:" + self.cookie
else :
rv = "established:" + self.cookie + ":"+self.transfacVersion() + "." + self.customPssmVersion()
return rv
else :
return "unknown request"
@soapmethod(String, String, Float, _returns=Array(Float))
def scorePssmOnSequence(self, pssm_name, sequence, threshold):
if not biopsy.UserAdmin.isAllowed :
raise Exception("Invalid user")
return bifa_server.score_pssm_on_sequence(pssm_name, sequence, threshold)
@soapmethod(Array(String), Array(String), Integer, _returns=Array(String))
def scorePssmsOnSequences(self, pssmNames, sequences, algorithm):
if not biopsy.UserAdmin.isAllowed :
raise Exception("Invalid user")
return bifa_server.score_pssms_on_sequences(pssmNames, sequences, algorithm)
@soapmethod(_returns=Float)
def bindingPrior(self):
if not biopsy.UserAdmin.isAllowed :
raise Exception("Invalid user")
return biopsy.Environment.bindingPrior
@soapmethod(_returns=Integer)
def maxChainMaxNumSequences(self):
if not biopsy.UserAdmin.isAllowed :
raise Exception("Invalid user")
return biopsy.Environment.max_chain_max_num_sequences
@soapmethod(_returns=Array(String))
def PssmSetNames(self):
if not biopsy.UserAdmin.isAllowed :
raise Exception("Invalid user")
return bifa_server.get_pssm_set_names()
@soapmethod(Boolean,String,String,Array(String),_returns=Array(String))
def Pssms(self,useConsensusSequences,matrixSpecies,matrixNameMatch,pssmSets):
if not biopsy.UserAdmin.isAllowed :
raise Exception("Invalid user")
return bifa_server.pssmAccs(pssmSets,useConsensusSequences,matrixSpecies,matrixNameMatch)
@soapmethod(String,_returns=PssmInfoData)
def PssmInfo(self,pssmName):
if not biopsy.UserAdmin.isAllowed :
raise Exception("Invalid user")
return bifa_server.get_pssm_info(pssmName)
@soapmethod(String,_returns=Array(String))
def PssmFreqs(self,pssmName):
if not biopsy.UserAdmin.isAllowed :
raise Exception("Invalid user")
if (biopsy.UserAdmin.userType < 2) :
raise Exception("Operation not allowed for current user")
return bifa_server.get_pssm_freqs(pssmName)
@soapmethod(String,_returns=Array(String))
def PssmCounts(self,pssmName):
if not biopsy.UserAdmin.isAllowed :
raise Exception("Invalid user")
if (biopsy.UserAdmin.userType < 2) :
raise Exception("Operation not allowed for current user")
return bifa_server.get_pssm_counts(pssmName)
@soapmethod(_returns=Version)
def serverVersion(self):
if not biopsy.UserAdmin.isAllowed :
raise Exception("Invalid user")
v = Version()
v.majorVersion = 5
v.minorVersion = 1
return v
@soapmethod(_returns=String)
def transfacVersion(self):
v = "%d.%d" % (biopsy.Environment.transfac_major_version, biopsy.Environment.transfac_minor_version)
return v
@soapmethod(_returns=String)
def customPssmVersion(self):
v = biopsy.Environment.custom_PSSM_version
return v
@soapmethod(_returns=Integer)
def userType(self):
ut = biopsy.UserAdmin.userType
return ut
@soapmethod(String,_returns=Boolean)
def setPassword(self,newPassword):
if not biopsy.UserAdmin.isAllowed :
raise Exception("Invalid user")
return biopsy.UserAdmin.setPasswd(newPassword)
class ServerThread(threading.Thread):
def run(self):
from cherrypy.wsgiserver import CherryPyWSGIServer
# Not possible to use 127.0.0.1 as this does not link to an
# externally accessible interface
global server
server = CherryPyWSGIServer((localIp(), portNo), BiFa())
print "Started serving:"
server.start()
if __name__=='__main__':
# userData = userDataClass()
# regenerate the wsdl at every startup. This is not
# needed but keeps the code accessible.
# Server address is a dummy
client = make_service_client('http://%s' % (localIp()), BiFa())
f1 = open("bifa.wsdl", 'w')
f1.write(client.server.wsdl(''))
# Need to flush before close, otherwise we only get 4096 bytes!
f1.flush()
f1.close()
f2 = file("config.dat")
for line in f2:
line = line.strip()
key = line[ : line.find('=') ].strip()
value = line[ line.find('=') + 1 : ].strip()
if (key == "port"):
portNo = int(value)
f2.close()
print "Port no %i " % portNo
# start server on a second thread
# it = ServerThread()
# it.start()
from cherrypy.wsgiserver import CherryPyWSGIServer
# Not possible to use 127.0.0.1 as this does not link to an
# externally accessible interface
cu = CheckUserEngine()
# global server
server = CherryPyWSGIServer((localIp(), portNo), BiFa())
print "Started serving:"
server.start()
print "Finished"
| JohnReid/biopsy | Python/bio_server/main_server.py | Python | mit | 11,092 | 0.014245 |
import pytest
def test_pack_unpack():
header = ('json', 301)
from gpopup.ipc import _pack_header, _unpack_header
header_bytes = _pack_header(*header)
header_out = _unpack_header(header_bytes)
assert header == header_out
assert header[0] == header_out.type
assert header[1] == header_out.length
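    # The wire header is a (serialization method, payload length) pair; the
    # unpacked form exposes the same fields as .type and .length.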
def test_test_get_client(IpcServer):
Client = IpcServer.get_client()
c = Client()
s = IpcServer()
assert c.sock_name == s.sock_name
def test_ping(echo_client):
assert echo_client.ping() == True
def test_pargs(echo_client):
pargs = 9, 8, 7
args, kw = echo_client.echo(*pargs)
assert pargs == args
assert {} == kw
def test_kwargs(echo_client):
kwargs = {
'a': [0,1,2],
'b': 'some string',
'c': print,
}
args, kw = echo_client.echo(**kwargs)
assert () == args
assert kwargs == kw
def test_adding_cmds(MathServer):
Client = MathServer.get_client()
assert 'cos' in Client.__dict__
assert 'erf' in Client.__dict__
def test_calc(math_client):
import math
c = math_client
assert c.cos(0.5) == pytest.approx(math.cos(0.5))
assert c.erf(0.1) == pytest.approx(math.erf(0.1))
def test_json(IpcServer):
assert IpcServer.serial_method == 'pickle'
IpcServer.serial_method = 'json'
assert IpcServer.serial_method == 'json'
Client = IpcServer.get_client()
assert Client.serial_method == 'json'
c = Client()
c.start_server_maybe()
pargs, kwargs = c.echo(42)
assert c.serial_method == 'json'
assert kwargs == {}
assert pargs == [42,]
c.kill_server()
def test_no_server(IpcServer):
Client = IpcServer.get_client()
with pytest.raises(ConnectionError):
Client().ping()
def test_busy(IpcServer):
serv = IpcServer()
serv2 = IpcServer()
assert serv.sock_name == serv2.sock_name
Client = serv.get_client()
c = Client()
with pytest.raises(ConnectionError):
c.ping()
serv.run(background=True)
assert c.ping() == True
assert serv2.run() == False
c.kill_server()
def test_foreground(IpcServer):
serv = IpcServer()
Client = serv.get_client()
c = Client()
with pytest.raises(ConnectionError):
c.ping()
import threading
run = lambda: serv.run(background=False)
t = threading.Thread(target=run)
t.start()
assert c.ping() == True
assert c.echo(37, wow='okay') == ((37,), {'wow': 'okay'})
c.kill_server()
t.join(1)
def test_fail_cmd(echo_client):
assert echo_client.run_cmd('ping') == True
with pytest.raises(AttributeError):
echo_client.run_cmd('asdfasdf', 1, 3)
| frostidaho/python-gpopup | tests/test_ipc2.py | Python | bsd-2-clause | 2,655 | 0.008286 |
import bpy
camera = bpy.context.edit_movieclip.tracking.camera
camera.sensor_width = 23.6
camera.units = 'MILLIMETERS'
camera.pixel_aspect = 1
camera.k1 = 0.0
camera.k2 = 0.0
camera.k3 = 0.0
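# Preset for Nikon DX-format (APS-C) bodies: 23.6 mm sensor width, square
# pixels and no radial lens distortion (k1 = k2 = k3 = 0).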
| Microvellum/Fluid-Designer | win64-vc/2.78/scripts/presets/tracking_camera/Nikon_DX.py | Python | gpl-3.0 | 192 | 0 |
#!/usr/bin/env python
import logging
import boto.ec2
import config
class EC2Client:
def __init__(self):
self.conn = boto.ec2.connect_to_region(config.ec2_region)
def stop(self):
if self.get_status() in ['running', 'pending']:
logging.info('Stopping server...')
self.conn.stop_instances(instance_ids=[config.ec2_instance_id])
def start(self):
if self.get_status() == 'stopped':
logging.info('Starting server...')
self.conn.start_instances(instance_ids=[config.ec2_instance_id])
def get_status(self):
return self.get_instance()._state.name
def get_ip(self):
return self.get_instance().ip_address
def get_instance(self):
for instance in self.conn.get_only_instances():
if instance.id == config.ec2_instance_id:
return instance
return None
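# Minimal usage sketch (not part of the original module; assumes
# config.ec2_region / config.ec2_instance_id are set and boto credentials are
# available to the process):
#
#   client = EC2Client()
#   if client.get_status() == 'stopped':
#       client.start()
#   print(client.get_ip())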
| felixbade/minecraft-proxy | app/server_manager/ec2.py | Python | artistic-2.0 | 902 | 0.001109 |
#!/usr/bin/env python3
#
# Tests if the stochastic degradation (toy) model works.
#
# This file is part of PINTS (https://github.com/pints-team/pints/) which is
# released under the BSD 3-clause license. See accompanying LICENSE.md for
# copyright notice and full license details.
#
import unittest
import numpy as np
import pints
import pints.toy
from pints.toy import StochasticDegradationModel
class TestStochasticDegradationModel(unittest.TestCase):
"""
Tests if the stochastic degradation (toy) model works.
"""
def test_start_with_zero(self):
# Test the special case where the initial molecule count is zero
model = StochasticDegradationModel(0)
times = [0, 1, 2, 100, 1000]
parameters = [0.1]
values = model.simulate(parameters, times)
self.assertEqual(len(values), len(times))
self.assertTrue(np.all(values == np.zeros(5)))
def test_start_with_twenty(self):
# Run small simulation
model = pints.toy.StochasticDegradationModel(20)
times = [0, 1, 2, 100, 1000]
parameters = [0.1]
values = model.simulate(parameters, times)
self.assertEqual(len(values), len(times))
self.assertEqual(values[0], 20)
self.assertEqual(values[-1], 0)
self.assertTrue(np.all(values[1:] <= values[:-1]))
def test_suggested(self):
model = pints.toy.StochasticDegradationModel(20)
times = model.suggested_times()
parameters = model.suggested_parameters()
self.assertTrue(len(times) == 101)
self.assertTrue(parameters > 0)
def test_simulate(self):
times = np.linspace(0, 100, 101)
model = StochasticDegradationModel(20)
time, mol_count = model.simulate_raw([0.1])
values = model.interpolate_mol_counts(time, mol_count, times)
self.assertTrue(len(time), len(mol_count))
# Test output of Gillespie algorithm
self.assertTrue(np.all(mol_count == np.array(range(20, -1, -1))))
# Check simulate function returns expected values
self.assertTrue(np.all(values[np.where(times < time[1])] == 20))
# Check interpolation function works as expected
temp_time = np.array([np.random.uniform(time[0], time[1])])
self.assertEqual(
model.interpolate_mol_counts(time, mol_count, temp_time)[0],
20)
temp_time = np.array([np.random.uniform(time[1], time[2])])
self.assertEqual(
model.interpolate_mol_counts(time, mol_count, temp_time)[0],
19)
def test_mean_variance(self):
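        # For pure degradation with rate k and initial count n0 the analytic
        # moments are mean(t) = n0 * exp(-k t) and
        # var(t) = n0 * exp(-k t) * (1 - exp(-k t)); the assertions below are
        # these formulas with the exponentials expanded.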
# test mean
model = pints.toy.StochasticDegradationModel(10)
v_mean = model.mean([1], [5, 10])
self.assertEqual(v_mean[0], 10 * np.exp(-5))
self.assertEqual(v_mean[1], 10 * np.exp(-10))
model = pints.toy.StochasticDegradationModel(20)
v_mean = model.mean([5], [7.2])
self.assertEqual(v_mean[0], 20 * np.exp(-7.2 * 5))
# test variance
model = pints.toy.StochasticDegradationModel(10)
v_var = model.variance([1], [5, 10])
self.assertEqual(v_var[0], 10 * (np.exp(5) - 1.0) / np.exp(10))
self.assertAlmostEqual(v_var[1], 10 * (np.exp(10) - 1.0) / np.exp(20))
model = pints.toy.StochasticDegradationModel(20)
v_var = model.variance([2.0], [2.0])
self.assertAlmostEqual(v_var[0], 20 * (np.exp(4) - 1.0) / np.exp(8))
def test_errors(self):
model = pints.toy.StochasticDegradationModel(20)
# parameters, times cannot be negative
times = np.linspace(0, 100, 101)
parameters = [-0.1]
self.assertRaises(ValueError, model.simulate, parameters, times)
self.assertRaises(ValueError, model.mean, parameters, times)
self.assertRaises(ValueError, model.variance, parameters, times)
times_2 = np.linspace(-10, 10, 21)
parameters_2 = [0.1]
self.assertRaises(ValueError, model.simulate, parameters_2, times_2)
self.assertRaises(ValueError, model.mean, parameters_2, times_2)
self.assertRaises(ValueError, model.variance, parameters_2, times_2)
# this model should have 1 parameter
parameters_3 = [0.1, 1]
self.assertRaises(ValueError, model.simulate, parameters_3, times)
self.assertRaises(ValueError, model.mean, parameters_3, times)
self.assertRaises(ValueError, model.variance, parameters_3, times)
# Initial value can't be negative
self.assertRaises(ValueError, pints.toy.StochasticDegradationModel, -1)
if __name__ == '__main__':
unittest.main()
| martinjrobins/hobo | pints/tests/test_toy_stochastic_degradation_model.py | Python | bsd-3-clause | 4,627 | 0 |
# for backwards compatibility with earlier python versions
unit_test_module = None
def get_unit_test_module():
    # Prefer the stdlib unittest; fall back to the unittest2 backport on older
    # interpreters. The result is cached in the module-level variable above.
    global unit_test_module
    if unit_test_module is None:
        try:
            import unittest
            unit_test_module = unittest
        except ImportError:
            import unittest2
            unit_test_module = unittest2
    return unit_test_module
| parallel-fs-utils/fs-drift | unit_test_module.py | Python | apache-2.0 | 295 | 0.00339 |
# -*- coding: utf-8 -*-
#
# Copyright 2017 David García Goñi
#
# This file is part of Phatty.
#
# Phatty is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Phatty is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Phatty. If not, see <http://www.gnu.org/licenses/>.
import unittest
import os
import phatty
import mido
from mido import Message
import mock
from mock import Mock
from mock import call
from phatty.connector import Connector
from struct import unpack
BAD_BANK_FILE_NAME = os.path.join(
os.path.dirname(__file__), 'resources/preset.syx')
BANK_FILE_NAME = os.path.join(os.path.dirname(__file__), 'resources/bank.syx')
BULK_FILE_NAME = os.path.join(os.path.dirname(__file__), 'resources/bulk.syx')
class Test(unittest.TestCase):
def setUp(self):
self.connector = Connector()
self.connector.port = Mock()
def test_get_panel_as_preset(self):
def return_value():
return [i for i in range(0, 192)]
self.connector.get_panel = Mock(side_effect=return_value)
value = self.connector.get_panel_as_preset(37)
self.connector.get_panel.assert_called_once()
self.assertEqual(value[2], 0x5)
self.assertEqual(value[4], 37)
def test_get_panel(self):
def return_value():
return [i for i in range(0, 192)]
self.connector.tx_message = Mock()
self.connector.rx_message = Mock(side_effect=return_value)
value = self.connector.get_panel()
self.connector.tx_message.assert_called_once_with(
phatty.connector.REQUEST_PANEL)
self.connector.rx_message.assert_called_once()
self.assertEqual(value, return_value())
def test_get_preset(self):
def return_value():
return [i for i in range(0, 192)]
self.connector.tx_message = Mock()
self.connector.rx_message = Mock(side_effect=return_value)
value = self.connector.get_preset(37)
msg = []
msg.extend(phatty.connector.REQUEST_PATCH)
msg[phatty.connector.REQ_PATCH_BYTE] = 37
self.connector.tx_message.assert_called_once_with(msg)
self.connector.rx_message.assert_called_once()
self.assertEqual(value, return_value())
def test_set_preset(self):
self.connector.port.send = Mock()
self.connector.set_preset(37)
msg = Message('program_change', channel=0, program=37)
self.connector.port.send.assert_called_once_with(msg)
def test_set_bulk(self):
try:
data = []
data.extend(phatty.connector.BULK_START)
data.extend([0] * (phatty.connector.BULK_SIZE -
len(phatty.connector.BULK_START)))
self.connector.tx_message = Mock()
self.connector.set_bulk(data)
self.connector.tx_message.assert_called_once_with(data)
except ValueError as e:
self.assertTrue(False)
def test_set_bulk_red(self):
try:
data = []
data.extend(phatty.connector.BULK_START)
data.extend([0] * (phatty.connector.RED_BULK_SIZE -
len(phatty.connector.BULK_START)))
self.connector.tx_message = Mock()
self.connector.set_bulk(data)
self.connector.tx_message.assert_called_once_with(data)
except ValueError as e:
self.assertTrue(False)
def test_set_bulk_fail(self):
try:
data = []
self.connector.set_bulk(data)
self.assertTrue(False)
except ValueError as e:
self.assertTrue(str(e) == phatty.connector.INVALID_BULK_FILE)
def test_set_bank(self):
try:
data = []
data.extend(phatty.connector.BANK_START)
data.extend([0] * (phatty.connector.BANK_SIZE -
len(phatty.connector.BANK_START)))
self.connector.tx_message = Mock()
self.connector.set_bank(data)
self.connector.tx_message.assert_called_once_with(data)
except ValueError as e:
self.assertTrue(False)
def test_set_bank_red(self):
try:
data = []
data.extend(phatty.connector.BANK_START)
data.extend([0] * (phatty.connector.RED_BANK_SIZE -
len(phatty.connector.BANK_START)))
self.connector.tx_message = Mock()
self.connector.set_bank(data)
self.connector.tx_message.assert_called_once_with(data)
except ValueError as e:
self.assertTrue(False)
def test_set_bank_fail(self):
try:
data = []
self.connector.set_bank(data)
self.assertTrue(False)
except ValueError as e:
self.assertTrue(str(e) == phatty.connector.INVALID_BANK_FILE)
def set_bank_from_file(self, filename):
data = mido.read_syx_file(filename)[0].bytes()
data = data[1:len(data) - 1]
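        # Message.bytes() includes the SysEx framing bytes (0xF0 ... 0xF7);
        # strip them so `data` matches what the connector hands to
        # set_bank/set_bulk.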
self.connector.set_bank_from_file(filename)
return data
def test_set_bank_from_bank_file(self):
self.connector.set_bank = Mock()
data = self.set_bank_from_file(BANK_FILE_NAME)
self.connector.set_bank.assert_called_once_with(data)
def test_set_bank_from_bulk_file(self):
self.connector.set_bank = Mock(side_effect=ValueError)
self.connector.set_bulk = Mock()
data = self.set_bank_from_file(BULK_FILE_NAME)
self.connector.set_bank.assert_called_once_with(data)
self.connector.set_bulk.assert_called_once_with(data)
def test_set_bank_from_bank_file_error(self):
try:
self.connector.set_bank = Mock(side_effect=ValueError)
self.connector.set_bank_from_file(BAD_BANK_FILE_NAME)
self.assertTrue(False)
except ValueError:
self.assertTrue(True)
def test_write_data_to_file(self):
data = [1, 2, 3]
filename = 'foo'
messages = [Message('sysex', data=data)]
mido.write_syx_file = Mock()
self.connector.write_data_to_file(filename, data)
mido.write_syx_file.assert_called_once_with(filename, messages)
def return_sysex(filename):
data = [1, 2, 3]
return [Message('sysex', data=data)]
@mock.patch('mido.read_syx_file', side_effect=return_sysex)
def test_read_data_from_file(self, mock):
filename = 'foo'
data = self.connector.read_data_from_file(filename)
mido.read_syx_file.assert_called_once_with(filename)
self.assertEqual(data, [1, 2, 3])
def test_set_panel_name(self):
name = 'ABCabc123'
calls = []
calls.append(call(
Message('control_change', channel=0, control=119, value=0)))
calls.append(call(
Message('control_change', channel=0, control=66, value=19)))
calls.append(call(
Message('control_change', channel=0, control=66, value=15)))
calls.append(call(
Message('control_change', channel=0, control=66, value=13)))
calls.append(call(
Message('control_change', channel=0, control=66, value=1)))
for c in name:
calls.append(call(
Message('control_change', channel=0, control=66, value=ord(c))))
self.connector.port.send = Mock()
self.connector.set_panel_name(name)
self.connector.port.send.assert_has_calls(calls, any_order=False)
def check_send_message(self, function, control, array):
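        # Helper: for each index i, call the setter and assert that exactly one
        # control_change message on the given CC number was sent with the
        # mapped value from the table.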
for i in range(0, len(array)):
message = Message('control_change', channel=0,
control=control, value=array[i])
self.connector.port.send = Mock()
function(i)
self.connector.port.send.assert_called_once_with(message)
def test_set_lfo_midi_sync(self):
self.check_send_message(
self.connector.set_lfo_midi_sync, 102, phatty.connector.LFO_MIDI_SYNC_VALUES)
def test_set_panel_filter_poles(self):
self.check_send_message(
self.connector.set_panel_filter_poles, 109, phatty.connector.FILTER_POLES_VALUES)
def test_set_panel_vel_to_filter(self):
self.check_send_message(
self.connector.set_panel_vel_to_filter, 110, phatty.connector.VEL_TO_FILTER_VALUES)
def test_set_panel_vel_to_amp(self):
self.check_send_message(
self.connector.set_panel_vel_to_amp, 92, phatty.connector.VEL_TO_AMP_VALUES)
def test_set_panel_release(self):
self.check_send_message(
self.connector.set_panel_release, 88, phatty.connector.RELEASE_VALUES)
def test_set_panel_scale(self):
self.check_send_message(
self.connector.set_panel_scale, 113, phatty.connector.SCALE_VALUES)
def test_set_panel_pw_up_amount(self):
self.check_send_message(
self.connector.set_panel_pw_up_amount, 107, phatty.connector.PW_VALUES)
def test_set_panel_pw_down_amount(self):
self.check_send_message(
self.connector.set_panel_pw_down_amount, 108, phatty.connector.PW_VALUES)
def test_set_panel_legato(self):
self.check_send_message(
self.connector.set_panel_legato, 112, phatty.connector.LEGATO_VALUES)
def test_set_panel_keyboard_priority(self):
self.check_send_message(
self.connector.set_panel_keyboard_priority, 111, phatty.connector.KEYBOARD_PRIORITY_VALUES)
def test_set_panel_glide_on_legato(self):
self.check_send_message(
self.connector.set_panel_glide_on_legato, 94, phatty.connector.RELEASE_VALUES)
def test_set_panel_mod_source_5(self):
self.check_send_message(
self.connector.set_panel_mod_source_5, 104, phatty.connector.MOD_SRC_5_VALUES)
def test_set_panel_mod_source_6(self):
self.check_send_message(
self.connector.set_panel_mod_source_6, 105, phatty.connector.MOD_SRC_6_VALUES)
def test_set_panel_mod_dest_2(self):
self.check_send_message(
self.connector.set_panel_mod_dest_2, 106, phatty.connector.MOD_DEST_2_VALUES)
def test_set_panel_lfo_key_retrigger(self):
self.check_send_message(
self.connector.set_panel_lfo_key_retrigger, 93, phatty.connector.LFO_RETRIGGER_VALUES)
    def test_set_panel_arp_pattern(self):
self.check_send_message(
self.connector.set_panel_arp_pattern, 117, phatty.connector.ARP_PATTERN_VALUES)
def test_set_panel_arp_mode(self):
self.check_send_message(
self.connector.set_panel_arp_mode, 118, phatty.connector.ARP_MODE_VALUES)
def test_set_panel_arp_octaves(self):
self.check_send_message(
self.connector.set_panel_arp_octaves, 116, phatty.connector.ARP_OCTAVES_VALUES)
def test_set_panel_arp_gate(self):
self.check_send_message(
self.connector.set_panel_arp_gate, 95, phatty.connector.ARP_GATE_VALUES)
def test_set_panel_arp_clock_source(self):
self.check_send_message(
self.connector.set_panel_arp_clock_source, 114, phatty.connector.ARP_CLOCK_SOURCE_VALUES)
def test_set_panel_arp_clock_division(self):
self.check_send_message(
self.connector.set_panel_arp_clock_division, 115, phatty.connector.ARP_CLOCK_DIVISION_VALUES)
| dagargo/phatty | tests/test_connector.py | Python | gpl-3.0 | 11,804 | 0.001779 |
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
import action_chain_runner as acr
from st2actions.container.service import RunnerContainerService
from st2common.constants.action import LIVEACTION_STATUS_RUNNING
from st2common.constants.action import LIVEACTION_STATUS_SUCCEEDED
from st2common.constants.action import LIVEACTION_STATUS_CANCELED
from st2common.constants.action import LIVEACTION_STATUS_TIMED_OUT
from st2common.constants.action import LIVEACTION_STATUS_FAILED
from st2common.exceptions import actionrunner as runnerexceptions
from st2common.models.api.notification import NotificationsHelper
from st2common.models.db.liveaction import LiveActionDB
from st2common.models.db.keyvalue import KeyValuePairDB
from st2common.models.system.common import ResourceReference
from st2common.persistence.keyvalue import KeyValuePair
from st2common.persistence.runner import RunnerType
from st2common.services import action as action_service
from st2common.util import action_db as action_db_util
from st2common.exceptions.action import ParameterRenderingFailedException
from st2tests import DbTestCase
from st2tests.fixturesloader import FixturesLoader
class DummyActionExecution(object):
def __init__(self, status=LIVEACTION_STATUS_SUCCEEDED, result=''):
self.id = None
self.status = status
self.result = result
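# Stand-in for the liveaction objects normally returned by
# action_service.request(); only the attributes the chain runner reads
# (id, status, result) are provided.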
FIXTURES_PACK = 'generic'
TEST_MODELS = {
'actions': ['a1.yaml', 'a2.yaml', 'action_4_action_context_param.yaml'],
'runners': ['testrunner1.yaml']
}
MODELS = FixturesLoader().load_models(fixtures_pack=FIXTURES_PACK,
fixtures_dict=TEST_MODELS)
ACTION_1 = MODELS['actions']['a1.yaml']
ACTION_2 = MODELS['actions']['a2.yaml']
ACTION_3 = MODELS['actions']['action_4_action_context_param.yaml']
RUNNER = MODELS['runners']['testrunner1.yaml']
CHAIN_1_PATH = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain1.yaml')
CHAIN_2_PATH = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain2.yaml')
CHAIN_ACTION_CALL_NO_PARAMS_PATH = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_action_call_no_params.yaml')
CHAIN_NO_DEFAULT = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'no_default_chain.yaml')
CHAIN_NO_DEFAULT_2 = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'no_default_chain_2.yaml')
CHAIN_BAD_DEFAULT = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'bad_default_chain.yaml')
CHAIN_BROKEN_ON_SUCCESS_PATH_STATIC_TASK_NAME = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_broken_on_success_path_static_task_name.yaml')
CHAIN_BROKEN_ON_FAILURE_PATH_STATIC_TASK_NAME = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_broken_on_failure_path_static_task_name.yaml')
CHAIN_FIRST_TASK_RENDER_FAIL_PATH = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_first_task_parameter_render_fail.yaml')
CHAIN_SECOND_TASK_RENDER_FAIL_PATH = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_second_task_parameter_render_fail.yaml')
CHAIN_LIST_TEMP_PATH = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_list_template.yaml')
CHAIN_DICT_TEMP_PATH = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_dict_template.yaml')
CHAIN_DEP_INPUT = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_dependent_input.yaml')
CHAIN_DEP_RESULTS_INPUT = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_dep_result_input.yaml')
MALFORMED_CHAIN_PATH = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'malformedchain.yaml')
CHAIN_TYPED_PARAMS = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_typed_params.yaml')
CHAIN_SYSTEM_PARAMS = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_typed_system_params.yaml')
CHAIN_WITH_ACTIONPARAM_VARS = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_with_actionparam_vars.yaml')
CHAIN_WITH_SYSTEM_VARS = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_with_system_vars.yaml')
CHAIN_WITH_PUBLISH = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_with_publish.yaml')
CHAIN_WITH_PUBLISH_PARAM_RENDERING_FAILURE = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_publish_params_rendering_failure.yaml')
CHAIN_WITH_INVALID_ACTION = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_with_invalid_action.yaml')
CHAIN_ACTION_PARAMS_AND_PARAMETERS_ATTRIBUTE = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_action_params_and_parameters.yaml')
CHAIN_ACTION_PARAMS_ATTRIBUTE = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_action_params_attribute.yaml')
CHAIN_ACTION_PARAMETERS_ATTRIBUTE = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_action_parameters_attribute.yaml')
CHAIN_ACTION_INVALID_PARAMETER_TYPE = FixturesLoader().get_fixture_file_path_abs(
FIXTURES_PACK, 'actionchains', 'chain_invalid_parameter_type_passed_to_action.yaml')
CHAIN_NOTIFY_API = {'notify': {'on-complete': {'message': 'foo happened.'}}}
CHAIN_NOTIFY_DB = NotificationsHelper.to_model(CHAIN_NOTIFY_API)
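# Notify settings attached to the chain's liveaction in several tests below.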
@mock.patch.object(action_db_util, 'get_runnertype_by_name',
mock.MagicMock(return_value=RUNNER))
class TestActionChainRunner(DbTestCase):
def test_runner_creation(self):
runner = acr.get_runner()
self.assertTrue(runner)
self.assertTrue(runner.runner_id)
def test_malformed_chain(self):
try:
chain_runner = acr.get_runner()
chain_runner.entry_point = MALFORMED_CHAIN_PATH
chain_runner.action = ACTION_1
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
self.assertTrue(False, 'Expected pre_run to fail.')
except runnerexceptions.ActionRunnerPreRunError:
self.assertTrue(True)
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_chain_runner_success_path(self, request):
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_1_PATH
chain_runner.action = ACTION_1
action_ref = ResourceReference.to_string_reference(name=ACTION_1.name,
pack=ACTION_1.pack)
chain_runner.liveaction = LiveActionDB(action=action_ref)
chain_runner.liveaction.notify = CHAIN_NOTIFY_DB
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
chain_runner.run({})
self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
# based on the chain the callcount is known to be 3. Not great but works.
self.assertEqual(request.call_count, 3)
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_chain_runner_chain_second_task_times_out(self, request):
# Second task in the chain times out so the action chain status should be timeout
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_2_PATH
chain_runner.action = ACTION_1
original_run_action = chain_runner._run_action
def mock_run_action(*args, **kwargs):
original_live_action = args[0]
liveaction = original_run_action(*args, **kwargs)
if original_live_action.action == 'wolfpack.a2':
# Mock a timeout for second task
liveaction.status = LIVEACTION_STATUS_TIMED_OUT
return liveaction
chain_runner._run_action = mock_run_action
action_ref = ResourceReference.to_string_reference(name=ACTION_1.name,
pack=ACTION_1.pack)
chain_runner.liveaction = LiveActionDB(action=action_ref)
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
status, _, _ = chain_runner.run({})
self.assertEqual(status, LIVEACTION_STATUS_TIMED_OUT)
self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
# based on the chain the callcount is known to be 3. Not great but works.
self.assertEqual(request.call_count, 3)
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_chain_runner_task_is_canceled_while_running(self, request):
# Second task in the action is CANCELED, make sure runner doesn't get stuck in an infinite
# loop
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_2_PATH
chain_runner.action = ACTION_1
original_run_action = chain_runner._run_action
def mock_run_action(*args, **kwargs):
original_live_action = args[0]
if original_live_action.action == 'wolfpack.a2':
status = LIVEACTION_STATUS_CANCELED
else:
status = LIVEACTION_STATUS_SUCCEEDED
request.return_value = (DummyActionExecution(status=status), None)
liveaction = original_run_action(*args, **kwargs)
return liveaction
chain_runner._run_action = mock_run_action
action_ref = ResourceReference.to_string_reference(name=ACTION_1.name,
pack=ACTION_1.pack)
chain_runner.liveaction = LiveActionDB(action=action_ref)
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
status, _, _ = chain_runner.run({})
self.assertEqual(status, LIVEACTION_STATUS_CANCELED)
self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
# Chain count should be 2 since the last task doesn't get called since the second one was
# canceled
self.assertEqual(request.call_count, 2)
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_chain_runner_success_task_action_call_with_no_params(self, request):
# Make sure that the runner doesn't explode if task definition contains
# no "params" section
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_ACTION_CALL_NO_PARAMS_PATH
chain_runner.action = ACTION_1
action_ref = ResourceReference.to_string_reference(name=ACTION_1.name,
pack=ACTION_1.pack)
chain_runner.liveaction = LiveActionDB(action=action_ref)
chain_runner.liveaction.notify = CHAIN_NOTIFY_DB
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
chain_runner.run({})
self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
# based on the chain the callcount is known to be 3. Not great but works.
self.assertEqual(request.call_count, 3)
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_chain_runner_no_default(self, request):
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_NO_DEFAULT
chain_runner.action = ACTION_1
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
chain_runner.run({})
self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
# In case of this chain default_node is the first_node.
default_node = chain_runner.chain_holder.actionchain.default
first_node = chain_runner.chain_holder.actionchain.chain[0]
self.assertEqual(default_node, first_node.name)
# based on the chain the callcount is known to be 3. Not great but works.
self.assertEqual(request.call_count, 3)
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_chain_runner_no_default_multiple_options(self, request):
# subtle difference is that when there are multiple possible default nodes
# the order per chain definition may not be preseved. This is really a
# poorly formatted chain but we still the best attempt to work.
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_NO_DEFAULT_2
chain_runner.action = ACTION_1
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
chain_runner.run({})
self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
# In case of this chain default_node is the first_node.
default_node = chain_runner.chain_holder.actionchain.default
first_node = chain_runner.chain_holder.actionchain.chain[0]
self.assertEqual(default_node, first_node.name)
# based on the chain the callcount is known to be 2.
self.assertEqual(request.call_count, 2)
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_chain_runner_bad_default(self, request):
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_BAD_DEFAULT
chain_runner.action = ACTION_1
chain_runner.container_service = RunnerContainerService()
expected_msg = 'Unable to find node with name "bad_default" referenced in "default".'
self.assertRaisesRegexp(runnerexceptions.ActionRunnerPreRunError,
expected_msg, chain_runner.pre_run)
@mock.patch('eventlet.sleep', mock.MagicMock())
@mock.patch.object(action_db_util, 'get_liveaction_by_id', mock.MagicMock(
return_value=DummyActionExecution()))
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request',
return_value=(DummyActionExecution(status=LIVEACTION_STATUS_RUNNING), None))
def test_chain_runner_success_path_with_wait(self, request):
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_1_PATH
chain_runner.action = ACTION_1
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
chain_runner.run({})
self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
# based on the chain the callcount is known to be 3. Not great but works.
self.assertEqual(request.call_count, 3)
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request',
return_value=(DummyActionExecution(status=LIVEACTION_STATUS_FAILED), None))
def test_chain_runner_failure_path(self, request):
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_1_PATH
chain_runner.action = ACTION_1
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
status, _, _ = chain_runner.run({})
self.assertEqual(status, LIVEACTION_STATUS_FAILED)
self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
# based on the chain the callcount is known to be 2. Not great but works.
self.assertEqual(request.call_count, 2)
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request',
return_value=(DummyActionExecution(), None))
def test_chain_runner_broken_on_success_path_static_task_name(self, request):
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_BROKEN_ON_SUCCESS_PATH_STATIC_TASK_NAME
chain_runner.action = ACTION_1
chain_runner.container_service = RunnerContainerService()
expected_msg = ('Unable to find node with name "c5" referenced in "on-success" '
'in task "c2"')
self.assertRaisesRegexp(runnerexceptions.ActionRunnerPreRunError,
expected_msg, chain_runner.pre_run)
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request',
return_value=(DummyActionExecution(), None))
def test_chain_runner_broken_on_failure_path_static_task_name(self, request):
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_BROKEN_ON_FAILURE_PATH_STATIC_TASK_NAME
chain_runner.action = ACTION_1
chain_runner.container_service = RunnerContainerService()
expected_msg = ('Unable to find node with name "c6" referenced in "on-failure" '
'in task "c2"')
self.assertRaisesRegexp(runnerexceptions.ActionRunnerPreRunError,
expected_msg, chain_runner.pre_run)
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request', side_effect=RuntimeError('Test Failure.'))
def test_chain_runner_action_exception(self, request):
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_1_PATH
chain_runner.action = ACTION_1
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
status, results, _ = chain_runner.run({})
self.assertEqual(status, LIVEACTION_STATUS_FAILED)
self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
# based on the chain the callcount is known to be 2. Not great but works.
self.assertEqual(request.call_count, 2)
error_count = 0
for task_result in results['tasks']:
if task_result['result'].get('error', None):
error_count += 1
self.assertEqual(error_count, 2)
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_chain_runner_str_param_temp(self, request):
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_FIRST_TASK_RENDER_FAIL_PATH
chain_runner.action = ACTION_1
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
chain_runner.run({'s1': 1, 's2': 2, 's3': 3, 's4': 4})
self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
mock_args, _ = request.call_args
self.assertEqual(mock_args[0].parameters, {"p1": "1"})
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_chain_runner_list_param_temp(self, request):
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_LIST_TEMP_PATH
chain_runner.action = ACTION_1
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
chain_runner.run({'s1': 1, 's2': 2, 's3': 3, 's4': 4})
self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
mock_args, _ = request.call_args
self.assertEqual(mock_args[0].parameters, {"p1": "[2, 3, 4]"})
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_chain_runner_dict_param_temp(self, request):
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_DICT_TEMP_PATH
chain_runner.action = ACTION_1
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
chain_runner.run({'s1': 1, 's2': 2, 's3': 3, 's4': 4})
self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
expected_value = {"p1": {"p1.3": "[3, 4]", "p1.2": "2", "p1.1": "1"}}
mock_args, _ = request.call_args
self.assertEqual(mock_args[0].parameters, expected_value)
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request',
return_value=(DummyActionExecution(result={'o1': '1'}), None))
def test_chain_runner_dependent_param_temp(self, request):
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_DEP_INPUT
chain_runner.action = ACTION_1
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
chain_runner.run({'s1': 1, 's2': 2, 's3': 3, 's4': 4})
self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
expected_values = [{u'p1': u'1'},
{u'p1': u'1'},
{u'p2': u'1', u'p3': u'1', u'p1': u'1'}]
# Each of the call_args must be one of
for call_args in request.call_args_list:
self.assertTrue(call_args[0][0].parameters in expected_values)
expected_values.remove(call_args[0][0].parameters)
self.assertEqual(len(expected_values), 0, 'Not all expected values received.')
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request',
return_value=(DummyActionExecution(result={'o1': '1'}), None))
def test_chain_runner_dependent_results_param(self, request):
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_DEP_RESULTS_INPUT
chain_runner.action = ACTION_1
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
chain_runner.run({'s1': 1})
self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
expected_values = [{u'p1': u'1'},
{u'p1': u'1'},
{u'out': u"{'c2': {'o1': '1'}, 'c1': {'o1': '1'}}"}]
# Each of the call_args must be one of
self.assertEqual(request.call_count, 3)
for call_args in request.call_args_list:
self.assertTrue(call_args[0][0].parameters in expected_values)
expected_values.remove(call_args[0][0].parameters)
self.assertEqual(len(expected_values), 0, 'Not all expected values received.')
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(RunnerType, 'get_by_name',
mock.MagicMock(return_value=RUNNER))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_chain_runner_missing_param_temp(self, request):
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_FIRST_TASK_RENDER_FAIL_PATH
chain_runner.action = ACTION_1
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
chain_runner.run({})
self.assertEqual(request.call_count, 0, 'No call expected.')
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_chain_runner_failure_during_param_rendering_single_task(self, request):
# Parameter rendering should result in a top level error which aborts
# the whole chain
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_FIRST_TASK_RENDER_FAIL_PATH
chain_runner.action = ACTION_1
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
status, result, _ = chain_runner.run({})
# No tasks ran because rendering of parameters for the first task failed
self.assertEqual(status, LIVEACTION_STATUS_FAILED)
self.assertEqual(result['tasks'], [])
self.assertTrue('error' in result)
self.assertTrue('traceback' in result)
self.assertTrue('Failed to run task "c1". Parameter rendering failed' in result['error'])
self.assertTrue('Traceback' in result['traceback'])
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_chain_runner_failure_during_param_rendering_multiple_tasks(self, request):
# Parameter rendering should result in a top level error which aborts
# the whole chain
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_SECOND_TASK_RENDER_FAIL_PATH
chain_runner.action = ACTION_1
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
status, result, _ = chain_runner.run({})
# Verify that only first task has ran
self.assertEqual(status, LIVEACTION_STATUS_FAILED)
self.assertEqual(len(result['tasks']), 1)
self.assertEqual(result['tasks'][0]['name'], 'c1')
expected_error = ('Failed rendering value for action parameter "p1" in '
'task "c2" (template string={{s1}}):')
self.assertTrue('error' in result)
self.assertTrue('traceback' in result)
self.assertTrue('Failed to run task "c2". Parameter rendering failed' in result['error'])
self.assertTrue(expected_error in result['error'])
self.assertTrue('Traceback' in result['traceback'])
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_2))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_chain_runner_typed_params(self, request):
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_TYPED_PARAMS
chain_runner.action = ACTION_2
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
chain_runner.run({'s1': 1, 's2': 'two', 's3': 3.14})
self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
expected_value = {'booltype': True,
'inttype': 1,
'numbertype': 3.14,
'strtype': 'two',
'arrtype': ['1', 'two'],
'objtype': {'s2': 'two',
'k1': '1'}}
mock_args, _ = request.call_args
self.assertEqual(mock_args[0].parameters, expected_value)
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_2))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_chain_runner_typed_system_params(self, request):
kvps = []
try:
kvps.append(KeyValuePair.add_or_update(KeyValuePairDB(name='a', value='1')))
kvps.append(KeyValuePair.add_or_update(KeyValuePairDB(name='a.b.c', value='two')))
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_SYSTEM_PARAMS
chain_runner.action = ACTION_2
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
chain_runner.run({})
self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
expected_value = {'inttype': 1,
'strtype': 'two'}
mock_args, _ = request.call_args
self.assertEqual(mock_args[0].parameters, expected_value)
finally:
for kvp in kvps:
KeyValuePair.delete(kvp)
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_2))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_chain_runner_vars_system_params(self, request):
kvps = []
try:
kvps.append(KeyValuePair.add_or_update(KeyValuePairDB(name='a', value='two')))
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_WITH_SYSTEM_VARS
chain_runner.action = ACTION_2
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
chain_runner.run({})
self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
expected_value = {'inttype': 1,
'strtype': 'two',
'strtype_legacy': 'two',
'booltype': True}
mock_args, _ = request.call_args
self.assertEqual(mock_args[0].parameters, expected_value)
finally:
for kvp in kvps:
KeyValuePair.delete(kvp)
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_2))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_chain_runner_vars_action_params(self, request):
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_WITH_ACTIONPARAM_VARS
chain_runner.action = ACTION_2
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
chain_runner.run({'input_a': 'two'})
self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
expected_value = {'inttype': 1,
'strtype': 'two',
'booltype': True}
mock_args, _ = request.call_args
self.assertEqual(mock_args[0].parameters, expected_value)
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_2))
@mock.patch.object(action_service, 'request',
return_value=(DummyActionExecution(result={'raw_out': 'published'}), None))
def test_chain_runner_publish(self, request):
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_WITH_PUBLISH
chain_runner.action = ACTION_2
chain_runner.container_service = RunnerContainerService()
chain_runner.runner_parameters = {'display_published': True}
chain_runner.pre_run()
action_parameters = {'action_param_1': 'test value 1'}
_, result, _ = chain_runner.run(action_parameters=action_parameters)
# We also assert that the action parameters are available in the
# "publish" scope
self.assertNotEqual(chain_runner.chain_holder.actionchain, None)
expected_value = {'inttype': 1,
'strtype': 'published',
'booltype': True,
'published_action_param': action_parameters['action_param_1']}
mock_args, _ = request.call_args
self.assertEqual(mock_args[0].parameters, expected_value)
# Assert that the variables are correctly published
self.assertEqual(result['published'],
{'published_action_param': u'test value 1', 'o1': u'published'})
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_chain_runner_publish_param_rendering_failure(self, request):
# Parameter rendering should result in a top level error which aborts
# the whole chain
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_WITH_PUBLISH_PARAM_RENDERING_FAILURE
chain_runner.action = ACTION_1
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
try:
chain_runner.run({})
except ParameterRenderingFailedException as e:
# TODO: Should we treat this as task error? Right now it bubbles all
# the way up and it's not really consistent with action param
# rendering failure
expected_error = ('Failed rendering value for publish parameter "p1" in '
'task "c2" (template string={{ not_defined }}):')
self.assertTrue(expected_error in str(e))
pass
else:
self.fail('Exception was not thrown')
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_2))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_chain_task_passes_invalid_parameter_type_to_action(self, mock_request):
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_ACTION_INVALID_PARAMETER_TYPE
chain_runner.action = ACTION_2
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
action_parameters = {}
expected_msg = ('Failed to cast value "stringnotanarray" \(type: str\) for parameter '
'"arrtype" of type "array"')
self.assertRaisesRegexp(ValueError, expected_msg, chain_runner.run,
action_parameters=action_parameters)
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=None))
@mock.patch.object(action_service, 'request',
return_value=(DummyActionExecution(result={'raw_out': 'published'}), None))
def test_action_chain_runner_referenced_action_doesnt_exist(self, mock_request):
# Action referenced by a task doesn't exist, should result in a top level error
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_WITH_INVALID_ACTION
chain_runner.action = ACTION_2
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
action_parameters = {}
status, output, _ = chain_runner.run(action_parameters=action_parameters)
expected_error = ('Failed to run task "c1". Action with reference "wolfpack.a2" '
'doesn\'t exist.')
self.assertEqual(status, LIVEACTION_STATUS_FAILED)
self.assertTrue(expected_error in output['error'])
self.assertTrue('Traceback' in output['traceback'], output['traceback'])
def test_exception_is_thrown_if_both_params_and_parameters_attributes_are_provided(self):
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_ACTION_PARAMS_AND_PARAMETERS_ATTRIBUTE
chain_runner.action = ACTION_2
chain_runner.container_service = RunnerContainerService()
expected_msg = ('Either "params" or "parameters" attribute needs to be provided, but '
'not both')
self.assertRaisesRegexp(runnerexceptions.ActionRunnerPreRunError, expected_msg,
chain_runner.pre_run)
@mock.patch.object(action_db_util, 'get_action_by_ref',
mock.MagicMock(return_value=ACTION_1))
@mock.patch.object(action_service, 'request', return_value=(DummyActionExecution(), None))
def test_params_and_parameters_attributes_both_work(self, _):
# "params" attribute used
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_ACTION_PARAMS_ATTRIBUTE
chain_runner.action = ACTION_2
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
original_build_liveaction_object = chain_runner._build_liveaction_object
def mock_build_liveaction_object(action_node, resolved_params, parent_context):
# Verify parameters are correctly passed to the action
self.assertEqual(resolved_params, {'pparams': 'v1'})
original_build_liveaction_object(action_node=action_node,
resolved_params=resolved_params,
parent_context=parent_context)
chain_runner._build_liveaction_object = mock_build_liveaction_object
action_parameters = {}
status, output, _ = chain_runner.run(action_parameters=action_parameters)
self.assertEqual(status, LIVEACTION_STATUS_SUCCEEDED)
# "parameters" attribute used
chain_runner = acr.get_runner()
chain_runner.entry_point = CHAIN_ACTION_PARAMETERS_ATTRIBUTE
chain_runner.action = ACTION_2
chain_runner.container_service = RunnerContainerService()
chain_runner.pre_run()
def mock_build_liveaction_object(action_node, resolved_params, parent_context):
# Verify parameters are correctly passed to the action
self.assertEqual(resolved_params, {'pparameters': 'v1'})
original_build_liveaction_object(action_node=action_node,
resolved_params=resolved_params,
parent_context=parent_context)
chain_runner._build_liveaction_object = mock_build_liveaction_object
action_parameters = {}
status, output, _ = chain_runner.run(action_parameters=action_parameters)
self.assertEqual(status, LIVEACTION_STATUS_SUCCEEDED)
@classmethod
def tearDownClass(cls):
FixturesLoader().delete_models_from_db(MODELS)
| punalpatel/st2 | contrib/runners/action_chain_runner/tests/unit/test_actionchain.py | Python | apache-2.0 | 40,041 | 0.002273 |
# Various helper functions for printing specific values
# in human-readable format
import sys
import time
import pprint
# pretty print object
pp = pprint.PrettyPrinter(indent=4)
def pretty_print(value):
pp.pprint(value)
# print timedelta, provided in seconds,
# in human-readable format
def print_elapsed_time(timedelta):
gm_timedelta = time.gmtime(timedelta)
hours = int(time.strftime('%H', gm_timedelta))
minutes = int(time.strftime('%M', gm_timedelta))
seconds = int(time.strftime('%S', gm_timedelta))
print('Total time elapsed: ', end='')
if hours > 0:
print('{0} hours, '.format(hours), end='')
if minutes > 0:
print('{0} minutes, '.format(minutes), end='')
print('{0} seconds.'.format(seconds), end='')
print()
def print_progress(cur_value, max_value, width=72):
"""Print progress bar in form: [###-------]."""
progress = int((cur_value * 100) / max_value)
# effective width -- width of bar without brackets
e_width = width - 2
# number of "#" in bar
num_hashes = int((cur_value * e_width) / max_value)
num_minuses = e_width - num_hashes
sys.stdout.write('\r[{hashes}{minuses}] '
'{percentage}%'.format(hashes='#' * num_hashes,
minuses='-' * num_minuses,
percentage=progress))
sys.stdout.flush()
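# Demonstration only (not part of the original module): a minimal usage sketch
# showing how print_progress() and print_elapsed_time() might be combined; the
# step count and sleep interval below are arbitrary.
if __name__ == '__main__':
    demo_start = time.time()
    for step in range(101):
        print_progress(step, 100)
        time.sleep(0.02)
    print()  # finish the progress bar line
    print_elapsed_time(time.time() - demo_start)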
| budnyjj/vkstat | utils/print.py | Python | mit | 1,418 | 0.000705 |
# Generated by Django 2.0.10 on 2019-03-12 17:51
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('erudit', '0110_auto_20181123_1558'),
]
operations = [
migrations.AlterField(
model_name='issue',
name='is_published',
field=models.BooleanField(default=False, verbose_name='Est publié'),
),
]
| erudit/zenon | eruditorg/erudit/migrations/0111_auto_20190312_1251.py | Python | gpl-3.0 | 422 | 0.002375 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Manage modules
"""
import sys, os
import time
import grp
#from subprocess import Popen, PIPE
from pathlib import Path
import shutil
from getpass import getpass
__appname__ = "manager"
__licence__ = "none"
__version__ = "0.1"
__author__ = "Benoit Guibert <benoit.guibert@free.fr>"
__shortdesc__ = "Manage {} modules".format(sys.argv[0].split('/')[-1])
__opts__ = []
def autocomplete (parent):
### Build autocompletion file
module_path = "/".join(os.path.realpath(__file__).split('/')[:-2])
modules = " ".join(os.listdir(module_path))
    # content of autocompletion file
    content = """# Built by {parent} to update module completion
_{parent}()
{op}
local cur prev opts
COMPREPLY=()
cur="${op}COMP_WORDS[COMP_CWORD]{cp}"
prev="${op}COMP_WORDS[COMP_CWORD-1]{cp}"
opts="{modules}"
case $prev in
{parent})
COMPREPLY=( $(compgen -W "${op}opts{cp}" -- ${op}cur{cp}) )
;;
esac
return 0
{cp}
complete -F _{parent} -o default {parent}
""".format(parent=parent, modules=modules, op='{', cp='}')
### check if bash_completion is here
autocomplete_dir = str(Path.home()) + '/.bashrc.d'
if not os.path.exists(autocomplete_dir):
os.makedirs(autocomplete_dir)
### Modify .bashrc if not entry
bashrc_file = str(Path.home()) + '/.bashrc'
keyword = '.bashrc.d/' + parent + '_completion'
print(keyword)
    bashrc_new_header = "\n# built by {parent}, do not change it!\n".format(parent=parent)
bashrc_new_body = "[ -d $HOME/.bashrc.d ] && source $HOME/.bashrc.d/{parent}_completion\n".format(parent=parent)
with open( bashrc_file, 'r+') as stream:
bashrc = stream.read()
if not keyword in bashrc:
stream.write(bashrc_new_header + bashrc_new_body)
### Write completion file
bold = '\033[1m'
end = '\033[0m'
completion_file = autocomplete_dir + '/' + parent + '_completion'
with open(completion_file, 'w') as file:
file.write(content)
print('\nPlease execute :\n{bold}source {comp_file}{end}\nto refresh {parent} completion\n'.
format(comp_file=completion_file, parent=parent, bold=bold, end=end))
def appContent(parent, appname, shortdesc):
newappcontent = """#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import sys, os
import argparse
__appname__ = "{app}"
__licence__ = "none"
__version__ = "0.1"
__date__ = '{date}'
__modified__ = '{date}'
__author__ = "John Doe <john.doe@exemple.com>"
__shortdesc__ = "{desc}"
def usage(appname):
# https://docs.python.org/3/howto/argparse.html?highlight=argparse
    # add a sub-name in --help (like: {parent} sub-name options) when the command is not run standalone
    # and delete the sub-name from sys.argv (to avoid confusion with arguments)
subname = ""
if not __appname__ in sys.argv[0] and __appname__ in sys.argv[1]:
subname = "{par}".format(__appname__)
del sys.argv[1]
usage = ('{ret}{par}{nwl}{par}.' # {ret}{par} to replace the header 'usage:'
'{nwl}Version: {par}{nwl}{nwl}' # version number
' usage: %(prog)s {par} options' # usage : prog [sub-name] options...
.format(''.ljust(len('usage:')), __shortdesc__, __version__, subname)
)
parser = argparse.ArgumentParser(usage=usage)
### OPTION
parser.add_argument("option1", # mandatory positional argument
help = 'mandatory file (one or more)', # help text
nargs = "+", # argument options number
metavar = ("file_1 [file_n]"), # option name to display
)
### ARGUMENT WITH OPTION
parser.add_argument("-g", "--genome",
help = "reference genome (fasta file)",
metavar = 'genome',
nargs = 1,
required = True,
)
### ARGUMENT WITHOUT OPTION
parser.add_argument('--verbose', # positional argument
action = "store_true", # argument doesn't need option, i.e. tag
help = "Increase volubility",
)
### ARGUMENT WITH PREDEFINED OPTION
parser.add_argument("-n", "--number", # positional argument
type = int, # must be an integer
choices = [1,2,3], # between 1 and 3
help = "a number from 1 to 3",
)
### VERSIONNING
parser.add_argument('-v', '--version', # positional argument
action='version', # action "version" is executed and programm stop
version="%(prog)s version: {par}".format(__version__) # the action "version"
)
### Go to "usage()" without arguments
if len(sys.argv) == 1: # or (__appname__ != appname and len(sys.argv) == 2):
parser.print_help()
sys.exit(1)
return parser.parse_args(), subname
def main(appname):
args, module = usage(appname)
print("Application: ", appname, module)
print(args)
print("Work in progress...")
if __name__ == "__main__":
main(__appname__)
""".format(date=time.strftime('%Y-%m-%d') , parent=parent, app=appname,desc=shortdesc, par="{}", nwl="\\n", ret="\\r", tab="\\t" )
return newappcontent
def writeApp(appname, parent, shortdesc):
    # find the location of the parent application
    modulesdir = (os.path.dirname((os.path.dirname(__file__))))
    # check write permissions on the directory
    # ...
    parentdir = os.path.dirname(modulesdir)
    # copy the app into <parent>/modules/<app>/<app.py>
appdir = modulesdir + "/" + appname
appfull = modulesdir + "/" + appname + "/" + appname + ".py"
if os.path.isdir(appdir):
        print("\n Module '{}' already exists, aborting...".format(appname))
print(" Remove '{}' directory to continue\n".format(appdir))
sys.exit()
os.mkdir(appdir)
with open( appfull, "w") as fic:
fic.write(appContent(parent, appname, shortdesc))
st = os.stat(appfull)
os.chmod(appfull, st.st_mode | 0o111)
    # update the file <parent>/bash_completion.d/bentools
    # if running as root or with sudo, overwrite /etc/bash_completion.d/bentools
    # reload the file /etc/bash_completion.d/bentools
    # display a message indicating where the app is located
print("\nModule {} has been created in directory {}".format(appname, appdir))
return True
def deleteApp (appname, parent, shortdesc):
""" Function doc """
    # find the location of the parent application
    modulesdir = (os.path.dirname((os.path.dirname(__file__))))
    # check write permissions on the directory
    # ...
    parentdir = os.path.dirname(modulesdir)
    # the app to remove lives in <parent>/modules/<app>/<app.py>
appdir = modulesdir + "/" + appname
if not os.path.isdir(appdir):
        print("\n Module '{}' not found, aborting...\n".format(appname))
sys.exit()
shutil.rmtree(appdir)
print('\nModule {} has been deleted'.format(appname))
return True
def argsChk(parent):
    """Check arguments and return the requested action."""
    args = sys.argv[1:] if __appname__ in sys.argv[0] else sys.argv[2:]
if "-h" in args:
__opts__.append("-h")
args.remove("-h")
helpme(parent)
try:
if '--add' in args:
ind = args.index('--add')
return { 'type':'add', 'name': args[ind+1]}
if '--del' in args:
ind = args.index('--del')
return { 'type':'del', 'name': args[ind+1]}
if '--complete' in args:
ind = args.index('--complete')
return { 'type':'complete'}
except IndexError:
helpme(parent)
    # no recognized option: display help and exit
    helpme(parent)
def helpme(parent):
print("\n{}\n".format(__shortdesc__))
if parent == __appname__:
print("Usage:")
print(" {} --add <app_name>\t: append a new app".format(__appname__))
print(" {} --del <app_name>\t: delete an app".format(__appname__))
print(" {} --complete\t\t: update auto-completion\n".format(__appname__))
else:
print("Usage:")
print(" {} {} --add <app_name>\t: append a new app".format(parent, __appname__))
print(" {} {} --del <app_name>\t: delete an app".format(parent, __appname__))
print(" {} {} --complete\t\t: update auto-completion\n".format(parent, __appname__))
sys.exit()
def main(parent):
args = argsChk(parent)
    shortdesc = "short app description"  # TODO: ask for a short description
complete_ok = False # to autocomplete
if args['type'] == 'add':
complete_ok = writeApp(args['name'], parent, shortdesc)
if args['type'] == 'del':
complete_ok = deleteApp(args['name'], parent, shortdesc)
if args['type'] == 'complete':
complete_ok = True
if complete_ok: autocomplete(parent)
if __name__ == "__main__":
main(__appname__) # required
| hetica/bentools | modules/manager/manager.py | Python | gpl-3.0 | 9,426 | 0.006265 |
# Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
import urllib
from copy import deepcopy
from datetime import date
from logging import getLogger
from urlparse import urljoin
from sqlalchemy import and_
from ggrc import db
from ggrc import utils
from ggrc.models.revision import Revision
from ggrc.notifications import data_handlers
from ggrc.utils import merge_dicts, get_url_root
from ggrc_basic_permissions.models import Role, UserRole
from ggrc_workflows.models import Cycle
from ggrc_workflows.models import CycleTaskGroupObjectTask
from ggrc_workflows.models import Workflow
# pylint: disable=invalid-name
logger = getLogger(__name__)
"""
exposed functions
get_cycle_data,
get_workflow_data,
get_cycle_task_data,
"""
def get_cycle_created_task_data(notification):
cycle_task = get_object(CycleTaskGroupObjectTask, notification.object_id)
if not cycle_task:
logger.warning(
'%s for notification %s not found.',
notification.object_type, notification.id)
return {}
cycle_task_group = cycle_task.cycle_task_group
cycle = cycle_task_group.cycle
force = cycle.workflow.notify_on_change
task_assignee = data_handlers.get_person_dict(cycle_task.contact)
task_group_assignee = data_handlers.get_person_dict(cycle_task_group.contact)
workflow_owners = get_workflow_owners_dict(cycle.context_id)
task = {
cycle_task.id: get_cycle_task_dict(cycle_task)
}
result = {}
assignee_data = {
task_assignee['email']: {
"user": task_assignee,
"force_notifications": {
notification.id: force
},
"cycle_data": {
cycle.id: {
"my_tasks": deepcopy(task)
}
}
}
}
tg_assignee_data = {
task_group_assignee['email']: {
"user": task_group_assignee,
"force_notifications": {
notification.id: force
},
"cycle_data": {
cycle.id: {
"my_task_groups": {
cycle_task_group.id: deepcopy(task)
}
}
}
}
}
for workflow_owner in workflow_owners.itervalues():
wf_owner_data = {
workflow_owner['email']: {
"user": workflow_owner,
"force_notifications": {
notification.id: force
},
"cycle_data": {
cycle.id: {
"cycle_tasks": deepcopy(task)
}
}
}
}
result = merge_dicts(result, wf_owner_data)
return merge_dicts(result, assignee_data, tg_assignee_data)
def get_cycle_task_due(notification):
cycle_task = get_object(CycleTaskGroupObjectTask, notification.object_id)
if not cycle_task:
logger.warning(
'%s for notification %s not found.',
notification.object_type, notification.id)
return {}
if not cycle_task.contact:
logger.warning(
'Contact for cycle task %s not found.',
notification.object_id)
return {}
notif_name = notification.notification_type.name
due = "due_today" if notif_name == "cycle_task_due_today" else "due_in"
force = cycle_task.cycle_task_group.cycle.workflow.notify_on_change
return {
cycle_task.contact.email: {
"user": data_handlers.get_person_dict(cycle_task.contact),
"force_notifications": {
notification.id: force
},
due: {
cycle_task.id: get_cycle_task_dict(cycle_task)
}
}
}
def get_all_cycle_tasks_completed_data(notification, cycle):
workflow_owners = get_workflow_owners_dict(cycle.context_id)
force = cycle.workflow.notify_on_change
result = {}
for workflow_owner in workflow_owners.itervalues():
wf_data = {
workflow_owner['email']: {
"user": workflow_owner,
"force_notifications": {
notification.id: force
},
"all_tasks_completed": {
cycle.id: get_cycle_dict(cycle)
}
}
}
result = merge_dicts(result, wf_data)
return result
def get_cycle_created_data(notification, cycle):
if not cycle.is_current:
return {}
manual = notification.notification_type.name == "manual_cycle_created"
force = cycle.workflow.notify_on_change
result = {}
for user_role in cycle.workflow.context.user_roles:
person = user_role.person
result[person.email] = {
"user": data_handlers.get_person_dict(person),
"force_notifications": {
notification.id: force
},
"cycle_started": {
cycle.id: get_cycle_dict(cycle, manual)
}
}
return result
def get_cycle_data(notification):
cycle = get_object(Cycle, notification.object_id)
if not cycle:
return {}
notification_name = notification.notification_type.name
if notification_name in ["manual_cycle_created", "cycle_created"]:
return get_cycle_created_data(notification, cycle)
elif notification_name == "all_cycle_tasks_completed":
return get_all_cycle_tasks_completed_data(notification, cycle)
return {}
def get_cycle_task_declined_data(notification):
cycle_task = get_object(CycleTaskGroupObjectTask, notification.object_id)
if not cycle_task or not cycle_task.contact:
logger.warning(
'%s for notification %s not found.',
notification.object_type, notification.id)
return {}
force = cycle_task.cycle_task_group.cycle.workflow.notify_on_change
return {
cycle_task.contact.email: {
"user": data_handlers.get_person_dict(cycle_task.contact),
"force_notifications": {
notification.id: force
},
"task_declined": {
cycle_task.id: get_cycle_task_dict(cycle_task)
}
}
}
def get_cycle_task_data(notification):
cycle_task = get_object(CycleTaskGroupObjectTask, notification.object_id)
if not cycle_task or not cycle_task.cycle_task_group.cycle.is_current:
return {}
notification_name = notification.notification_type.name
if notification_name in ["manual_cycle_created", "cycle_created"]:
return get_cycle_created_task_data(notification)
elif notification_name == "cycle_task_declined":
return get_cycle_task_declined_data(notification)
elif notification_name in ["cycle_task_due_in",
"one_time_cycle_task_due_in",
"weekly_cycle_task_due_in",
"monthly_cycle_task_due_in",
"quarterly_cycle_task_due_in",
"annually_cycle_task_due_in",
"cycle_task_due_today"]:
return get_cycle_task_due(notification)
return {}
def get_workflow_starts_in_data(notification, workflow):
if workflow.status != "Active":
return {}
if (not workflow.next_cycle_start_date or
workflow.next_cycle_start_date < date.today()):
return {} # this can only be if the cycle has successfully started
result = {}
workflow_owners = get_workflow_owners_dict(workflow.context_id)
force = workflow.notify_on_change
for user_roles in workflow.context.user_roles:
wf_person = user_roles.person
result[wf_person.email] = {
"user": data_handlers.get_person_dict(wf_person),
"force_notifications": {
notification.id: force
},
"cycle_starts_in": {
workflow.id: {
"workflow_owners": workflow_owners,
"workflow_url": get_workflow_url(workflow),
"start_date": workflow.next_cycle_start_date,
"start_date_statement": utils.get_digest_date_statement(
workflow.next_cycle_start_date, "start", True),
"custom_message": workflow.notify_custom_message,
"title": workflow.title,
}
}
}
return result
def get_cycle_start_failed_data(notification, workflow):
if workflow.status != "Active":
return {}
if (not workflow.next_cycle_start_date or
workflow.next_cycle_start_date >= date.today()):
return {} # this can only be if the cycle has successfully started
result = {}
workflow_owners = get_workflow_owners_dict(workflow.context_id)
force = workflow.notify_on_change
for wf_owner in workflow_owners.itervalues():
result[wf_owner["email"]] = {
"user": wf_owner,
"force_notifications": {
notification.id: force
},
"cycle_start_failed": {
workflow.id: {
"workflow_owners": workflow_owners,
"workflow_url": get_workflow_url(workflow),
"start_date": workflow.next_cycle_start_date,
"start_date_statement": utils.get_digest_date_statement(
workflow.next_cycle_start_date, "start", True),
"custom_message": workflow.notify_custom_message,
"title": workflow.title,
}
}
}
return result
def get_workflow_data(notification):
workflow = get_object(Workflow, notification.object_id)
if not workflow:
return {}
if workflow.frequency == "one_time":
# one time workflows get cycles manually created and that triggers
# the instant notification.
return {}
if "_workflow_starts_in" in notification.notification_type.name:
return get_workflow_starts_in_data(notification, workflow)
if "cycle_start_failed" == notification.notification_type.name:
return get_cycle_start_failed_data(notification, workflow)
return {}
def get_object(obj_class, obj_id):
result = db.session.query(obj_class).filter(obj_class.id == obj_id)
if result.count() == 1:
return result.one()
return None
def get_workflow_owners_dict(context_id):
owners = db.session.query(UserRole).join(Role).filter(
and_(UserRole.context_id == context_id,
Role.name == "WorkflowOwner")).all()
return {user_role.person.id: data_handlers.get_person_dict(user_role.person)
for user_role in owners}
def _get_object_info_from_revision(revision, known_type):
""" returns type and id of the searched object, if we have one part of
the relationship known.
"""
object_type = revision.destination_type \
if revision.source_type == known_type \
else revision.source_type
object_id = revision.destination_id if \
revision.source_type == known_type \
else revision.source_id
return object_type, object_id
def get_cycle_task_dict(cycle_task):
object_titles = []
# every object should have a title or at least a name like person object
for related_object in cycle_task.related_objects:
object_titles.append(getattr(related_object, "title", "") or
getattr(related_object, "name", "") or
u"Untitled object")
# related objects might have been deleted or unmapped,
# check the revision history
deleted_relationships_sources = db.session.query(Revision).filter(
Revision.resource_type == "Relationship",
Revision.action == "deleted",
Revision.source_type == "CycleTaskGroupObjectTask",
Revision.source_id == cycle_task.id
)
deleted_relationships_destinations = db.session.query(Revision).filter(
Revision.resource_type == "Relationship",
Revision.action == "deleted",
Revision.destination_type == "CycleTaskGroupObjectTask",
Revision.destination_id == cycle_task.id
)
deleted_relationships = deleted_relationships_sources.union(
deleted_relationships_destinations).all()
for deleted_relationship in deleted_relationships:
removed_object_type, removed_object_id = _get_object_info_from_revision(
deleted_relationship, "CycleTaskGroupObjectTask")
object_data = db.session.query(Revision).filter(
Revision.resource_type == removed_object_type,
Revision.resource_id == removed_object_id,
).order_by(Revision.id.desc()).first()
object_titles.append(
u"{} [removed from task]".format(object_data.content["display_name"])
)
# the filter expression to be included in the cycle task's URL and
# automatically applied when user visits it
filter_exp = u"id=" + unicode(cycle_task.cycle_id)
return {
"title": cycle_task.title,
"related_objects": object_titles,
"end_date": cycle_task.end_date.strftime("%m/%d/%Y"),
"due_date_statement": utils.get_digest_date_statement(
cycle_task.end_date, "due"),
"cycle_task_url": get_cycle_task_url(cycle_task, filter_exp=filter_exp),
}
def get_cycle_dict(cycle, manual=False):
workflow_owners = get_workflow_owners_dict(cycle.context_id)
return {
"manually": manual,
"custom_message": cycle.workflow.notify_custom_message,
"cycle_title": cycle.title,
"workflow_owners": workflow_owners,
"cycle_url": get_cycle_url(cycle),
}
def get_workflow_url(workflow):
url = "workflows/{}#current_widget".format(workflow.id)
return urljoin(get_url_root(), url)
def get_cycle_task_url(cycle_task, filter_exp=u""):
if filter_exp:
filter_exp = u"?filter=" + urllib.quote(filter_exp)
url = (u"/workflows/{workflow_id}"
u"{filter_exp}"
u"#current_widget/cycle/{cycle_id}"
u"/cycle_task_group/{cycle_task_group_id}"
u"/cycle_task_group_object_task/{cycle_task_id}").format(
workflow_id=cycle_task.cycle_task_group.cycle.workflow.id,
filter_exp=filter_exp,
cycle_id=cycle_task.cycle_task_group.cycle.id,
cycle_task_group_id=cycle_task.cycle_task_group.id,
cycle_task_id=cycle_task.id,
)
return urljoin(get_url_root(), url)
def get_cycle_url(cycle):
url = "workflows/{workflow_id}#current_widget/cycle/{cycle_id}".format(
workflow_id=cycle.workflow.id,
cycle_id=cycle.id,
)
return urljoin(get_url_root(), url)
| plamut/ggrc-core | src/ggrc_workflows/notification/data_handler.py | Python | apache-2.0 | 14,000 | 0.007143 |
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for flatbuffer_utils.py."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import copy
import os
import subprocess
from tensorflow.lite.tools import flatbuffer_utils
from tensorflow.lite.tools import test_utils
from tensorflow.python.framework import test_util
from tensorflow.python.platform import test
class WriteReadModelTest(test_util.TensorFlowTestCase):
def testWriteReadModel(self):
# 1. SETUP
# Define the initial model
initial_model = test_utils.build_mock_model()
# Define temporary files
tmp_dir = self.get_temp_dir()
model_filename = os.path.join(tmp_dir, 'model.tflite')
# 2. INVOKE
# Invoke the write_model and read_model functions
flatbuffer_utils.write_model(initial_model, model_filename)
final_model = flatbuffer_utils.read_model(model_filename)
# 3. VALIDATE
# Validate that the initial and final models are the same
# Validate the description
self.assertEqual(initial_model.description, final_model.description)
# Validate the main subgraph's name, inputs, outputs, operators and tensors
initial_subgraph = initial_model.subgraphs[0]
final_subgraph = final_model.subgraphs[0]
self.assertEqual(initial_subgraph.name, final_subgraph.name)
for i in range(len(initial_subgraph.inputs)):
self.assertEqual(initial_subgraph.inputs[i], final_subgraph.inputs[i])
for i in range(len(initial_subgraph.outputs)):
self.assertEqual(initial_subgraph.outputs[i], final_subgraph.outputs[i])
for i in range(len(initial_subgraph.operators)):
self.assertEqual(initial_subgraph.operators[i].opcodeIndex,
final_subgraph.operators[i].opcodeIndex)
initial_tensors = initial_subgraph.tensors
final_tensors = final_subgraph.tensors
for i in range(len(initial_tensors)):
self.assertEqual(initial_tensors[i].name, final_tensors[i].name)
self.assertEqual(initial_tensors[i].type, final_tensors[i].type)
self.assertEqual(initial_tensors[i].buffer, final_tensors[i].buffer)
for j in range(len(initial_tensors[i].shape)):
self.assertEqual(initial_tensors[i].shape[j], final_tensors[i].shape[j])
# Validate the first valid buffer (index 0 is always None)
initial_buffer = initial_model.buffers[1].data
final_buffer = final_model.buffers[1].data
for i in range(initial_buffer.size):
self.assertEqual(initial_buffer.data[i], final_buffer.data[i])
class StripStringsTest(test_util.TensorFlowTestCase):
def testStripStrings(self):
# 1. SETUP
# Define the initial model
initial_model = test_utils.build_mock_model()
final_model = copy.deepcopy(initial_model)
# 2. INVOKE
# Invoke the strip_strings function
flatbuffer_utils.strip_strings(final_model)
# 3. VALIDATE
# Validate that the initial and final models are the same except strings
# Validate the description
self.assertNotEqual('', initial_model.description)
self.assertEqual('', final_model.description)
# Validate the main subgraph's name, inputs, outputs, operators and tensors
initial_subgraph = initial_model.subgraphs[0]
final_subgraph = final_model.subgraphs[0]
self.assertNotEqual('', initial_model.subgraphs[0].name)
self.assertEqual('', final_model.subgraphs[0].name)
for i in range(len(initial_subgraph.inputs)):
self.assertEqual(initial_subgraph.inputs[i], final_subgraph.inputs[i])
for i in range(len(initial_subgraph.outputs)):
self.assertEqual(initial_subgraph.outputs[i], final_subgraph.outputs[i])
for i in range(len(initial_subgraph.operators)):
self.assertEqual(initial_subgraph.operators[i].opcodeIndex,
final_subgraph.operators[i].opcodeIndex)
initial_tensors = initial_subgraph.tensors
final_tensors = final_subgraph.tensors
for i in range(len(initial_tensors)):
self.assertNotEqual('', initial_tensors[i].name)
self.assertEqual('', final_tensors[i].name)
self.assertEqual(initial_tensors[i].type, final_tensors[i].type)
self.assertEqual(initial_tensors[i].buffer, final_tensors[i].buffer)
for j in range(len(initial_tensors[i].shape)):
self.assertEqual(initial_tensors[i].shape[j], final_tensors[i].shape[j])
# Validate the first valid buffer (index 0 is always None)
initial_buffer = initial_model.buffers[1].data
final_buffer = final_model.buffers[1].data
for i in range(initial_buffer.size):
self.assertEqual(initial_buffer.data[i], final_buffer.data[i])
class RandomizeWeightsTest(test_util.TensorFlowTestCase):
def testRandomizeWeights(self):
# 1. SETUP
# Define the initial model
initial_model = test_utils.build_mock_model()
final_model = copy.deepcopy(initial_model)
# 2. INVOKE
# Invoke the randomize_weights function
flatbuffer_utils.randomize_weights(final_model)
# 3. VALIDATE
# Validate that the initial and final models are the same, except that
# the weights in the model buffer have been modified (i.e, randomized)
# Validate the description
self.assertEqual(initial_model.description, final_model.description)
# Validate the main subgraph's name, inputs, outputs, operators and tensors
initial_subgraph = initial_model.subgraphs[0]
final_subgraph = final_model.subgraphs[0]
self.assertEqual(initial_subgraph.name, final_subgraph.name)
for i in range(len(initial_subgraph.inputs)):
self.assertEqual(initial_subgraph.inputs[i], final_subgraph.inputs[i])
for i in range(len(initial_subgraph.outputs)):
self.assertEqual(initial_subgraph.outputs[i], final_subgraph.outputs[i])
for i in range(len(initial_subgraph.operators)):
self.assertEqual(initial_subgraph.operators[i].opcodeIndex,
final_subgraph.operators[i].opcodeIndex)
initial_tensors = initial_subgraph.tensors
final_tensors = final_subgraph.tensors
for i in range(len(initial_tensors)):
self.assertEqual(initial_tensors[i].name, final_tensors[i].name)
self.assertEqual(initial_tensors[i].type, final_tensors[i].type)
self.assertEqual(initial_tensors[i].buffer, final_tensors[i].buffer)
for j in range(len(initial_tensors[i].shape)):
self.assertEqual(initial_tensors[i].shape[j], final_tensors[i].shape[j])
# Validate the first valid buffer (index 0 is always None)
initial_buffer = initial_model.buffers[1].data
final_buffer = final_model.buffers[1].data
for j in range(initial_buffer.size):
self.assertNotEqual(initial_buffer.data[j], final_buffer.data[j])
class XxdOutputToBytesTest(test_util.TensorFlowTestCase):
def testXxdOutputToBytes(self):
# 1. SETUP
# Define the initial model
initial_model = test_utils.build_mock_model()
initial_bytes = flatbuffer_utils.convert_object_to_bytearray(initial_model)
# Define temporary files
tmp_dir = self.get_temp_dir()
model_filename = os.path.join(tmp_dir, 'model.tflite')
# 2. Write model to temporary file (will be used as input for xxd)
flatbuffer_utils.write_model(initial_model, model_filename)
# 3. DUMP WITH xxd
input_cc_file = os.path.join(tmp_dir, 'model.cc')
command = 'xxd -i {} > {}'.format(model_filename, input_cc_file)
subprocess.call(command, shell=True)
# 4. VALIDATE
final_bytes = flatbuffer_utils.xxd_output_to_bytes(input_cc_file)
# Validate that the initial and final bytearray are the same
self.assertEqual(initial_bytes, final_bytes)
if __name__ == '__main__':
test.main()
| karllessard/tensorflow | tensorflow/lite/tools/flatbuffer_utils_test.py | Python | apache-2.0 | 8,309 | 0.003972 |
# cerbero - a multi-platform build system for Open Source software
# Copyright (C) 2012 Andoni Morales Alastruey <ylatuya@gmail.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Library General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
import os
import shutil
from cerbero.config import Platform
from cerbero.utils import shell
# Clean up the LD environment to avoid library version mismatches while
# running the system git
CLEAN_ENV = os.environ.copy()
if CLEAN_ENV.has_key('LD_LIBRARY_PATH'):
CLEAN_ENV.pop('LD_LIBRARY_PATH')
GIT = 'git'
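# Usage sketch (illustration only; the repository path and remote URL below are
# made-up examples, not part of cerbero itself):
#
#   init('/tmp/repo')
#   add_remote('/tmp/repo', 'origin', 'git://example.com/project.git')
#   fetch('/tmp/repo')
#   checkout('/tmp/repo', 'origin/master')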
def init(git_dir):
'''
Initialize a git repository with 'git init'
@param git_dir: path of the git repository
@type git_dir: str
'''
shell.call('mkdir -p %s' % git_dir)
shell.call('%s init' % GIT, git_dir, env=CLEAN_ENV)
def clean(git_dir):
'''
    Clean a git repository with clean -dfx
@param git_dir: path of the git repository
@type git_dir: str
'''
return shell.call('%s clean -dfx' % GIT, git_dir, env=CLEAN_ENV)
def list_tags(git_dir, fail=True):
'''
List all tags
@param git_dir: path of the git repository
@type git_dir: str
@param fail: raise an error if the command failed
    @type fail: bool
@return: list of tag names (str)
@rtype: list
'''
tags = shell.check_call('%s tag -l' % GIT, git_dir, fail=fail, env=CLEAN_ENV)
tags = tags.strip()
if tags:
tags = tags.split('\n')
return tags
def create_tag(git_dir, tagname, tagdescription, commit, fail=True):
'''
Create a tag using commit
@param git_dir: path of the git repository
@type git_dir: str
@param tagname: name of the tag to create
@type tagname: str
@param tagdescription: the tag description
@type tagdescription: str
@param commit: the tag commit to use
@type commit: str
@param fail: raise an error if the command failed
    @type fail: bool
'''
shell.call('%s tag -s %s -m "%s" %s' %
(GIT, tagname, tagdescription, commit), git_dir, fail=fail,
env=CLEAN_ENV)
return shell.call('%s push origin %s' % (GIT, tagname), git_dir, fail=fail,
env=CLEAN_ENV)
def delete_tag(git_dir, tagname, fail=True):
'''
Delete a tag
@param git_dir: path of the git repository
@type git_dir: str
@param tagname: name of the tag to delete
@type tagname: str
@param fail: raise an error if the command failed
    @type fail: bool
'''
return shell.call('%s tag -d %s' % (GIT, tagname), git_dir, fail=fail,
env=CLEAN_ENV)
def fetch(git_dir, fail=True):
'''
Fetch all refs from all the remotes
@param git_dir: path of the git repository
@type git_dir: str
@param fail: raise an error if the command failed
    @type fail: bool
'''
return shell.call('%s fetch --all' % GIT, git_dir, fail=fail, env=CLEAN_ENV)
def submodules_update(git_dir, src_dir=None, fail=True):
'''
    Update submodules from a local directory
@param git_dir: path of the git repository
@type git_dir: str
@param src_dir: path or base URI of the source directory
    @type src_dir: str
@param fail: raise an error if the command failed
    @type fail: bool
'''
if src_dir:
config = shell.check_call('%s config --file=.gitmodules --list' % GIT,
git_dir)
config_array = [s.split('=', 1) for s in config.split('\n')]
for c in config_array:
if c[0].startswith('submodule.') and c[0].endswith('.path'):
submodule = c[0][len('submodule.'):-len('.path')]
shell.call("%s config --file=.gitmodules submodule.%s.url %s" %
(GIT, submodule, os.path.join(src_dir, c[1])),
git_dir)
shell.call("%s submodule init" % GIT, git_dir)
shell.call("%s submodule sync" % GIT, git_dir)
shell.call("%s submodule update" % GIT, git_dir, fail=fail)
if src_dir:
for c in config_array:
if c[0].startswith('submodule.') and c[0].endswith('.url'):
shell.call("%s config --file=.gitmodules %s %s" %
(GIT, c[0], c[1]), git_dir)
shell.call("%s submodule sync" % GIT, git_dir)
def checkout(git_dir, commit):
'''
Reset a git repository to a given commit
@param git_dir: path of the git repository
@type git_dir: str
@param commit: the commit to checkout
@type commit: str
'''
return shell.call('%s reset --hard %s' % (GIT, commit), git_dir,
env=CLEAN_ENV)
def get_hash(git_dir, commit):
'''
Get a commit hash from a valid commit.
Can be used to check if a commit exists
@param git_dir: path of the git repository
@type git_dir: str
@param commit: the commit to log
@type commit: str
'''
return shell.check_call('%s show -s --pretty=%%H %s' %
(GIT, commit), git_dir, env=CLEAN_ENV)
def local_checkout(git_dir, local_git_dir, commit):
'''
Clone a repository for a given commit in a different location
@param git_dir: destination path of the git repository
@type git_dir: str
@param local_git_dir: path of the source git repository
@type local_git_dir: str
@param commit: the commit to checkout
    @type commit: str
'''
    # reset to a commit in case it's the first checkout and the master branch
    # is missing
branch_name = 'cerbero_build'
shell.call('%s reset --hard %s' % (GIT, commit), local_git_dir,
env=CLEAN_ENV)
shell.call('%s branch %s' % (GIT, branch_name), local_git_dir, fail=False,
env=CLEAN_ENV)
shell.call('%s checkout %s' % (GIT, branch_name), local_git_dir,
env=CLEAN_ENV)
shell.call('%s reset --hard %s' % (GIT, commit), local_git_dir,
env=CLEAN_ENV)
shell.call('%s clone %s -s -b %s .' % (GIT, local_git_dir,
branch_name),
git_dir, env=CLEAN_ENV)
submodules_update(git_dir, local_git_dir)
def add_remote(git_dir, name, url):
'''
Add a remote to a git repository
@param git_dir: destination path of the git repository
@type git_dir: str
@param name: name of the remote
@type name: str
@param url: url of the remote
@type url: str
'''
try:
shell.call('%s remote add %s %s' % (GIT, name, url), git_dir,
env=CLEAN_ENV)
except:
shell.call('%s remote set-url %s %s' % (GIT, name, url), git_dir,
env=CLEAN_ENV)
def check_line_endings(platform):
'''
    Check that automatic line-ending conversion is disabled on Windows,
    as it breaks everything
@param platform: the host platform
@type platform: L{cerbero.config.Platform}
    @return: true if git config is core.autocrlf=false
@rtype: bool
'''
if platform != Platform.WINDOWS:
return True
val = shell.check_call('%s config --get core.autocrlf' % GIT, env=CLEAN_ENV)
if ('false' in val.lower()):
return True
return False
def init_directory(git_dir):
'''
Initialize a git repository with the contents
of a directory
@param git_dir: path of the git repository
@type git_dir: str
'''
init(git_dir)
try:
shell.call('%s add --force -A .' % GIT, git_dir, env=CLEAN_ENV)
shell.call('%s commit -m "Initial commit" > /dev/null 2>&1' % GIT,
git_dir, env=CLEAN_ENV)
except:
pass
def apply_patch(patch, git_dir):
'''
    Apply a commit patch using 'git am'
@param git_dir: path of the git repository
@type git_dir: str
@param patch: path of the patch file
@type patch: str
'''
shell.call('%s am --ignore-whitespace %s' % (GIT, patch), git_dir,
env=CLEAN_ENV)
| sdroege/cerbero | cerbero/utils/git.py | Python | lgpl-2.1 | 8,593 | 0.00128 |
# -*- coding: utf-8 -*-
"""
***************************************************************************
OTBUtils.py
---------------------
Date : 11-12-13
Copyright : (C) 2013 by CS Systemes d'information (CS SI)
Email : otb at c-s dot fr (CS SI)
Contributors : Julien Malik (CS SI) - creation of otbspecific
Oscar Picas (CS SI) -
Alexia Mondot (CS SI) - split otbspecific into 2 files
add functions
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
When an OTB algorithm is run, this file allows adapting the user parameters to fit the OTB application.
Most of the following functions look like:
adaptNameOfTheOTBApplication(commands_list)
The command list is a list of all parameters of the given algorithm with all user values.
"""
__author__ = 'Julien Malik, Oscar Picas, Alexia Mondot'
__date__ = 'December 2013'
__copyright__ = '(C) 2013, CS Systemes d\'information (CS SI)'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
__version__ = "3.8"
import os
try:
import processing
except ImportError as e:
raise Exception("Processing must be installed and available in PYTHONPATH")
from processing.core.ProcessingConfig import ProcessingConfig
from OTBUtils import OTBUtils
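# Illustrative example (not part of the original module): a sketch of how one of
# the adapters below transforms the raw parameter list it receives.  The
# application name and radius value are made up for illustration.
#
#   adaptGrayScaleMorphologicalOperation(
#       ["otbcli_GrayScaleMorphologicalOperation",
#        "-structype.ball.xradius", "5"])
#   # -> [..., "-structype.ball.xradius", "5",
#   #     "-structype.ball.yradius", "5"]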
def adaptBinaryMorphologicalOperation(commands_list):
val = commands_list[commands_list.index("-filter") + 1]
def replace_dilate(param, value):
if ".dilate" in str(param):
return param.replace("dilate", value)
else:
return param
import functools
com_list = map(functools.partial(replace_dilate, value=val), commands_list)
val = com_list[com_list.index("-structype.ball.xradius") + 1]
pos = com_list.index("-structype.ball.xradius") + 2
com_list.insert(pos, '-structype.ball.yradius')
com_list.insert(pos + 1, val)
return com_list
def adaptEdgeExtraction(commands_list):
"""
Add filter.touzi.yradius as the same value as filter.touzi.xradius
"""
val = commands_list[commands_list.index("-filter") + 1]
if val == 'touzi':
bval = commands_list[commands_list.index("-filter.touzi.xradius") + 1]
pos = commands_list.index("-filter.touzi.xradius") + 2
commands_list.insert(pos, "-filter.touzi.yradius")
commands_list.insert(pos + 1, bval)
return commands_list
def adaptGrayScaleMorphologicalOperation(commands_list):
"""
Add structype.ball.yradius as the same value as structype.ball.xradius (as it is a ball)
"""
val = commands_list[commands_list.index("-structype.ball.xradius") + 1]
pos = commands_list.index("-structype.ball.xradius") + 2
commands_list.insert(pos, "-structype.ball.yradius")
commands_list.insert(pos + 1, val)
return commands_list
def adaptSplitImage(commands_list):
"""
    When run with default settings, the output file extension is .file; replace it with ".tif".
    If no extension is given, append ".tif" to the filename.
"""
commands_list2 = []
for item in commands_list:
if ".file" in item:
item = item.replace(".file", ".tif")
if item == "-out":
index = commands_list.index(item)
if "." not in os.path.basename(commands_list[index + 1]):
commands_list[index + 1] = commands_list[index + 1][:-1] + ".tif" + commands_list[index + 1][-1]
commands_list2.append(item)
return commands_list2
def adaptLSMSVectorization(commands_list):
"""
    When run with default settings, the output file extension is .file; replace it with ".shp".
    If no extension is given, append ".shp" to the filename.
"""
commands_list2 = []
for item in commands_list:
if ".file" in item:
item = item.replace(".file", ".shp")
if item == "-out":
index = commands_list.index(item)
if "." not in os.path.basename(commands_list[index + 1]):
commands_list[index + 1] = commands_list[index + 1][:-1] + ".shp" + commands_list[index + 1][-1]
commands_list2.append(item)
return commands_list2
def adaptComputeImagesStatistics(commands_list):
"""
    When run with default settings, the output file extension is .file; replace it with ".xml".
    If no extension is given, append ".xml" to the filename.
"""
commands_list2 = []
for item in commands_list:
if ".file" in item:
item = item.replace(".file", ".xml")
commands_list2.append(item)
if item == "-out":
index = commands_list.index(item)
if "." not in os.path.basename(commands_list[index + 1]):
commands_list[index + 1] = commands_list[index + 1][:-1] + ".xml" + commands_list[index + 1][-1]
return commands_list2
def adaptKmzExport(commands_list):
"""
    When run with default settings, the output file extension is .file; replace it with ".kmz".
    If no extension is given, append ".kmz" to the filename.
Check geoid file, srtm folder and given elevation and manage arguments.
"""
adaptGeoidSrtm(commands_list)
commands_list2 = []
for item in commands_list:
if ".file" in item:
item = item.replace(".file", ".kmz")
if item == "-out":
index = commands_list.index(item)
if "." not in os.path.basename(commands_list[index + 1]):
commands_list[index + 1] = commands_list[index + 1][:-1] + ".kmz" + commands_list[index + 1][-1]
commands_list2.append(item)
return commands_list2
def adaptColorMapping(commands_list):
"""
The output of this algorithm must be in uint8.
"""
indexInput = commands_list.index("-out")
commands_list[indexInput + 1] = commands_list[indexInput + 1] + " uint8"
return commands_list
def adaptStereoFramework(commands_list):
"""
    Remove a parameter and its value when the user value is None, instead of passing None.
Check geoid file, srtm folder and given elevation and manage arguments.
"""
commands_list2 = commands_list
adaptGeoidSrtm(commands_list2)
for item in commands_list:
if "None" in item:
index = commands_list2.index(item)
argumentToRemove = commands_list2[index - 1]
commands_list2.remove(item)
commands_list2.remove(argumentToRemove)
#commands_list2.append(item)
return commands_list2
def adaptComputeConfusionMatrix(commands_list):
"""
    When run with default settings, the output file extension is .file; replace it with ".csv".
    If no extension is given, append ".csv" to the filename.
"""
commands_list2 = []
for item in commands_list:
if ".file" in item:
item = item.replace(".file", ".csv")
if item == "-out":
index = commands_list.index(item)
if "." not in os.path.basename(commands_list[index + 1]):
commands_list[index + 1] = commands_list[index + 1][:-1] + ".csv" + commands_list[index + 1][-1]
commands_list2.append(item)
return commands_list2
def adaptRadiometricIndices(commands_list):
"""
Replace indice nickname by its corresponding entry in the following dictionary :
indices = {"ndvi" : "Vegetation:NDVI", "tndvi" : "Vegetation:TNDVI", "rvi" : "Vegetation:RVI", "savi" : "Vegetation:SAVI",
"tsavi" : "Vegetation:TSAVI", "msavi" : "Vegetation:MSAVI", "msavi2" : "Vegetation:MSAVI2", "gemi" : "Vegetation:GEMI",
"ipvi" : "Vegetation:IPVI",
"ndwi" : "Water:NDWI", "ndwi2" : "Water:NDWI2", "mndwi" :"Water:MNDWI" , "ndpi" : "Water:NDPI",
"ndti" : "Water:NDTI",
"ri" : "Soil:RI", "ci" : "Soil:CI", "bi" : "Soil:BI", "bi2" : "Soil:BI2"}
"""
# "laindvilog" : , "lairefl" : , "laindviformo" : ,
indices = {"ndvi": "Vegetation:NDVI", "tndvi": "Vegetation:TNDVI", "rvi": "Vegetation:RVI", "savi": "Vegetation:SAVI",
"tsavi": "Vegetation:TSAVI", "msavi": "Vegetation:MSAVI", "msavi2": "Vegetation:MSAVI2", "gemi": "Vegetation:GEMI",
"ipvi": "Vegetation:IPVI",
"ndwi": "Water:NDWI", "ndwi2": "Water:NDWI2", "mndwi": "Water:MNDWI", "ndpi": "Water:NDPI",
"ndti": "Water:NDTI",
"ri": "Soil:RI", "ci": "Soil:CI", "bi": "Soil:BI", "bi2": "Soil:BI2"}
for item in commands_list:
if item in indices:
commands_list[commands_list.index(item)] = indices[item]
return commands_list
def adaptDisparityMapToElevationMap(commands_list):
"""
Check geoid file, srtm folder and given elevation and manage arguments.
"""
adaptGeoidSrtm(commands_list)
return commands_list
def adaptConnectedComponentSegmentation(commands_list):
"""
    Remove a parameter and its value when the user value is None, instead of passing None.
"""
commands_list2 = commands_list
adaptGeoidSrtm(commands_list2)
for item in commands_list:
if "None" in item:
index = commands_list2.index(item)
argumentToRemove = commands_list2[index - 1]
commands_list2.remove(item)
commands_list2.remove(argumentToRemove)
#commands_list2.append(item)
return commands_list2
def adaptSuperimpose(commands_list):
"""
Check geoid file, srtm folder and given elevation and manage arguments.
"""
adaptGeoidSrtm(commands_list)
return commands_list
def adaptOrthoRectification(commands_list):
"""
Check geoid file, srtm folder and given elevation and manage arguments.
"""
adaptGeoidSrtm(commands_list)
return commands_list
def adaptExtractROI(commands_list):
"""
Check geoid file, srtm folder and given elevation and manage arguments.
"""
adaptGeoidSrtm(commands_list)
return commands_list
def adaptTrainImagesClassifier(commands_list):
"""
Check geoid file, srtm folder and given elevation and manage arguments.
"""
adaptGeoidSrtm(commands_list)
return commands_list
def adaptGeoidSrtm(commands_list):
"""
Check geoid file, srtm folder and given elevation and manage arguments.
"""
srtm, geoid = ckeckGeoidSrtmSettings()
if srtm:
if commands_list[0].endswith("ExtractROI"):
commands_list.append("-mode.fit.elev.dem")
commands_list.append(srtm)
else:
commands_list.append("-elev.dem")
commands_list.append(srtm)
if geoid:
if commands_list[0].endswith("ExtractROI"):
commands_list.append("-mode.fit.elev.geoid")
commands_list.append(geoid)
else:
commands_list.append("-elev.geoid")
commands_list.append(geoid)
def adaptComputePolylineFeatureFromImage(commands_list):
"""
    Remove a parameter and its value when the user value is None, instead of passing None.
Check geoid file, srtm folder and given elevation and manage arguments.
"""
commands_list2 = commands_list
adaptGeoidSrtm(commands_list2)
for item in commands_list:
if "None" in item:
index = commands_list2.index(item)
argumentToRemove = commands_list2[index - 1]
commands_list2.remove(item)
commands_list2.remove(argumentToRemove)
# commands_list2.append(item)
return commands_list2
def adaptComputeOGRLayersFeaturesStatistics(commands_list):
"""
    Remove a parameter and its value when the user value is None, instead of passing None.
Check geoid file, srtm folder and given elevation and manage arguments.
"""
commands_list2 = commands_list
adaptGeoidSrtm(commands_list2)
for item in commands_list:
if "None" in item:
index = commands_list2.index(item)
argumentToRemove = commands_list2[index - 1]
commands_list2.remove(item)
commands_list2.remove(argumentToRemove)
# commands_list2.append(item)
return commands_list2
def ckeckGeoidSrtmSettings():
folder = ProcessingConfig.getSetting(OTBUtils.OTB_SRTM_FOLDER)
if folder is None:
folder = ""
filepath = ProcessingConfig.getSetting(OTBUtils.OTB_GEOID_FILE)
if filepath is None:
filepath = ""
return folder, filepath
| carolinux/QGIS | python/plugins/processing/algs/otb/OTBSpecific_XMLLoading.py | Python | gpl-2.0 | 12,848 | 0.001946 |
from pyramid.exceptions import ConfigurationError
from pyramid.interfaces import ISessionFactory
from .settings import parse_settings
def includeme(config):
""" Set up standard configurator registrations. Use via:
.. code-block:: python
config = Configurator()
config.include('pyramid_keystone')
"""
# We use an action so that the user can include us, and then add the
# required variables, upon commit we will pick up those changes.
def register():
registry = config.registry
settings = parse_settings(registry.settings)
registry.settings.update(settings)
def ensure():
if config.registry.queryUtility(ISessionFactory) is None:
raise ConfigurationError('pyramid_keystone requires a registered'
' session factory. (use the set_session_factory method)')
config.action('keystone-configure', register)
# We need to make sure that this is executed after the default Pyramid
# actions, because otherwise our Session Factory may not exist yet
config.action(None, ensure, order=10)
# Allow the user to use our auth policy (recommended)
config.add_directive('keystone_auth_policy', '.authentication.add_auth_policy')
# Add the keystone property to the request
config.add_request_method('.keystone.request_keystone', name='keystone',
property=True, reify=True)
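# Minimal usage sketch (the cookie session factory and the argument-less directive call are
# shown only for illustration; adjust to your application):
#
#     from pyramid.config import Configurator
#     from pyramid.session import SignedCookieSessionFactory
#
#     config = Configurator(settings=settings)
#     config.set_session_factory(SignedCookieSessionFactory('seekrit'))
#     config.include('pyramid_keystone')
#     config.keystone_auth_policy()  # optional; directive registered via add_directive above
#     app = config.make_wsgi_app()
#
# The session factory must be set before the configurator commits, otherwise the ensure()
# action above raises ConfigurationError.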
| bertjwregeer/pyramid_keystone | pyramid_keystone/__init__.py | Python | isc | 1,415 | 0.003534 |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from xbmctorrent import plugin
from xbmctorrent.scrapers import scraper
from xbmctorrent.ga import tracked
from xbmctorrent.caching import cached_route
from xbmctorrent.utils import ensure_fanart
from xbmctorrent.library import library_context
BASE_URL = plugin.get_setting("base_btdigg")
HEADERS = {
"Referer": BASE_URL,
}
SORT_RELEVANCE = 0
SORT_POPULARITY = 1
SORT_ADDTIME = 2
SORT_SIZE = 3
SORT_FILES = 4
@scraper("BTDigg - DHT Search Engine", "%s/logo.png" % BASE_URL)
@plugin.route("/btdigg")
@ensure_fanart
@tracked
def btdigg_index():
plugin.redirect(plugin.url_for("btdigg_search"))
@plugin.route("/btdigg/search/<query>/<sort>/<page>")
@library_context
@ensure_fanart
@tracked
def btdigg_page(query, sort, page):
from bs4 import BeautifulSoup
from xbmctorrent.utils import url_get
html_data = url_get("%s/search" % BASE_URL, headers=HEADERS, params={
"order": sort,
"q": query,
"p": page,
})
soup = BeautifulSoup(html_data, "html5lib")
name_nodes = soup.findAll("td", "torrent_name")
attr_nodes = soup.findAll("table", "torrent_name_tbl")[1::2]
for name_node, attr_node in zip(name_nodes, attr_nodes):
attrs = attr_node.findAll("span", "attr_val")
title = "%s (%s, DLs:%s)" % (name_node.find("a").text, attrs[0].text, attrs[2].text)
yield {
"label": title,
"path": plugin.url_for("play", uri=attr_node.find("a")["href"]),
"is_playable": True,
}
yield {
"label": ">> Next page",
"path": plugin.url_for("btdigg_page", query=query, sort=sort, page=int(page) + 1),
"is_playable": False,
}
@plugin.route("/btdigg/search")
@tracked
def btdigg_search():
query = plugin.request.args_dict.pop("query", None)
if not query:
query = plugin.keyboard("", "XBMCtorrent - BTDigg - Search")
if query:
plugin.redirect(plugin.url_for("btdigg_page", query=query, sort=SORT_POPULARITY, page=0, **plugin.request.args_dict))
| skipmodea1/plugin.video.xbmctorrent | resources/site-packages/xbmctorrent/scrapers/btdigg.py | Python | gpl-3.0 | 2,061 | 0.001456 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class Operations:
"""Operations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.rdbms.mariadb.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def list(
self,
**kwargs: Any
) -> "_models.OperationListResult":
"""Lists all of the available REST API operations.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: OperationListResult, or the result of cls(response)
:rtype: ~azure.mgmt.rdbms.mariadb.models.OperationListResult
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
accept = "application/json"
# Construct URL
url = self.list.metadata['url'] # type: ignore
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('OperationListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list.metadata = {'url': '/providers/Microsoft.DBForMariaDB/operations'} # type: ignore
| Azure/azure-sdk-for-python | sdk/rdbms/azure-mgmt-rdbms/azure/mgmt/rdbms/mariadb/aio/operations/_operations.py | Python | mit | 3,780 | 0.004233 |
# Program to print raw data of the accelerometer device
import sys
sys.path.append ("../../../lib")
import accel
import time
import numpy
import os
A = accel.Init ()
while(1):
time.sleep(0.25)
os.system ("clear")
print "\n\n\n\n"
(status, x) = accel.get_x (A)
(status, y) = accel.get_y (A)
(status, z) = accel.get_z (A)
print("\t{:7.2f} {:7.2f} {:7.2f}".format(x, y, z))
print "\t|A| = %6.3F" % numpy.sqrt (x*x + y*y + z*z)
| Daniel-Brosnan-Blazquez/DIT-100 | examples/IMU/acc/raw_data.py | Python | gpl-3.0 | 461 | 0.023861 |
"""Provides functionality to interact with image processing services."""
import asyncio
from datetime import timedelta
import logging
import voluptuous as vol
from homeassistant.const import ATTR_ENTITY_ID, ATTR_NAME, CONF_ENTITY_ID, CONF_NAME
from homeassistant.core import callback
from homeassistant.exceptions import HomeAssistantError
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.config_validation import make_entity_service_schema
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.util.async_ import run_callback_threadsafe
# mypy: allow-untyped-defs, no-check-untyped-defs
_LOGGER = logging.getLogger(__name__)
DOMAIN = "image_processing"
SCAN_INTERVAL = timedelta(seconds=10)
DEVICE_CLASSES = [
"alpr", # Automatic license plate recognition
"face", # Face
"ocr", # OCR
]
SERVICE_SCAN = "scan"
EVENT_DETECT_FACE = "image_processing.detect_face"
ATTR_AGE = "age"
ATTR_CONFIDENCE = "confidence"
ATTR_FACES = "faces"
ATTR_GENDER = "gender"
ATTR_GLASSES = "glasses"
ATTR_MOTION = "motion"
ATTR_TOTAL_FACES = "total_faces"
CONF_SOURCE = "source"
CONF_CONFIDENCE = "confidence"
DEFAULT_TIMEOUT = 10
DEFAULT_CONFIDENCE = 80
SOURCE_SCHEMA = vol.Schema(
{
vol.Required(CONF_ENTITY_ID): cv.entity_domain("camera"),
vol.Optional(CONF_NAME): cv.string,
}
)
PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_SOURCE): vol.All(cv.ensure_list, [SOURCE_SCHEMA]),
vol.Optional(CONF_CONFIDENCE, default=DEFAULT_CONFIDENCE): vol.All(
vol.Coerce(float), vol.Range(min=0, max=100)
),
}
)
PLATFORM_SCHEMA_BASE = cv.PLATFORM_SCHEMA_BASE.extend(PLATFORM_SCHEMA.schema)
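# Illustrative sketch (platform and entity ids are hypothetical): PLATFORM_SCHEMA above
# validates a per-platform configuration equivalent to
#     {
#         "platform": "some_face_platform",
#         "source": [{"entity_id": "camera.front_door", "name": "Front door"}],
#         "confidence": 85,
#     }
# where each "source" entry must be a camera-domain entity and "confidence" defaults to 80.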
async def async_setup(hass, config):
"""Set up the image processing."""
component = EntityComponent(_LOGGER, DOMAIN, hass, SCAN_INTERVAL)
await component.async_setup(config)
async def async_scan_service(service):
"""Service handler for scan."""
image_entities = await component.async_extract_from_service(service)
update_tasks = []
for entity in image_entities:
entity.async_set_context(service.context)
update_tasks.append(entity.async_update_ha_state(True))
if update_tasks:
await asyncio.wait(update_tasks)
hass.services.async_register(
DOMAIN, SERVICE_SCAN, async_scan_service, schema=make_entity_service_schema({})
)
return True
class ImageProcessingEntity(Entity):
"""Base entity class for image processing."""
timeout = DEFAULT_TIMEOUT
@property
def camera_entity(self):
"""Return camera entity id from process pictures."""
return None
@property
def confidence(self):
"""Return minimum confidence for do some things."""
return None
def process_image(self, image):
"""Process image."""
raise NotImplementedError()
async def async_process_image(self, image):
"""Process image."""
return await self.hass.async_add_executor_job(self.process_image, image)
async def async_update(self):
"""Update image and process it.
This method is a coroutine.
"""
camera = self.hass.components.camera
image = None
try:
image = await camera.async_get_image(
self.camera_entity, timeout=self.timeout
)
except HomeAssistantError as err:
_LOGGER.error("Error on receive image from entity: %s", err)
return
# process image data
await self.async_process_image(image.content)
class ImageProcessingFaceEntity(ImageProcessingEntity):
"""Base entity class for face image processing."""
def __init__(self):
"""Initialize base face identify/verify entity."""
self.faces = []
self.total_faces = 0
@property
def state(self):
"""Return the state of the entity."""
confidence = 0
state = None
# No confidence support
if not self.confidence:
return self.total_faces
# Search high confidence
for face in self.faces:
if ATTR_CONFIDENCE not in face:
continue
f_co = face[ATTR_CONFIDENCE]
if f_co > confidence:
confidence = f_co
for attr in [ATTR_NAME, ATTR_MOTION]:
if attr in face:
state = face[attr]
break
return state
@property
def device_class(self):
"""Return the class of this device, from component DEVICE_CLASSES."""
return "face"
@property
def state_attributes(self):
"""Return device specific state attributes."""
return {ATTR_FACES: self.faces, ATTR_TOTAL_FACES: self.total_faces}
def process_faces(self, faces, total):
"""Send event with detected faces and store data."""
run_callback_threadsafe(
self.hass.loop, self.async_process_faces, faces, total
).result()
@callback
def async_process_faces(self, faces, total):
"""Send event with detected faces and store data.
        faces is a list of dicts in the following format:
[
{
ATTR_CONFIDENCE: 80,
ATTR_NAME: 'Name',
ATTR_AGE: 12.0,
ATTR_GENDER: 'man',
ATTR_MOTION: 'smile',
ATTR_GLASSES: 'sunglasses'
},
]
This method must be run in the event loop.
"""
# Send events
for face in faces:
if ATTR_CONFIDENCE in face and self.confidence:
if face[ATTR_CONFIDENCE] < self.confidence:
continue
face.update({ATTR_ENTITY_ID: self.entity_id})
self.hass.async_add_job(self.hass.bus.async_fire, EVENT_DETECT_FACE, face)
# Update entity store
self.faces = faces
self.total_faces = total
| tboyce021/home-assistant | homeassistant/components/image_processing/__init__.py | Python | apache-2.0 | 6,082 | 0.000658 |
import json
import logging
import traceback
from lxml import etree
from xmodule.timeinfo import TimeInfo
from xmodule.capa_module import ComplexEncoder
from xmodule.progress import Progress
from xmodule.stringify import stringify_children
from xmodule.open_ended_grading_classes import self_assessment_module
from xmodule.open_ended_grading_classes import open_ended_module
from functools import partial
from .combined_open_ended_rubric import CombinedOpenEndedRubric, GRADER_TYPE_IMAGE_DICT, HUMAN_GRADER_TYPE, LEGEND_LIST
from xmodule.open_ended_grading_classes.peer_grading_service import PeerGradingService, MockPeerGradingService, GradingServiceError
from xmodule.open_ended_grading_classes.openendedchild import OpenEndedChild
log = logging.getLogger("edx.courseware")
# Set the default number of max attempts. Should be 1 for production
# Set higher for debugging/testing
# attempts specified in xml definition overrides this.
MAX_ATTEMPTS = 1
# The highest score allowed for the overall xmodule and for each rubric point
MAX_SCORE_ALLOWED = 50
# If true, default behavior is to score module as a practice problem. Otherwise, no grade at all is shown in progress
# Metadata overrides this.
IS_SCORED = False
# If true, then default behavior is to require a file upload or pasted link from a student for this problem.
# Metadata overrides this.
ACCEPT_FILE_UPLOAD = False
# Contains all reasonable bool and case combinations of True
TRUE_DICT = ["True", True, "TRUE", "true"]
HUMAN_TASK_TYPE = {
'selfassessment': "Self",
'openended': "edX",
'ml_grading.conf': "AI",
'peer_grading.conf': "Peer",
}
HUMAN_STATES = {
    'initial': "Not started.",
'assessing': "Being scored.",
'intermediate_done': "Scoring finished.",
'done': "Complete.",
}
# Default value that controls whether or not to skip basic spelling checks in the controller
# Metadata overrides this
SKIP_BASIC_CHECKS = False
class CombinedOpenEndedV1Module():
"""
This is a module that encapsulates all open ended grading (self assessment, peer assessment, etc).
It transitions between problems, and support arbitrary ordering.
Each combined open ended module contains one or multiple "child" modules.
Child modules track their own state, and can transition between states. They also implement get_html and
handle_ajax.
    The combined open ended module transitions between child modules as appropriate, tracks its own state, and passes
ajax requests from the browser to the child module or handles them itself (in the cases of reset and next problem)
ajax actions implemented by all children are:
'save_answer' -- Saves the student answer
'save_assessment' -- Saves the student assessment (or external grader assessment)
'save_post_assessment' -- saves a post assessment (hint, feedback on feedback, etc)
ajax actions implemented by combined open ended module are:
    'reset' -- resets the whole combined open ended module and returns to the first child module
'next_problem' -- moves to the next child module
Types of children. Task is synonymous with child module, so each combined open ended module
incorporates multiple children (tasks):
openendedmodule
selfassessmentmodule
"""
STATE_VERSION = 1
# states
INITIAL = 'initial'
ASSESSING = 'assessing'
INTERMEDIATE_DONE = 'intermediate_done'
DONE = 'done'
# Where the templates live for this problem
TEMPLATE_DIR = "combinedopenended"
def __init__(self, system, location, definition, descriptor,
instance_state=None, shared_state=None, metadata=None, static_data=None, **kwargs):
"""
Definition file should have one or many task blocks, a rubric block, and a prompt block. See DEFAULT_DATA in combined_open_ended_module for a sample.
"""
self.instance_state = instance_state
self.display_name = instance_state.get('display_name', "Open Ended")
# We need to set the location here so the child modules can use it
system.set('location', location)
self.system = system
# Tells the system which xml definition to load
self.current_task_number = instance_state.get('current_task_number', 0)
# This loads the states of the individual children
self.task_states = instance_state.get('task_states', [])
#This gets any old task states that have been persisted after the instructor changed the tasks.
self.old_task_states = instance_state.get('old_task_states', [])
# Overall state of the combined open ended module
self.state = instance_state.get('state', self.INITIAL)
self.student_attempts = instance_state.get('student_attempts', 0)
self.weight = instance_state.get('weight', 1)
# Allow reset is true if student has failed the criteria to move to the next child task
self.ready_to_reset = instance_state.get('ready_to_reset', False)
self.max_attempts = instance_state.get('max_attempts', MAX_ATTEMPTS)
self.is_scored = instance_state.get('graded', IS_SCORED) in TRUE_DICT
self.accept_file_upload = instance_state.get('accept_file_upload', ACCEPT_FILE_UPLOAD) in TRUE_DICT
self.skip_basic_checks = instance_state.get('skip_spelling_checks', SKIP_BASIC_CHECKS) in TRUE_DICT
if system.open_ended_grading_interface:
self.peer_gs = PeerGradingService(system.open_ended_grading_interface, system)
else:
self.peer_gs = MockPeerGradingService()
self.required_peer_grading = instance_state.get('required_peer_grading', 3)
self.peer_grader_count = instance_state.get('peer_grader_count', 3)
self.min_to_calibrate = instance_state.get('min_to_calibrate', 3)
self.max_to_calibrate = instance_state.get('max_to_calibrate', 6)
self.peer_grade_finished_submissions_when_none_pending = instance_state.get(
'peer_grade_finished_submissions_when_none_pending', False
)
due_date = instance_state.get('due', None)
grace_period_string = instance_state.get('graceperiod', None)
try:
self.timeinfo = TimeInfo(due_date, grace_period_string)
except Exception:
log.error("Error parsing due date information in location {0}".format(location))
raise
self.display_due_date = self.timeinfo.display_due_date
self.rubric_renderer = CombinedOpenEndedRubric(system, True)
rubric_string = stringify_children(definition['rubric'])
self._max_score = self.rubric_renderer.check_if_rubric_is_parseable(rubric_string, location, MAX_SCORE_ALLOWED)
# Static data is passed to the child modules to render
self.static_data = {
'max_score': self._max_score,
'max_attempts': self.max_attempts,
'prompt': definition['prompt'],
'rubric': definition['rubric'],
'display_name': self.display_name,
'accept_file_upload': self.accept_file_upload,
'close_date': self.timeinfo.close_date,
's3_interface': self.system.s3_interface,
'skip_basic_checks': self.skip_basic_checks,
'control': {
'required_peer_grading': self.required_peer_grading,
'peer_grader_count': self.peer_grader_count,
'min_to_calibrate': self.min_to_calibrate,
'max_to_calibrate': self.max_to_calibrate,
'peer_grade_finished_submissions_when_none_pending': (
self.peer_grade_finished_submissions_when_none_pending
),
}
}
self.task_xml = definition['task_xml']
self.location = location
self.fix_invalid_state()
self.setup_next_task()
def validate_task_states(self, tasks_xml, task_states):
"""
Check whether the provided task_states are valid for the supplied task_xml.
Returns a list of messages indicating what is invalid about the state.
If the list is empty, then the state is valid
"""
msgs = []
#Loop through each task state and make sure it matches the xml definition
for task_xml, task_state in zip(tasks_xml, task_states):
tag_name = self.get_tag_name(task_xml)
children = self.child_modules()
task_descriptor = children['descriptors'][tag_name](self.system)
task_parsed_xml = task_descriptor.definition_from_xml(etree.fromstring(task_xml), self.system)
try:
task = children['modules'][tag_name](
self.system,
self.location,
task_parsed_xml,
task_descriptor,
self.static_data,
instance_state=task_state,
)
#Loop through each attempt of the task and see if it is valid.
for attempt in task.child_history:
if "post_assessment" not in attempt:
continue
post_assessment = attempt['post_assessment']
try:
post_assessment = json.loads(post_assessment)
except ValueError:
#This is okay, the value may or may not be json encoded.
pass
if tag_name == "openended" and isinstance(post_assessment, list):
msgs.append("Type is open ended and post assessment is a list.")
break
elif tag_name == "selfassessment" and not isinstance(post_assessment, list):
msgs.append("Type is self assessment and post assessment is not a list.")
break
#See if we can properly render the task. Will go into the exception clause below if not.
task.get_html(self.system)
except Exception:
#If one task doesn't match, the state is invalid.
msgs.append("Could not parse task with xml {xml!r} and states {state!r}: {err}".format(
xml=task_xml,
state=task_state,
err=traceback.format_exc()
))
break
return msgs
def is_initial_child_state(self, task_child):
"""
Returns true if this is a child task in an initial configuration
"""
task_child = json.loads(task_child)
return (
task_child['child_state'] == self.INITIAL and
task_child['child_history'] == []
)
def is_reset_task_states(self, task_state):
"""
Returns True if this task_state is from something that was just reset
"""
return all(self.is_initial_child_state(child) for child in task_state)
def states_sort_key(self, idx_task_states):
"""
Return a key for sorting a list of indexed task_states, by how far the student got
through the tasks, what their highest score was, and then the index of the submission.
"""
idx, task_states = idx_task_states
state_values = {
OpenEndedChild.INITIAL: 0,
OpenEndedChild.ASSESSING: 1,
OpenEndedChild.POST_ASSESSMENT: 2,
OpenEndedChild.DONE: 3
}
if not task_states:
return (0, 0, state_values[OpenEndedChild.INITIAL], idx)
final_task_xml = self.task_xml[-1]
final_child_state_json = task_states[-1]
final_child_state = json.loads(final_child_state_json)
tag_name = self.get_tag_name(final_task_xml)
children = self.child_modules()
task_descriptor = children['descriptors'][tag_name](self.system)
task_parsed_xml = task_descriptor.definition_from_xml(etree.fromstring(final_task_xml), self.system)
task = children['modules'][tag_name](
self.system,
self.location,
task_parsed_xml,
task_descriptor,
self.static_data,
instance_state=final_child_state_json,
)
scores = task.all_scores()
if scores:
best_score = max(scores)
else:
best_score = 0
return (
len(task_states),
best_score,
state_values.get(final_child_state.get('child_state', OpenEndedChild.INITIAL), 0),
idx
)
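    # Worked example (hypothetical data): an untouched submission (0, []) sorts with key
    # (0, 0, 0, 0), while (1, [state_a, state_b]) sorts with key (2, best_score, s, 1),
    # where s is 0/1/2/3 for initial/assessing/post_assessment/done on the final task.
    # Sorting these keys with reverse=True therefore prefers the submission that got
    # furthest, then the one with the highest score, then the most recently stored one.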
def fix_invalid_state(self):
"""
Sometimes a teacher will change the xml definition of a problem in Studio.
This means that the state passed to the module is invalid.
If that is the case, moved it to old_task_states and delete task_states.
"""
        # If the current task number is greater than the number of task states or task
        # definitions available, our state is invalid and must be corrected.
if self.current_task_number > len(self.task_states) or self.current_task_number > len(self.task_xml):
self.current_task_number = max(min(len(self.task_states), len(self.task_xml)) - 1, 0)
#If the length of the task xml is less than the length of the task states, state is invalid
if len(self.task_xml) < len(self.task_states):
self.current_task_number = len(self.task_xml) - 1
self.task_states = self.task_states[:len(self.task_xml)]
if not self.old_task_states and not self.task_states:
# No validation needed when a student first looks at the problem
return
# Pick out of self.task_states and self.old_task_states the state that is
# a) valid for the current task definition
# b) not the result of a reset due to not having a valid task state
# c) has the highest total score
# d) is the most recent (if the other two conditions are met)
valid_states = [
task_states
for task_states
in self.old_task_states + [self.task_states]
if (
len(self.validate_task_states(self.task_xml, task_states)) == 0 and
not self.is_reset_task_states(task_states)
)
]
# If there are no valid states, don't try and use an old state
if len(valid_states) == 0:
# If this isn't an initial task state, then reset to an initial state
if not self.is_reset_task_states(self.task_states):
self.reset_task_state('\n'.join(self.validate_task_states(self.task_xml, self.task_states)))
return
sorted_states = sorted(enumerate(valid_states), key=self.states_sort_key, reverse=True)
idx, best_task_states = sorted_states[0]
if best_task_states == self.task_states:
return
log.warning(
"Updating current task state for %s to %r for student with anonymous id %r",
self.system.location,
best_task_states,
self.system.anonymous_student_id
)
self.old_task_states.remove(best_task_states)
self.old_task_states.append(self.task_states)
self.task_states = best_task_states
# The state is ASSESSING unless all of the children are done, or all
# of the children haven't been started yet
children = [json.loads(child) for child in best_task_states]
if all(child['child_state'] == self.DONE for child in children):
self.state = self.DONE
elif all(child['child_state'] == self.INITIAL for child in children):
self.state = self.INITIAL
else:
self.state = self.ASSESSING
# The current task number is the index of the last completed child + 1,
# limited by the number of tasks
last_completed_child = next((i for i, child in reversed(list(enumerate(children))) if child['child_state'] == self.DONE), 0)
self.current_task_number = min(last_completed_child + 1, len(best_task_states) - 1)
def reset_task_state(self, message=""):
"""
Resets the task states. Moves current task state to an old_state variable, and then makes the task number 0.
:param message: A message to put in the log.
:return: None
"""
info_message = "Combined open ended user state for user {0} in location {1} was invalid. It has been reset, and you now have a new attempt. {2}".format(self.system.anonymous_student_id, self.location.url(), message)
self.current_task_number = 0
self.student_attempts = 0
self.old_task_states.append(self.task_states)
self.task_states = []
log.info(info_message)
def get_tag_name(self, xml):
"""
Gets the tag name of a given xml block.
Input: XML string
Output: The name of the root tag
"""
tag = etree.fromstring(xml).tag
return tag
def overwrite_state(self, current_task_state):
"""
Overwrites an instance state and sets the latest response to the current response. This is used
to ensure that the student response is carried over from the first child to the rest.
Input: Task state json string
Output: Task state json string
"""
last_response_data = self.get_last_response(self.current_task_number - 1)
last_response = last_response_data['response']
loaded_task_state = json.loads(current_task_state)
if loaded_task_state['child_state'] == self.INITIAL:
loaded_task_state['child_state'] = self.ASSESSING
loaded_task_state['child_created'] = True
loaded_task_state['child_history'].append({'answer': last_response})
current_task_state = json.dumps(loaded_task_state)
return current_task_state
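    # Illustrative sketch (abridged JSON): a fresh child state such as
    #     {"child_state": "initial", "child_history": []}
    # is rewritten to
    #     {"child_state": "assessing", "child_created": true,
    #      "child_history": [{"answer": "<previous task's response>"}]}
    # so the answer from the previous task is carried into the next one.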
def child_modules(self):
"""
Returns the constructors associated with the child modules in a dictionary. This makes writing functions
simpler (saves code duplication)
Input: None
Output: A dictionary of dictionaries containing the descriptor functions and module functions
"""
child_modules = {
'openended': open_ended_module.OpenEndedModule,
'selfassessment': self_assessment_module.SelfAssessmentModule,
}
child_descriptors = {
'openended': open_ended_module.OpenEndedDescriptor,
'selfassessment': self_assessment_module.SelfAssessmentDescriptor,
}
children = {
'modules': child_modules,
'descriptors': child_descriptors,
}
return children
def setup_next_task(self, reset=False):
"""
Sets up the next task for the module. Creates an instance state if none exists, carries over the answer
from the last instance state to the next if needed.
Input: A boolean indicating whether or not the reset function is calling.
Output: Boolean True (not useful right now)
"""
current_task_state = None
if len(self.task_states) > self.current_task_number:
current_task_state = self.task_states[self.current_task_number]
self.current_task_xml = self.task_xml[self.current_task_number]
if self.current_task_number > 0:
self.ready_to_reset = self.check_allow_reset()
if self.ready_to_reset:
self.current_task_number = self.current_task_number - 1
current_task_type = self.get_tag_name(self.current_task_xml)
children = self.child_modules()
child_task_module = children['modules'][current_task_type]
self.current_task_descriptor = children['descriptors'][current_task_type](self.system)
# This is the xml object created from the xml definition of the current task
etree_xml = etree.fromstring(self.current_task_xml)
# This sends the etree_xml object through the descriptor module of the current task, and
# returns the xml parsed by the descriptor
self.current_task_parsed_xml = self.current_task_descriptor.definition_from_xml(etree_xml, self.system)
if current_task_state is None and self.current_task_number == 0:
self.current_task = child_task_module(self.system, self.location,
self.current_task_parsed_xml, self.current_task_descriptor,
self.static_data)
self.task_states.append(self.current_task.get_instance_state())
self.state = self.ASSESSING
elif current_task_state is None and self.current_task_number > 0:
last_response_data = self.get_last_response(self.current_task_number - 1)
last_response = last_response_data['response']
current_task_state = json.dumps({
'child_state': self.ASSESSING,
'version': self.STATE_VERSION,
'max_score': self._max_score,
'child_attempts': 0,
'child_created': True,
'child_history': [{'answer': last_response}],
})
self.current_task = child_task_module(self.system, self.location,
self.current_task_parsed_xml, self.current_task_descriptor,
self.static_data,
instance_state=current_task_state)
self.task_states.append(self.current_task.get_instance_state())
self.state = self.ASSESSING
else:
if self.current_task_number > 0 and not reset:
current_task_state = self.overwrite_state(current_task_state)
self.current_task = child_task_module(self.system, self.location,
self.current_task_parsed_xml, self.current_task_descriptor,
self.static_data,
instance_state=current_task_state)
return True
def check_allow_reset(self):
"""
Checks to see if the student has passed the criteria to move to the next module. If not, sets
allow_reset to true and halts the student progress through the tasks.
Input: None
Output: the allow_reset attribute of the current module.
"""
if not self.ready_to_reset:
if self.current_task_number > 0:
last_response_data = self.get_last_response(self.current_task_number - 1)
current_response_data = self.get_current_attributes(self.current_task_number)
if (current_response_data['min_score_to_attempt'] > last_response_data['score']
or current_response_data['max_score_to_attempt'] < last_response_data['score']):
self.state = self.DONE
self.ready_to_reset = True
return self.ready_to_reset
def get_context(self):
"""
Generates a context dictionary that is used to render html.
Input: None
Output: A dictionary that can be rendered into the combined open ended template.
"""
task_html = self.get_html_base()
# set context variables and render template
context = {
'items': [{'content': task_html}],
'ajax_url': self.system.ajax_url,
'allow_reset': self.ready_to_reset,
'state': self.state,
'task_count': len(self.task_xml),
'task_number': self.current_task_number + 1,
'status': self.get_status(False),
'display_name': self.display_name,
'accept_file_upload': self.accept_file_upload,
'location': self.location,
'legend_list': LEGEND_LIST,
'human_state': HUMAN_STATES.get(self.state, "Not started."),
'is_staff': self.system.user_is_staff,
}
return context
def get_html(self):
"""
Gets HTML for rendering.
Input: None
Output: rendered html
"""
context = self.get_context()
html = self.system.render_template('{0}/combined_open_ended.html'.format(self.TEMPLATE_DIR), context)
return html
def get_html_nonsystem(self):
"""
Gets HTML for rendering via AJAX. Does not use system, because system contains some additional
html, which is not appropriate for returning via ajax calls.
Input: None
Output: HTML rendered directly via Mako
"""
context = self.get_context()
html = self.system.render_template('{0}/combined_open_ended.html'.format(self.TEMPLATE_DIR), context)
return html
def get_html_base(self):
"""
Gets the HTML associated with the current child task
Input: None
Output: Child task HTML
"""
self.update_task_states()
return self.current_task.get_html(self.system)
def get_html_ajax(self, data):
"""
Get HTML in AJAX callback
data - Needed to preserve AJAX structure
Output: Dictionary with html attribute
"""
return {'html': self.get_html()}
def get_current_attributes(self, task_number):
"""
Gets the min and max score to attempt attributes of the specified task.
Input: The number of the task.
Output: The minimum and maximum scores needed to move on to the specified task.
"""
task_xml = self.task_xml[task_number]
etree_xml = etree.fromstring(task_xml)
min_score_to_attempt = int(etree_xml.attrib.get('min_score_to_attempt', 0))
max_score_to_attempt = int(etree_xml.attrib.get('max_score_to_attempt', self._max_score))
return {'min_score_to_attempt': min_score_to_attempt, 'max_score_to_attempt': max_score_to_attempt}
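    # Illustrative sketch: the thresholds are optional attributes on the task's root tag in
    # the problem XML, e.g.
    #     <openended min_score_to_attempt="1" max_score_to_attempt="3"> ... </openended>
    # Missing attributes fall back to 0 and the rubric maximum respectively.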
def get_last_response(self, task_number):
"""
Returns data associated with the specified task number, such as the last response, score, etc.
Input: The number of the task.
Output: A dictionary that contains information about the specified task.
"""
last_response = ""
task_state = self.task_states[task_number]
task_xml = self.task_xml[task_number]
task_type = self.get_tag_name(task_xml)
children = self.child_modules()
task_descriptor = children['descriptors'][task_type](self.system)
etree_xml = etree.fromstring(task_xml)
min_score_to_attempt = int(etree_xml.attrib.get('min_score_to_attempt', 0))
max_score_to_attempt = int(etree_xml.attrib.get('max_score_to_attempt', self._max_score))
task_parsed_xml = task_descriptor.definition_from_xml(etree_xml, self.system)
task = children['modules'][task_type](self.system, self.location, task_parsed_xml, task_descriptor,
self.static_data, instance_state=task_state)
last_response = task.latest_answer()
last_score = task.latest_score()
all_scores = task.all_scores()
last_post_assessment = task.latest_post_assessment(self.system)
last_post_feedback = ""
feedback_dicts = [{}]
grader_ids = [0]
submission_ids = [0]
if task_type == "openended":
last_post_assessment = task.latest_post_assessment(self.system, short_feedback=False, join_feedback=False)
if isinstance(last_post_assessment, list):
eval_list = []
for i in xrange(0, len(last_post_assessment)):
eval_list.append(task.format_feedback_with_evaluation(self.system, last_post_assessment[i]))
last_post_evaluation = "".join(eval_list)
else:
last_post_evaluation = task.format_feedback_with_evaluation(self.system, last_post_assessment)
last_post_assessment = last_post_evaluation
try:
rubric_data = task._parse_score_msg(task.child_history[-1].get('post_assessment', ""), self.system)
except Exception:
log.debug("Could not parse rubric data from child history. "
"Likely we have not yet initialized a previous step, so this is perfectly fine.")
rubric_data = {}
rubric_scores = rubric_data.get('rubric_scores')
grader_types = rubric_data.get('grader_types')
feedback_items = rubric_data.get('feedback_items')
feedback_dicts = rubric_data.get('feedback_dicts')
grader_ids = rubric_data.get('grader_ids')
submission_ids = rubric_data.get('submission_ids')
elif task_type == "selfassessment":
rubric_scores = last_post_assessment
grader_types = ['SA']
feedback_items = ['']
last_post_assessment = ""
last_correctness = task.is_last_response_correct()
max_score = task.max_score()
state = task.child_state
if task_type in HUMAN_TASK_TYPE:
human_task_name = HUMAN_TASK_TYPE[task_type]
else:
human_task_name = task_type
if state in task.HUMAN_NAMES:
human_state = task.HUMAN_NAMES[state]
else:
human_state = state
if grader_types is not None and len(grader_types) > 0:
grader_type = grader_types[0]
else:
grader_type = "IN"
grader_types = ["IN"]
if grader_type in HUMAN_GRADER_TYPE:
human_grader_name = HUMAN_GRADER_TYPE[grader_type]
else:
human_grader_name = grader_type
last_response_dict = {
'response': last_response,
'score': last_score,
'all_scores': all_scores,
'post_assessment': last_post_assessment,
'type': task_type,
'max_score': max_score,
'state': state,
'human_state': human_state,
'human_task': human_task_name,
'correct': last_correctness,
'min_score_to_attempt': min_score_to_attempt,
'max_score_to_attempt': max_score_to_attempt,
'rubric_scores': rubric_scores,
'grader_types': grader_types,
'feedback_items': feedback_items,
'grader_type': grader_type,
'human_grader_type': human_grader_name,
'feedback_dicts': feedback_dicts,
'grader_ids': grader_ids,
'submission_ids': submission_ids,
'success': True
}
return last_response_dict
def extract_human_name_from_task(self, task_xml):
"""
Given the xml for a task, pull out the human name for it.
Input: xml string
Output: a human readable task name (ie Self Assessment)
"""
tree = etree.fromstring(task_xml)
payload = tree.xpath("/openended/openendedparam/grader_payload")
if len(payload) == 0:
task_name = "selfassessment"
else:
inner_payload = json.loads(payload[0].text)
task_name = inner_payload['grader_settings']
human_task = HUMAN_TASK_TYPE[task_name]
return human_task
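    # Illustrative sketch (abridged XML): a task whose grader payload contains
    #     <grader_payload>{"grader_settings": "ml_grading.conf", ...}</grader_payload>
    # maps to the human name "AI" via HUMAN_TASK_TYPE, while a task without a
    # grader_payload element is treated as self assessment and shown as "Self".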
def update_task_states(self):
"""
Updates the task state of the combined open ended module with the task state of the current child module.
Input: None
Output: boolean indicating whether or not the task state changed.
"""
changed = False
if not self.ready_to_reset:
self.task_states[self.current_task_number] = self.current_task.get_instance_state()
current_task_state = json.loads(self.task_states[self.current_task_number])
if current_task_state['child_state'] == self.DONE:
self.current_task_number += 1
if self.current_task_number >= (len(self.task_xml)):
self.state = self.DONE
self.current_task_number = len(self.task_xml) - 1
else:
self.state = self.INITIAL
changed = True
self.setup_next_task()
return changed
def update_task_states_ajax(self, return_html):
"""
Runs the update task states function for ajax calls. Currently the same as update_task_states
Input: The html returned by the handle_ajax function of the child
Output: New html that should be rendered
"""
changed = self.update_task_states()
if changed:
pass
return return_html
def check_if_student_has_done_needed_grading(self):
"""
Checks with the ORA server to see if the student has completed the needed peer grading to be shown their grade.
For example, if a student submits one response, and three peers grade their response, the student
cannot see their grades and feedback unless they reciprocate.
Output:
success - boolean indicator of success
allowed_to_submit - boolean indicator of whether student has done their needed grading or not
error_message - If not success, explains why
"""
student_id = self.system.anonymous_student_id
success = False
allowed_to_submit = True
try:
response = self.peer_gs.get_data_for_location(self.location.url(), student_id)
count_graded = response['count_graded']
count_required = response['count_required']
student_sub_count = response['student_sub_count']
count_available = response['count_available']
success = True
except GradingServiceError:
# This is a dev_facing_error
log.error("Could not contact external open ended graders for location {0} and student {1}".format(
self.location, student_id))
# This is a student_facing_error
error_message = "Could not contact the graders. Please notify course staff."
return success, allowed_to_submit, error_message
except KeyError:
log.error("Invalid response from grading server for location {0} and student {1}".format(self.location, student_id))
error_message = "Received invalid response from the graders. Please notify course staff."
return success, allowed_to_submit, error_message
if count_graded >= count_required or count_available==0:
error_message = ""
return success, allowed_to_submit, error_message
else:
allowed_to_submit = False
# This is a student_facing_error
error_string = ("<h4>Feedback not available yet</h4>"
"<p>You need to peer grade {0} more submissions in order to see your feedback.</p>"
"<p>You have graded responses from {1} students, and {2} students have graded your submissions. </p>"
"<p>You have made {3} submissions.</p>")
error_message = error_string.format(count_required - count_graded, count_graded, count_required,
student_sub_count)
return success, allowed_to_submit, error_message
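    # Worked example (hypothetical counts): with count_required=3, count_graded=1 and
    # submissions still available to grade, allowed_to_submit is False and the student is
    # told to peer grade 2 more submissions; once count_graded reaches count_required (or
    # nothing is left to grade), feedback is unlocked and the error message is empty.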
def get_rubric(self, _data):
"""
Gets the results of a given grader via ajax.
Input: AJAX data dictionary
Output: Dictionary to be rendered via ajax that contains the result html.
"""
all_responses = []
success, can_see_rubric, error = self.check_if_student_has_done_needed_grading()
if not can_see_rubric:
return {
'html': self.system.render_template(
'{0}/combined_open_ended_hidden_results.html'.format(self.TEMPLATE_DIR),
{'error': error}),
'success': True,
'hide_reset': True
}
contexts = []
rubric_number = self.current_task_number
if self.ready_to_reset:
rubric_number+=1
response = self.get_last_response(rubric_number)
score_length = len(response['grader_types'])
for z in xrange(score_length):
if response['grader_types'][z] in HUMAN_GRADER_TYPE:
try:
feedback = response['feedback_dicts'][z].get('feedback', '')
except TypeError:
return {'success' : False}
rubric_scores = [[response['rubric_scores'][z]]]
grader_types = [[response['grader_types'][z]]]
feedback_items = [[response['feedback_items'][z]]]
rubric_html = self.rubric_renderer.render_combined_rubric(stringify_children(self.static_data['rubric']),
rubric_scores,
grader_types, feedback_items)
contexts.append({
'result': rubric_html,
'task_name': 'Scored rubric',
'feedback' : feedback
})
context = {
'results': contexts,
}
html = self.system.render_template('{0}/combined_open_ended_results.html'.format(self.TEMPLATE_DIR), context)
return {'html': html, 'success': True, 'hide_reset' : False}
def get_legend(self, _data):
"""
Gets the results of a given grader via ajax.
Input: AJAX data dictionary
Output: Dictionary to be rendered via ajax that contains the result html.
"""
context = {
'legend_list': LEGEND_LIST,
}
html = self.system.render_template('{0}/combined_open_ended_legend.html'.format(self.TEMPLATE_DIR), context)
return {'html': html, 'success': True}
def handle_ajax(self, dispatch, data):
"""
This is called by courseware.module_render, to handle an AJAX call.
"data" is request.POST.
Returns a json dictionary:
{ 'progress_changed' : True/False,
'progress': 'none'/'in_progress'/'done',
<other request-specific values here > }
"""
handlers = {
'next_problem': self.next_problem,
'reset': self.reset,
'get_combined_rubric': self.get_rubric,
'get_legend': self.get_legend,
'get_last_response': self.get_last_response_ajax,
'get_current_state': self.get_current_state,
'get_html': self.get_html_ajax,
}
if dispatch not in handlers:
return_html = self.current_task.handle_ajax(dispatch, data, self.system)
return self.update_task_states_ajax(return_html)
d = handlers[dispatch](data)
return json.dumps(d, cls=ComplexEncoder)
def get_current_state(self, data):
"""
Gets the current state of the module.
"""
return self.get_context()
def get_last_response_ajax(self, data):
"""
Get the last response via ajax callback
data - Needed to preserve ajax callback structure
Output: Last response dictionary
"""
return self.get_last_response(self.current_task_number)
def next_problem(self, _data):
"""
Called via ajax to advance to the next problem.
Input: AJAX data request.
Output: Dictionary to be rendered
"""
self.update_task_states()
return {'success': True, 'html': self.get_html_nonsystem(), 'allow_reset': self.ready_to_reset}
def reset(self, data):
"""
If resetting is allowed, reset the state of the combined open ended module.
Input: AJAX data dictionary
Output: AJAX dictionary to tbe rendered
"""
if self.state != self.DONE:
if not self.ready_to_reset:
return self.out_of_sync_error(data)
success, can_reset, error = self.check_if_student_has_done_needed_grading()
if not can_reset:
return {'error': error, 'success': False}
if self.student_attempts >= self.max_attempts - 1:
if self.student_attempts == self.max_attempts - 1:
self.student_attempts += 1
return {
'success': False,
# This is a student_facing_error
'error': (
'You have attempted this question {0} times. '
'You are only allowed to attempt it {1} times.'
).format(self.student_attempts, self.max_attempts)
}
self.student_attempts +=1
self.state = self.INITIAL
self.ready_to_reset = False
for i in xrange(len(self.task_xml)):
self.current_task_number = i
self.setup_next_task(reset=True)
self.current_task.reset(self.system)
self.task_states[self.current_task_number] = self.current_task.get_instance_state()
self.current_task_number = 0
self.ready_to_reset = False
self.setup_next_task()
return {'success': True, 'html': self.get_html_nonsystem()}
def get_instance_state(self):
"""
Returns the current instance state. The module can be recreated from the instance state.
Input: None
Output: A dictionary containing the instance state.
"""
state = {
'version': self.STATE_VERSION,
'current_task_number': self.current_task_number,
'state': self.state,
'task_states': self.task_states,
'student_attempts': self.student_attempts,
'ready_to_reset': self.ready_to_reset,
}
return json.dumps(state)
def get_status(self, render_via_ajax):
"""
Gets the status panel to be displayed at the top right.
Input: None
Output: The status html to be rendered
"""
status = []
current_task_human_name = ""
for i in xrange(0, len(self.task_xml)):
human_task_name = self.extract_human_name_from_task(self.task_xml[i])
# Extract the name of the current task for screen readers.
if self.current_task_number == i:
current_task_human_name = human_task_name
task_data = {'task_number': i + 1, 'human_task': human_task_name, 'current': self.current_task_number==i}
status.append(task_data)
context = {
'status_list': status,
'grader_type_image_dict': GRADER_TYPE_IMAGE_DICT,
'legend_list': LEGEND_LIST,
'render_via_ajax': render_via_ajax,
'current_task_human_name': current_task_human_name,
}
status_html = self.system.render_template("{0}/combined_open_ended_status.html".format(self.TEMPLATE_DIR),
context)
return status_html
def check_if_done_and_scored(self):
"""
Checks if the object is currently in a finished state (either student didn't meet criteria to move
to next step, in which case they are in the allow_reset state, or they are done with the question
entirely, in which case they will be in the self.DONE state), and if it is scored or not.
@return: Boolean corresponding to the above.
"""
return (self.state == self.DONE or self.ready_to_reset) and self.is_scored
def get_weight(self):
"""
Return the weight of the problem. The old default weight was None, so set to 1 in that case.
Output - int weight
"""
weight = self.weight
if weight is None:
weight = 1
return weight
def get_score(self):
"""
Score the student received on the problem, or None if there is no
score.
Returns:
dictionary
{'score': integer, from 0 to get_max_score(),
'total': get_max_score()}
"""
max_score = None
score = None
#The old default was None, so set to 1 if it is the old default weight
weight = self.get_weight()
if self.is_scored:
# Finds the maximum score of all student attempts and keeps it.
score_mat = []
for i in xrange(0, len(self.task_states)):
# For each task, extract all student scores on that task (each attempt for each task)
last_response = self.get_last_response(i)
score = last_response.get('all_scores', None)
if score is not None:
# Convert none scores and weight scores properly
for z in xrange(0, len(score)):
if score[z] is None:
score[z] = 0
score[z] *= float(weight)
score_mat.append(score)
if len(score_mat) > 0:
# Currently, assume that the final step is the correct one, and that those are the final scores.
# This will change in the future, which is why the machinery above exists to extract all scores on all steps
scores = score_mat[-1]
score = max(scores)
else:
score = 0
if self._max_score is not None:
# Weight the max score if it is not None
max_score = self._max_score * float(weight)
else:
# Without a max_score, we cannot have a score!
score = None
score_dict = {
'score': score,
'total': max_score,
}
return score_dict
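    # Worked example (hypothetical numbers): with weight=2, _max_score=4 and final-task
    # attempt scores [1, 3], the weighted scores are [2.0, 6.0], the best attempt is kept,
    # and the method returns {'score': 6.0, 'total': 8.0}.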
def max_score(self):
"""
Maximum score possible in this module. Returns the max score if finished, None if not.
"""
max_score = None
if self.check_if_done_and_scored():
max_score = self._max_score
return max_score
def get_progress(self):
"""
Generate a progress object. Progress objects represent how far the
student has gone in this module. Must be implemented to get correct
progress tracking behavior in nested modules like sequence and
vertical. This behavior is consistent with capa.
If the module is unscored, return None (consistent with capa).
"""
d = self.get_score()
if d['total'] > 0 and self.is_scored:
try:
return Progress(d['score'], d['total'])
except (TypeError, ValueError):
log.exception("Got bad progress")
return None
return None
def out_of_sync_error(self, data, msg=''):
"""
return dict out-of-sync error message, and also log.
"""
#This is a dev_facing_error
log.warning("Combined module state out sync. state: %r, data: %r. %s",
self.state, data, msg)
#This is a student_facing_error
return {'success': False,
'error': 'The problem state got out-of-sync. Please try reloading the page.'}
class CombinedOpenEndedV1Descriptor():
"""
Module for adding combined open ended questions
"""
mako_template = "widgets/html-edit.html"
module_class = CombinedOpenEndedV1Module
filename_extension = "xml"
has_score = True
def __init__(self, system):
self.system = system
@classmethod
def definition_from_xml(cls, xml_object, system):
"""
Pull out the individual tasks, the rubric, and the prompt, and parse
Returns:
{
'rubric': 'some-html',
'prompt': 'some-html',
'task_xml': dictionary of xml strings,
}
"""
expected_children = ['task', 'rubric', 'prompt']
for child in expected_children:
if len(xml_object.xpath(child)) == 0:
# This is a staff_facing_error
raise ValueError(
"Combined Open Ended definition must include at least one '{0}' tag. Contact the learning sciences group for assistance. {1}".format(
child, xml_object))
def parse_task(k):
"""Assumes that xml_object has child k"""
return [stringify_children(xml_object.xpath(k)[i]) for i in xrange(0, len(xml_object.xpath(k)))]
def parse(k):
"""Assumes that xml_object has child k"""
return xml_object.xpath(k)[0]
return {'task_xml': parse_task('task'), 'prompt': parse('prompt'), 'rubric': parse('rubric')}
def definition_to_xml(self, resource_fs):
'''Return an xml element representing this definition.'''
elt = etree.Element('combinedopenended')
def add_child(k):
child_str = '<{tag}>{body}</{tag}>'.format(tag=k, body=self.definition[k])
child_node = etree.fromstring(child_str)
elt.append(child_node)
for child in ['task']:
add_child(child)
return elt
| TangXT/GreatCatMOOC | common/lib/xmodule/xmodule/open_ended_grading_classes/combined_open_ended_modulev1.py | Python | agpl-3.0 | 49,426 | 0.00346 |
# -*- coding:utf-8 -*-
from torcms.core import tools
from torcms.model.entity_model import MEntity
class TestMEntity():
def setup(self):
        print('setup method runs before every test case in this class')
self.uid = tools.get_uuid()
self.path = '/static/123123'
def test_create_entity(self):
uid = self.uid
path = self.path
desc = 'create entity'
kind = 'f'
tt = MEntity.create_entity(uid, path, desc, kind)
assert tt == True
self.tearDown()
def add_message(self):
desc = 'create entity'
kind = 'f'
MEntity.create_entity(self.uid, self.path, desc, kind)
def test_query_recent(self):
a = MEntity.get_by_uid(self.uid)
assert a == None
self.add_message()
a = MEntity.get_by_uid(self.uid)
assert a
self.tearDown()
def test_query_all(self):
self.add_message()
a = MEntity.query_all()
tf = False
for i in a:
if i.uid == self.uid:
tf = True
assert tf
self.tearDown()
def test_get_by_kind(self):
self.add_message()
a = MEntity.get_by_kind(kind='f')
tf = False
for i in a:
if i.uid == self.uid:
tf = True
assert tf
self.tearDown()
def test_get_all_pager(self):
a = MEntity.get_all_pager()
tf = True
for i in a:
if i.uid == self.uid:
tf = False
assert tf
self.add_message()
a = MEntity.get_all_pager()
tf = False
for i in a:
if i.uid == self.uid:
tf = True
assert tf
self.tearDown()
def test_get_id_by_impath(self):
self.add_message()
path = self.path
a = MEntity.get_id_by_impath(path)
assert a.uid == self.uid
self.tearDown()
def test_total_number(self):
b = MEntity.total_number()
self.add_message()
a = MEntity.total_number()
assert b + 1 <= a
self.tearDown()
def test_delete_by_path(self):
tf = MEntity.get_by_uid(self.uid)
assert tf == None
self.add_message()
tf = MEntity.get_by_uid(self.uid)
assert tf
MEntity.delete_by_path(self.path)
tf = MEntity.get_by_uid(self.uid)
assert tf == None
self.tearDown()
def test_delete(self):
tf = MEntity.get_by_uid(self.uid)
assert tf == None
self.add_message()
tf = MEntity.delete(self.uid)
assert tf
tf = MEntity.get_by_uid(self.uid)
assert tf == None
self.tearDown()
def tearDown(self):
print("function teardown")
tt = MEntity.get_by_uid(self.uid)
if tt:
MEntity.delete(tt.uid)
| bukun/TorCMS | tester/test_model/test_entity.py | Python | mit | 2,865 | 0.002115 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2012 Jérémie DECOCK (http://www.jdhp.org)
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# See: http://web.archive.org/web/20120426224840/http://zetcode.com/tutorials/pyqt4/widgets2
import sys
from PyQt4 import QtGui, QtCore
class Window(QtGui.QWidget):
def __init__(self):
super(Window, self).__init__()
# Create a label
label = QtGui.QLabel("Press Esc to quit.")
# Create the layout
vbox = QtGui.QVBoxLayout()
vbox.addWidget(label)
# Set the layout
self.setLayout(vbox)
self.resize(250, 150)
self.setWindowTitle('Hello')
#self.show()
self.showFullScreen() # <- Full screen
def keyPressEvent(self, e):
if e.key() == QtCore.Qt.Key_Escape:
self.close()
def main():
"""Main function"""
app = QtGui.QApplication(sys.argv)
# The default constructor has no parent.
# A widget with no parent is a window.
window = Window()
# The mainloop of the application. The event handling starts from this point.
    # The exec_() method has a trailing underscore because exec is a reserved keyword in Python.
exit_code = app.exec_()
# The sys.exit() method ensures a clean exit.
    # The environment will be informed of how the application ended.
sys.exit(exit_code)
if __name__ == '__main__':
main()
| jeremiedecock/snippets | python/pyqt/pyqt4/fullscreen.py | Python | mit | 2,472 | 0.004049 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-01-18 20:25
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('products', '0056_auto_20160118_2012'),
]
operations = [
migrations.AddField(
model_name='bandsawbladeindicator',
name='AE',
field=models.CharField(blank=True, max_length=255, verbose_name='Ä'),
),
migrations.AddField(
model_name='bandsawbladeindicator',
name='F',
field=models.CharField(blank=True, max_length=255, verbose_name='F'),
),
migrations.AddField(
model_name='bandsawbladeindicator',
name='RP',
field=models.CharField(blank=True, max_length=255, verbose_name='RP'),
),
migrations.AddField(
model_name='bandsawbladeindicator',
name='S',
field=models.CharField(blank=True, max_length=255, verbose_name='S'),
),
migrations.AddField(
model_name='bandsawbladeindicator',
name='SP',
field=models.CharField(blank=True, max_length=255, verbose_name='SP'),
),
migrations.AddField(
model_name='bandsawbladeindicator',
name='TP',
field=models.CharField(blank=True, max_length=255, verbose_name='TP'),
),
migrations.AddField(
model_name='bandsawbladeindicator',
name='UE',
field=models.CharField(blank=True, max_length=255, verbose_name='Ü'),
),
migrations.AddField(
model_name='bandsawbladeindicator',
name='UP',
field=models.CharField(blank=True, max_length=255, verbose_name='UP'),
),
migrations.AddField(
model_name='bandsawbladeindicator',
name='VP',
field=models.CharField(blank=True, max_length=255, verbose_name='VP'),
),
migrations.AddField(
model_name='bandsawbladeindicator',
name='WP',
field=models.CharField(blank=True, max_length=255, verbose_name='WP'),
),
migrations.AddField(
model_name='bandsawbladeindicator',
name='star_p',
field=models.CharField(blank=True, max_length=255, verbose_name='*P'),
),
]
| n2o/guhema | products/migrations/0057_auto_20160118_2025.py | Python | mit | 2,432 | 0.004527 |
__author__ = 'croxis'
from flask import Blueprint
user = Blueprint('user', __name__)
from . import views | croxis/kmr | app/user/__init__.py | Python | mit | 107 | 0.018692 |
"""Service layer (domain model) of practice app
"""
| effa/flocs | practice/services/__init__.py | Python | gpl-2.0 | 53 | 0.018868 |
'''
Merge the "tipo", "nombre" and "nombre_alt" fields of every JSON file found
under the current directory into actividades_merged.csv. Run this script from
the directory that contains the JSON files.
'''
import json
import os
final_file = "tipo,nombre,nombre_alt\n"
for root, subFolders, files in os.walk(os.getcwd()):
for filename in files:
filePath = os.path.join(root, filename)
if not filePath.endswith(".json") or filename.startswith("_"):
continue
print (" processing " + filePath)
current_text = ""
with open(filePath, 'r', encoding='utf-8-sig') as readme:
current_text = readme.read()
tmp_file = json.loads(current_text)
nombre_alt = "\"\""
if "nombre_alt" in tmp_file:
nombre_alt = tmp_file["nombre_alt"]
final_file += tmp_file["tipo"] + "," + tmp_file["nombre"] + "," + nombre_alt + "\n"
with open(os.path.join(os.getcwd(),"actividades_merged.csv"), 'w', encoding='utf-8-sig') as saveme:
saveme.writelines(final_file) | eikiu/tdf-actividades | _admin-scripts/jsontocsv(activities-name).py | Python | cc0-1.0 | 814 | 0.045455 |
from tkinter import *
# from tkinter.font import *
import math
CW = 800
CH = 600
SENT_H = 50
Y_OFF = 10
X_OFF = 20
DIM_GAP = 10
DIM_OFF = 70
class Multigraph(Canvas):
"""Canvas for displaying the multigraph for a sentence."""
node_rad = 3
def __init__(self, parent, width=CW, height=CH, nnodes=9,
dims=['En LP', 'En ID', 'Sem', 'Am ID', 'Am LP'],
# dims=['En ID', 'Sem', 'Am ID'],
translation=True):
Canvas.__init__(self, parent, width=width, height=height)
# self.draw_arrow(10, 50, 40, 20, 60, 10, 80, 10, 100, 20, 130, 50)
self.parent = parent
self.width = width
self.height = height
self.translation = translation
self.dim_labels = dims
# Calculate the width, height, and positions of the dimensions
self.get_dim_dims()
# Figure node coordinates
node_dist = self.dim_width / nnodes
node_offsets = [node_dist * (i + .5) for i in range(nnodes)]
## for index, off in enumerate(node_offsets):
## dim1.make_node(index, off,
## filled = (index % 3 != 0),
## eos = (index == nnodes - 1))
## dim2.make_node(index, off,
## eos = (index == nnodes - 1))
## dim3.make_node(index, off,
## eos = (index == nnodes - 1))
self.dims = []
for label, x, y in zip(dims, self.dim_x, self.dim_y):
d = Dimension(self, coords=(x, y), label=label,
width=self.dim_width, height=self.dim_height)
self.dims.append(d)
d.draw()
for index, off in enumerate(node_offsets):
d.make_node(index, off, eos = (index == nnodes - 1))
self.dims[0].make_arc(8, 0, tp='root')
self.dims[0].make_arc(1, 3, tp='sbj')
self.dims[0].make_arc(7, 4, tp='mod')
self.dims[0].make_arc(3, 4, tp='rel')
self.dims[0].make_arc(0, 5, tp='obj')
## dim1.make_arc(8, 1, tp='sbj')
## dim1.make_arc(1, 7, tp='obj', color='gray')
## self.dims = [dim1, dim2, dim3]
self.node_connections = []
self.connect_nodes()
self.sentences = []
in_sent = Sentence(self, ['the', 'woman', 'cleaned', 'the', 'house', 'in', 'the', 'city', '.'],
coords=(self.dim_x[0], 580),
width=self.dim_width)
in_sent.draw()
self.sentences.append(in_sent)
## self.connect_sent(in_sent, dim1)
out_sent = Sentence(self, ["እከተማዋ", "ያለውን", "ቤት", "ሴቷ", "ጠረገችው", "።"],
node_indices=[7, 5, 4, 1, 2, 8],
coords=(self.dim_x[-1], 20),
width=self.dim_width)
out_sent.draw()
self.sentences.append(out_sent)
## self.connect_sent(out_sent, dim3)
# self.draw_arrow(10, 80, 80, 20, 150, 80)
# self.draw_arc_label((80, 50), 'sbj')
def get_dim_dims(self):
# Calculate the width, height, and positions of the dimensions
w = self.width - 2 * X_OFF
h = self.height - SENT_H - 2 * Y_OFF
if self.translation:
h -= SENT_H
ndims = len(self.dim_labels)
# Width of dimensions
x_off = DIM_OFF * (ndims - 1)
w_sum = w - x_off
w1 = w_sum # / ndims
# print('Dim w {}'.format(w1))
# Height of dimensions
y_off = DIM_GAP * (ndims - 1)
h_sum = h - y_off
h1 = h_sum / ndims
# print('Dim h {}'.format(h1))
# Figure out the x coordinates of dimensions
x_coords = []
x = X_OFF
for d in self.dim_labels:
x_coords.append(x)
x += DIM_OFF
# Figure out the y coordinates of dimensions
y_coords = []
y = self.height - SENT_H - Y_OFF - h1
for d in self.dim_labels:
y_coords.append(y)
y -= DIM_GAP + h1
self.dim_width = w1
self.dim_height = h1
self.dim_x = x_coords
self.dim_y = y_coords
def connect_nodes(self):
for index, dim in enumerate(self.dims[:-1]):
next_dim = self.dims[index + 1]
for node1, node2 in zip(dim.nodes, next_dim.nodes):
cx1, cy1 = node1.center
cx2, cy2 = node2.center
c_id = self.create_line(cx1, cy1, cx2, cy2,
dash=(3,3))
self.node_connections.append(c_id)
def connect_sent(self, sent, dim):
dim_nodes = dim.nodes
nodes = [dim_nodes[index] for index in sent.node_indices]
for word, node in zip(sent.ids, nodes):
wx, wy = self.coords(word)
nx, ny = node.center
self.create_line(wx, wy, nx, ny, dash=(1, 3))
class Dimension:
"""Graphical representation of an XDG dimension."""
Y_OFF = 15
def __init__(self, canvas, coords=(50, 50), width=500,
height=160, color='black', label='ID'):
self.canvas = canvas
self.color = color
self.label = label
self.coords = coords
self.width = width
self.height = height
self.h2w = self.height / self.width
# print('h2w {}'.format(self.h2w))
self.nodes = []
def draw(self):
c0, c1 = self.coords
self.id = self.canvas.create_rectangle(c0, c1, c0 + self.width, c1 + self.height)
if self.label:
self.make_label()
def make_label(self):
x = self.coords[0] + 25
y = self.coords[1] + 10
self.label_id = self.canvas.create_text(x, y, text=self.label,
font = ("Helvetica", "14"))
def make_node(self, index, offset, eos=False,
filled=True):
node = Node(self.canvas,
center=(self.coords[0] + offset,
self.coords[1] + self.height - self.Y_OFF),
filled=filled,
index=index,
eos=eos)
self.nodes.append(node)
node.draw()
def make_arc(self, i_head, i_dep, tp='', color='black'):
head = self.nodes[i_head]
dep = self.nodes[i_dep]
right = i_dep > i_head
start = head.get_upper_right() if right else head.get_upper_left()
end = dep.top
# dep.get_upper_left() if right else dep.get_upper_right()
arc = Arc(self.canvas, head, dep, start=start, end=end,
tp=tp, color=color, h2w=1.6 * self.h2w)
arc.draw()
class Node:
"""Graphical representation of an XDG node."""
R = 7
CORNER_OFF = 7 * math.cos(math.radians(45))
def __init__(self, canvas, center=(100, 100), index=0, filled=True, eos=False):
self.canvas = canvas
self.center = center
self.filled = filled
self.index = index
self.eos = eos
self.arcs = []
# upper-left, upper-right,
# lower-right, lower-left
cx, cy = self.center
rad = 2 if self.eos else self.CORNER_OFF
self.corners = [(cx-rad, cy-rad),
(cx+rad , cy-rad ),
(cx+rad , cy+rad ),
(cx-rad , cy+rad )]
self.top = (cx, cy-rad)
self.source = center if self.eos else (cx, cy-rad)
def get_upper_left(self):
return self.corners[0]
def get_upper_right(self):
return self.corners[1]
def draw(self):
x1, y1 = self.corners[0]
x2, y2 = self.corners[2]
if self.eos:
self.id = self.canvas.create_oval(x1, y1, x2, y2, fill='black')
else:
self.id = self.canvas.create_oval(x1, y1, x2, y2,
fill='black' if self.filled else '')
class Arc:
"""Graphical representation of an XDG arc."""
def __init__(self, canvas, head, dep, tp='', color='black',
start=(0,0), end=(100,100), h2w=.625):
self.canvas = canvas
self.head = head
self.dep = dep
self.type = tp
self.start = start
self.end = end
self.midpoint = (0,0)
self.label = None
self.label_center = (0,0)
self.label_id = 0
self.color = color
self.h2w = h2w
self.label_h2w = h2w * .5
def draw(self):
x0, y0 = self.start
x2, y2 = self.end
if x2 > x0:
x1 = x0 + (x2 - x0) / 2
else:
x1 = x2 + (x0 - x2) /2
y1 = y0 - self.h2w * abs(x2 - x0)
self.midpoint = (x1, y1)
self.id = self.canvas.create_line(x0, y0, x1, y1, x2, y2,
smooth=True, arrow='last',
fill=self.color,
splinesteps=24)
if self.type:
self.make_label()
def make_label(self):
self.label = Label(self.canvas, text=self.type,
font=("Courier", "10"))
self.label_center = (self.midpoint[0],
self.start[1] - self.label_h2w * abs(self.start[0] - self.end[0]))
self.label_id = self.canvas.create_window(self.label_center[0],
self.label_center[1],
window=self.label)
# def draw_arc_label(self, center, text=''):
# label = Label(self.parent, text=text)
# cx, cy = center
# self.create_window(cx, cy, window=label)
class Sentence:
"""A sentence being translated or parsed."""
def __init__(self, canvas, words, coords=(0,0),
node_indices=None, inp=True, width=500):
self.canvas = canvas
self.words = words
self.coords = coords
self.width = width
# Input or output sentence
self.inp = inp
# Indices of nodes to join to words
self.node_indices = node_indices if node_indices else range(len(words))
self.ids = []
def draw(self):
gap = self.width / len(self.words)
x, y = self.coords
x += gap / 2
for word in self.words:
id = self.canvas.create_text(x, y,
text=word,
justify=CENTER)
self.ids.append(id)
x += gap
class GraphText(Text):
def __init__(self, parent, width=50, height=5):
Text.__init__(self, parent, width=width, height=height)
self.insert('0.5', "ጤናይስጥልኝ")
def run():
root = Tk()
# text = GraphText(root, width=width, height=height)
# text.grid()
canvas = Multigraph(root)
canvas.grid()
root.mainloop()
| LowResourceLanguages/hltdi-l3 | l3xdg/graphics.py | Python | gpl-3.0 | 10,943 | 0.004132 |
#!/usr/bin/env python3
from socket import *
import _thread
import sys
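# Simple UDP chat client: the main thread reads lines from stdin and sends them
# to localhost:23456, while a helper thread (recibirdatos) prints whatever the
# server sends back. Typing "bye" closes the socket and ends the session.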
def enviardatos(sock):
data = input()
enviar = data.encode()
sock.sendto(enviar,('localhost',23456))
if data == "bye":
print("Closing Client\n")
sock.close()
return 0
_thread.start_new_thread(recibirdatos,(('localhost',23456),sock))
while 1:
data = input()
enviar = data.encode()
sock.sendto(enviar,('localhost',23456))
if data == "bye":
print("Closing Client\n")
sock.close()
break
else:
if data == "bye":
print("Closing client\n")
sock.close()
sys.exit(0)
def recibirdatos(tupla,sock):
while 1:
try:
msg,server = sock.recvfrom(1024)
except OSError:
sys.exit(0)
data = msg.decode()
print(data)
if data == "bye":
print("Closing client\n")
sock.close()
def main():
sock = socket(AF_INET,SOCK_DGRAM)
enviardatos(sock)
main()
| CarlosCebrian/RedesII_Ejercicios | Practica2_RedesII/chatudp.py | Python | gpl-2.0 | 1,117 | 0.014324 |
__version__ = '0.1.4'
import requests
import re
import json
from bs4 import BeautifulSoup
# TODO: def see_also() => makes a whole set of related things to the topic
# chosen
# TODO:
# def chossy() => parse disambiguation pages can be called
# when the page reached during climb or
# any given method in the class and it hits a "chossy page"
# one that cannot be parsed in this customary
# method ie a disambiguation page or otherwise
# TODO:
# def flash() => grab directly a section of the overall page when supplied
# a set of context levels and/or a bit of text that it can match
# climb links should build based on a depth choice and builds a graph of
# links to help determine later searches
# TODO: add comments to this
# TODO: bolts should also allow for optional images.
# TODO:
# climb should have options (object) passed in to allow it to include images
# in route or to include graph of links with given
# level of depth
# TODO:
# You are creating context and subcontexts, text, links => Bolt() object
# and loading into an Array building structure to the wiki itself
# (or any large text based information page) that can be accessed
# parsed as such. Later should incorporate other checks to find titles and
# context that are more universal.
# TODO:
# Should also work with any amount of headers
# fix the h1 - ?? checks so they are extensible rather than hard coded
# this so it matches the h# set up and loops to
# decide on depth or just inputs the number found
# as the hash for the entry (headers define amounts of context)
# TODO: create overall function that sanitizes the strings for printing them
# "pretty"
# TODO: Replace complex words with definitions you find in the underlying link
# or using dictionary.
# TODO: Build some test harnesses for API and Restful-API.
# TODO: Return related topics and surrounding topics using wikis dropdowns,
# as part of climb or as separate API function.
def check_text(text):
if(text != "Contents" and text != ""):
return text
def chossy():
return {"error": "This is a Disambiguation Page...\n\n"}
class Bolt():
def __init__(self, text):
self.contexts = {}
self.text = text
self.images = None
# Add context to bolt.
def belay(self, context, level=None):
if(not level):
self.contexts = {}
self.contexts["one"] = context
else:
self.contexts[level] = context
# Encodes bolt for json formatting.
def encode(self):
return {"text": self.text, "contexts": self.contexts}
def __str__(self):
temp = "Text: " + self.text
temp += "\nContext:"
for key in self.contexts:
temp += "\nlvl" + key + ": " + self.contexts[key]
return temp
class Climber():
# Constructs route of entire wiki page based on topic chosen.
def __init__(self, options=None):
self.options = {} if not options else options
def climb(self, topic):
self.depth = self.options["depth"] if "depth" in self.options.keys() else None
self.summary = self.options["summary"] if "summary" in self.options.keys() else None
if(topic is None):
return None
else:
url = 'http://en.wikipedia.org/?title=%s' % topic
content = requests.get(url)
self.soup = BeautifulSoup(content.text, "html.parser")
check = self.soup.find_all(id="disambigbox")
return self.get_scaffold(check)
# Extracts images given a topic.
def climb_images(self, topic=None):
images = []
if(topic is None):
check = self.soup.find_all(id="disambigbox")
for image in self.soup.findAll("img"):
images.append("https://" + image["src"])
else:
url = 'http://en.wikipedia.org/?title=%s' % topic
content = requests.get(url)
self.soup = BeautifulSoup(content.text, "html.parser")
check = self.soup.find_all(id="disambigbox")
if(check):
for image in self.soup.findAll("img"):
images.append("https://" + image["src"])
else:
return chossy()
return json.dumps(images)
def get_scaffold(self, check):
# TODO: WIll cause a toggle based on passed type in which case the
# include summary scaffold will be used but no matter what the depth
# will be passed to scaffold defaulting to 0
if(not len(check)):
images_list = None
wiki_parsed = self.scaffold_basic(self.summary, self.depth)
if("images" in self.options.keys()):
images_list = self.climb_images()
if(images_list is None):
return json.dumps({"data": wiki_parsed})
else:
return json.dumps({"data": wiki_parsed,
"images": images_list})
else:
# TODO: WIll return all the other options to search from
# disambiguation page
return chossy()
def scaffold_basic(self, summary, depth):
selected = []
h = ["", "", "", ""]
for section in self.soup.find_all(["h1", "h2", "h3", "h4", "p"]):
try:
if(section.name == "h1"):
text = section.get_text()
if(check_text(text)):
h[0] = text
elif(section.name == "h2"):
text = section.get_text()
if(check_text(text)):
h[1] = text
h[2] = ""
h[3] = ""
elif(section.name == "h3"):
text = section.get_text()
if(check_text(text)):
h[2] = text
h[3] = ""
elif(section.name == "h4"):
text = section.get_text()
if(check_text(text)):
h[3] = text
elif(section.name == "p"):
# Add text to the bolt.
string = section.get_text()
if(string != ""):
string = re.sub(r"\[\d+\]", "", string)
bolt = Bolt(string)
bolt.belay(h[0], "one")
bolt.belay(h[1], "two")
bolt.belay(h[2], "three")
bolt.belay(h[3], "four")
selected.append(bolt.encode())
else:
continue
pass
except Exception as e:
                print(e)
continue
return selected
# Builds map of links with given search depth option as parameter.
# def climb_links(self, topic, options):
# if(not len(check)):
# link_query = 'div#mw-content-text a'
# links = [a.get('href') for a in self.soup.select(link_query)]
# return json.dumps(links)
# else:
# return chossy()
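
# Minimal usage sketch (added for illustration only): the topic string below is
# an arbitrary example and the "summary" option key mirrors the handling in
# Climber.climb() above; this is not part of the original module.
if __name__ == '__main__':
    climber = Climber(options={"summary": False})
    route = climber.climb("Granite")
    # climb() returns a JSON string for ordinary pages but a plain dict (from
    # chossy()) for disambiguation pages, so only decode strings here.
    if isinstance(route, str):
        for bolt in json.loads(route)["data"][:3]:
            print(bolt["text"][:80])
    else:
        print(route)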
| TheSighing/climber | climber/__init__.py | Python | mit | 7,197 | 0.000973 |
# Copyright (c) 2006-2009 The Trustees of Indiana University.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# - Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# - Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# - Neither the Indiana University nor the names of its contributors may be used
# to endorse or promote products derived from this software without specific
# prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import corepy.lib.extarray as extarray
import corepy.arch.spu.isa as spu
import corepy.arch.spu.platform as env
import corepy.arch.spu.lib.dma as dma
from corepy.arch.spu.lib.util import load_word
# This example program demonstrates sending mailbox messages from one SPU to
# another. In order for an SPU to send messages/signals to another SPU, the
# source SPU must know the base address of the memory-mapped problem state area
# of the target SPU. However the addresses are not known until the SPUs have
# been started, so the addresses must be passed to the SPUs by the PPU. The PPU
# builds one array of the addresses for the SPUs, then gives the address of this
# array to each SPU to DMA into local store and load into registers.
# A race condition is possible if mailboxes are used to send the address of the
# array. What can happen is that an earlier SPU gets the message, loads the
# array into registers, and sends a mailbox message to a following SPU, before
# that following SPU receives the initial array address message from the PPU.
# The solution used in this example program is to use signal to send the array
# address instead of a mailbox.
if __name__ == '__main__':
SPUS = 6
proc = env.Processor()
prgms = [env.Program() for i in xrange(0, SPUS)]
for rank, prgm in enumerate(prgms):
code = prgm.get_stream()
spu.set_active_code(code)
# First all the SPUs should start up and wait for an mbox message.
# The PPU will collect all the PS map addresses into an array for the SPUs.
r_psinfo_mma = dma.spu_read_signal1(code)
# DMA the PS info into local store
dma.mem_get(code, 0x0, r_psinfo_mma, SPUS * 4 * 4, 17)
dma.mem_complete(code, 17)
# Load the PS info into some registers.. one register per address
r_psinfo = prgm.acquire_registers(SPUS)
for i in xrange(0, SPUS):
spu.lqd(r_psinfo[i], code.r_zero, i)
# Initialize a data register with this rank and store it at LSA 0
r_send = prgm.acquire_register()
load_word(code, r_send, rank)
spu.stqd(r_send, code.r_zero, 0)
prgm.release_register(r_send)
# Send our rank as a mailbox message to the rank after this rank
dma.mem_write_in_mbox(code, r_psinfo[(rank + 1) % SPUS], 12, 18)
dma.mem_complete(code, 18)
# Receive the message the preceding rank sent
r_recv = dma.spu_read_in_mbox(code)
# Write the value out the interrupt mailbox for the PPU
dma.spu_write_out_intr_mbox(code, r_recv)
code.prgm.release_register(r_recv)
prgm.add(code)
# Start the SPUs
id = [proc.execute(prgms[i], async = True) for i in xrange(0, SPUS)]
# Set up an array of pointers to PS maps.
psinfo = extarray.extarray('I', SPUS * 4)
for i in xrange(0, SPUS * 4, 4):
psinfo[i] = id[i / 4].spups
psinfo.synchronize()
# Send the psinfo address to all the SPUs.
addr = psinfo.buffer_info()[0]
for i in xrange(0, SPUS):
env.spu_exec.write_signal(id[i], 1, addr)
# Wait for a mailbox message from each SPU; the value should be the preceding
# rank. Join each SPU once the message is received, too.
for i in xrange(0, SPUS):
val = env.spu_exec.read_out_ibox(id[i])
assert(val == (i - 1) % SPUS)
proc.join(id[i])
| matthiaskramm/corepy | examples/spu_interspu.py | Python | bsd-3-clause | 5,421 | 0.009592 |
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import sys
from telemetry.page import block_page_measurement_results
from telemetry.page import buildbot_page_measurement_results
from telemetry.page import csv_page_measurement_results
from telemetry.page import page_measurement_results
from telemetry.page import page_test
class MeasurementFailure(page_test.Failure):
"""Exception that can be thrown from MeasurePage to indicate an undesired but
designed-for problem."""
pass
class PageMeasurement(page_test.PageTest):
"""Glue code for running a measurement across a set of pages.
To use this, subclass from the measurement and override MeasurePage. For
example:
class BodyChildElementMeasurement(PageMeasurement):
def MeasurePage(self, page, tab, results):
body_child_count = tab.EvaluateJavaScript(
'document.body.children.length')
results.Add('body_children', 'count', body_child_count)
if __name__ == '__main__':
page_measurement.Main(BodyChildElementMeasurement())
To add test-specific options:
class BodyChildElementMeasurement(PageMeasurement):
def AddCommandLineOptions(parser):
parser.add_option('--element', action='store', default='body')
def MeasurePage(self, page, tab, results):
           body_child_count = tab.EvaluateJavaScript(
               "document.querySelector('%s').children.length")
           results.Add('children', 'count', body_child_count)
"""
def __init__(self,
action_name_to_run='',
needs_browser_restart_after_each_run=False,
discard_first_result=False,
clear_cache_before_each_run=False):
super(PageMeasurement, self).__init__(
'_RunTest',
action_name_to_run,
needs_browser_restart_after_each_run,
discard_first_result,
clear_cache_before_each_run)
def _RunTest(self, page, tab, results):
results.WillMeasurePage(page)
self.MeasurePage(page, tab, results)
results.DidMeasurePage()
def AddOutputOptions(self, parser):
super(PageMeasurement, self).AddOutputOptions(parser)
parser.add_option('-o', '--output',
dest='output_file',
help='Redirects output to a file. Defaults to stdout.')
parser.add_option('--output-trace-tag',
default='',
help='Append a tag to the key of each result trace.')
@property
def output_format_choices(self):
return ['buildbot', 'block', 'csv', 'none']
def PrepareResults(self, options):
if hasattr(options, 'output_file') and options.output_file:
output_stream = open(os.path.expanduser(options.output_file), 'w')
else:
output_stream = sys.stdout
if not hasattr(options, 'output_format'):
options.output_format = self.output_format_choices[0]
if not hasattr(options, 'output_trace_tag'):
options.output_trace_tag = ''
if options.output_format == 'csv':
return csv_page_measurement_results.CsvPageMeasurementResults(
output_stream,
self.results_are_the_same_on_every_page)
elif options.output_format == 'block':
return block_page_measurement_results.BlockPageMeasurementResults(
output_stream)
elif options.output_format == 'buildbot':
return buildbot_page_measurement_results.BuildbotPageMeasurementResults(
trace_tag=options.output_trace_tag)
elif options.output_format == 'none':
return page_measurement_results.PageMeasurementResults(
trace_tag=options.output_trace_tag)
else:
# Should never be reached. The parser enforces the choices.
raise Exception('Invalid --output-format "%s". Valid choices are: %s'
% (options.output_format,
', '.join(self.output_format_choices)))
@property
def results_are_the_same_on_every_page(self):
"""By default, measurements are assumed to output the same values for every
page. This allows incremental output, for example in CSV. If, however, the
measurement discovers what values it can report as it goes, and those values
may vary from page to page, you need to override this function and return
False. Output will not appear in this mode until the entire pageset has
run."""
return True
def MeasurePage(self, page, tab, results):
"""Override to actually measure the page's performance.
page is a page_set.Page
tab is an instance of telemetry.core.Tab
Should call results.Add(name, units, value) for each result, or raise an
exception on failure. The name and units of each Add() call must be
the same across all iterations. The name 'url' must not be used.
Prefer field names that are in accordance with python variable style. E.g.
field_name.
Put together:
def MeasurePage(self, page, tab, results):
res = tab.EvaluateJavaScript('2+2')
if res != 4:
raise Exception('Oh, wow.')
results.Add('two_plus_two', 'count', res)
"""
raise NotImplementedError()
| pozdnyakov/chromium-crosswalk | tools/telemetry/telemetry/page/page_measurement.py | Python | bsd-3-clause | 5,239 | 0.004772 |
# AUTHOR: Kale Miller
# DESCRIPTION: Contains the core classes for the program.
# 50726f6772616d6d696e6720697320627265616b696e67206f66206f6e652062696720696d706f737369626c65207461736b20696e746f20736576
# 6572616c207665727920736d616c6c20706f737369626c65207461736b732e
# DEVELOPMENT LOG:
# 05/12/16: Initialized classes file. Created the parent container class along with children for the basic container,
# the heavy container and the refrigerated container.
# 06/12/16: Fixed some of the public attributes to make them private.
# 07/12/16: Renamed the file to 'containers'. Added 'real' time processing of containers for load/unload. Removed
# some magic numbers and placed them at the top of the script.
# 12/12/16: Fixed some of the methods that failed under unit tests.
# 15/12/16: Added methods to add auxilary labels. Added method to generate information label. Small bug fixes.
# TODO: Somehow make the init call on the base class not hardcoded in.
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~IMPORTS/GLOBALS~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
from time import sleep
TEMP_LOW = -20.0
TEMP_HIGH = 10.0
UNLOAD_TIME = 0.75
LOAD_TIME = 0.75
LABEL_APPLICATION_TIME = 0.1
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~.:.~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~MAIN~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class _BaseContainer:
"""The parent class for which all containers are inherited."""
def __init__(self, id, info, destination):
"""
Initialises the class. id is the id tag on the container while info is the additional information you
wish to include on the label, such as contents or comments.
"""
# TODO: Change the assertion error to a value error.
assert isinstance(id, str) and isinstance(info, str), "Some of the parameters passed aren't strings."
self._id = id
self._info = info
self._destination = destination
self._labels = list()
self._auxilarylabelsalreadyadded = True
self._type = 'N/A'
self._loaded = False
self._onship = True
self._weight = None
self._currentlocation = self.currentlocation()
def currentlocation(self):
"""Find where the container currently is."""
if self._onship:
loc = 'On the ship'
elif self._loaded:
loc = 'Loaded'
elif not self._onship and not self._loaded:
loc = 'Holding bay'
self._currentlocation = loc
return loc
def settype(self, type):
"""Sets the type of the container."""
self._type = type
return None
def id(self):
"""Fetches the container's id."""
return self._id
def information(self):
"""Print the information about this container."""
print "----------------------------------------------------------------------"
print "CONTAINER: %s" % self._id
print "INFORMATION: %s" % self._info
print "DESTINATION: %s" % self._destination
print "LABELS: %s" % str(self._labels)
print "CURRENT LOCATION: %s" % self._currentlocation
print "----------------------------------------------------------------------"
return None
def _informationlabel(self):
"""Generates a label that contains information about the container."""
return "INFORMATION: %s. DESTINATION: %s." % (self._info,self._destination)
def addidtag(self):
"""Adds a id tag to the container."""
self._labels.append(self._id)
return None
def addlabel(self, label2add, debug=False, quiet=False):
"""Add a label to the container (e.g. fragile)."""
self._labels.append(label2add)
if not quiet: print "Added the label %r to container %s." % (label2add, self._id)
if not debug: sleep(LABEL_APPLICATION_TIME)
return None
def defineweight(self, m):
"""Defines the weight of the container."""
self._weight = m
return None
def weight(self):
"""Returns the weight of the container."""
return self._weight
def removelabel(self, label2remove):
"""Removes a label from the container."""
try:
            self._labels.remove(label2remove)
except ValueError:
print "The label %r is not on container %s." % (label2remove, self._id)
else:
print "Successfully removed the label %r from container %s." % (label2remove, self._id)
def load(self, load_location, debug=False):
"""Loads the container."""
if not self._auxilarylabelsalreadyadded:
print "WARNING: All of the required labels haven't been added to container %s" % self._id
if self._onship:
print "Container %s is still on the ship." % self._id
elif self._loaded:
print "Container %s is already loaded on %s." % (self._id, load_location)
elif not self._loaded:
print "Loading container %s onto %s." % (self._id, load_location)
if not debug: sleep(LOAD_TIME)
self._loaded = True
else:
raise RuntimeError, "There was a problem with container %s while loading." % self._id
self.currentlocation()
return None
def unload(self, debug=False):
"""Unloads the container."""
if self._onship:
print "Unloading container %s." % self._id
if not debug: sleep(UNLOAD_TIME)
self._onship = False
elif not self._onship:
print "Container %s has already been unloaded." % self._id
else:
raise RuntimeError, "There was a problem with container %s while unloading from the ship." % self._id
self.currentlocation()
return None
class BasicContainer(_BaseContainer):
"""The most basic container possible."""
def __init__(self, id, info, destination):
_BaseContainer.__init__(self, id, info, destination) # Call the parent class' constructor.
self.settype('basic')
self._auxilarylabelsalreadyadded = True
def addauxilarylabels(self, debug=False):
"""Adds the extra labels that are required on the container."""
self.addlabel(self._informationlabel(), quiet=True)
return None
class HeavyContainer(_BaseContainer):
"""The heavy type container."""
def __init__(self, id, info, destination):
_BaseContainer.__init__(self, id, info, destination) # Call the parent class' constructor.
self.settype('heavy')
self._auxilarylabelsalreadyadded = False
def addauxilarylabels(self, debug=False):
"""Adds the extra labels that are required on the container."""
self.addlabel(self._informationlabel(), quiet=True)
self.addlabel('NOTE: Heavy container.', quiet=True)
self._auxilarylabelsalreadyadded = True
return None
class RefrigeratedContainer(_BaseContainer):
"""The refrigerated container."""
def __init__(self, id, info, destination):
_BaseContainer.__init__(self, id, info, destination) # Call the parent class' constructor.
self.settype('refrigerated')
self._auxilarylabelsalreadyadded = False
self._temp = 0.0 # Set in celsius.
def addauxilarylabels(self, debug=False):
"""Adds the extra labels that are required on the container."""
self.addlabel(self._informationlabel(), quiet=True)
self.addlabel('NOTE: Refrigerated. Handle with care.', quiet=True)
self._auxilarylabelsalreadyadded = True
return None
def assertValidTemp(self, T):
"""Asserts that the temperature is valid."""
assert isinstance(T, float) or isinstance(T, int), "Temperature must be a number."
assert TEMP_LOW <= T <= TEMP_HIGH, "Temperature specified is outside functioning range."
def settemp(self, newtemp):
"""Sets the temperature of the container."""
self._temp = newtemp
print "The temperature of container %s is now %f degrees." % (self._id, newtemp)
return None
def checktemp(self):
"""Checks the temperature of the container."""
print "The current temperature of container %s is %f degrees." % self._temp
class LiquidContainer(HeavyContainer):
"""A liquid container that inherits from the heavy container type."""
def __init__(self, id, info, destination, occupiedvolume, totalvolume):
"""Initialise the class."""
HeavyContainer.__init__(self, id, info, destination)
self.settype('liquid')
self._auxilarylabelsalreadyadded = False
self._occupiedvolume = occupiedvolume
self._totalvolume = totalvolume
def addauxilarylabels(self, debug=False):
"""Adds the extra labels that are required on the container."""
self.addlabel(self._informationlabel(), quiet=True)
self.addlabel('Contains liquids.', quiet=True)
self._auxilarylabelsalreadyadded = True
return None
def _validvolumetoadd(self, V):
"""Sanity checks on adding volume."""
if not (isinstance(V, int) or isinstance(V, float)):
print "The volume must be either a float or an integer."
return False
elif V > self._totalvolume - self._occupiedvolume:
print "There isn't enough space in container %s to add an additional %f litres." % (self._id, V)
return False
elif V < 0:
print "Volume must be a positive number."
return False
else:
return True
def _validvolumetoremove(self, V):
"""Sanity checks on removing volume."""
if not (isinstance(V, int) or isinstance(V, float)):
print "The volume must be either a float or an integer."
return False
elif V > self._occupiedvolume:
print "There isn't enough liquid in container %s to remove %f litres." % (self._id, V)
return False
elif V < 0:
print "Volume must be a positive number."
return False
else:
return True
def setoccupiedvolume(self, V):
"""Set the current volume of liquid in the tank."""
if (isinstance(V, int) or isinstance(V, float)) and 0 < V < self._totalvolume:
self._occupiedvolume = V
return None
def addvolume(self, V):
"""Fills the tank by the specified volume."""
if self._validvolumetoadd(V):
self._occupiedvolume += V
return None
def removevolume(self, V):
"""Drains the tank by the specified volume."""
if self._validvolumetoremove(V):
self._occupiedvolume -= V
return None
class ExplosivesContainer(HeavyContainer):
"""A heavy container that carries explosives."""
def __init__(self, id, info, destination):
"""Initialise the class."""
HeavyContainer.__init__(self, id, info, destination)
self.settype('explosives')
self._auxilarylabelsalreadyadded = False
self.safe = True
def addauxilarylabels(self, debug=False):
"""Adds the extra labels that are required on the container."""
self.addlabel(self._informationlabel(), quiet=True)
self.addlabel('CAUTION: Dangerous goods inside. Handle with care.', quiet=True)
self.addlabel('Do not leave container in hot conditions.', quiet=True)
self.addlabel('No naked flames.', quiet=True)
self._auxilarylabelsalreadyadded = True
return None
def checkifsafe(self):
"""Checks the explosives to make sure they are in safe conditions."""
if self.safe:
print "Container %s is operating under safe conditions." % self._id
if not self.safe:
print "!!! DANGER: CONTAINER %s IS OPERATING UNDER UNSAFE CONDITIONS !!!" % self._id
return None
def emergencyprocedures(self):
"""A command to be used only when the explosives are likely to detonate."""
# There is nothing here yet because there is nothing I could program in.
return None
class ToxicContainer(BasicContainer):
"""A container that contains toxins."""
def __init__(self, id, info, destination):
BasicContainer.__init__(self, id, info, destination)
self.settype('toxic')
self._auxilarylabelsalreadyadded = False
self.safe = True
def addauxilarylabels(self, debug=False):
"""Adds the extra labels that are required on the container."""
self.addlabel(self._informationlabel(), quiet=True)
self.addlabel('CAUTION: Dangerous goods inside. Handle with care.', quiet=True)
self.addlabel('Do not ingest goods.', quiet=True)
self.addlabel('If leakage, call toxins hotline at 1900 882 301.', quiet=True)
self._auxilarylabelsalreadyadded = True
return None
def checkifsafe(self):
"""A check to confirm that the container isn't leaking and is safe to be around."""
if self.safe:
print "Container %s is operating under safe conditions." % self._id
if not self.safe:
print "!!! DANGER: CONTAINER %s IS OPERATING UNDER UNSAFE CONDITIONS !!!" % self._id
return None
class ChemicalContainer(RefrigeratedContainer, ToxicContainer):
"""A chemical container that is both refrigerated and toxic."""
def __init__(self, id, info, destination):
RefrigeratedContainer.__init__(self, id, info, destination)
ToxicContainer.__init__(self, id, info, destination)
self.settype('chemical')
self._auxilarylabelsalreadyadded = False
def addauxilarylabels(self, debug=False):
"""Adds the extra labels that are required on the container."""
self.addlabel(self._informationlabel(), quiet=True)
self.addlabel('CAUTION: Dangerous goods inside. Handle with care.', quiet=True)
self.addlabel('Do not ingest goods.', quiet=True)
self.addlabel('CAUTION: Strong chemicals.', quiet=True)
self.addlabel('If leak call chemicals hotline at 1900 772 900', quiet=True)
self._auxilarylabelsalreadyadded = True
return None
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~.:.~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
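
# Illustrative usage sketch (not part of the original shipping-yard program; the
# container id, contents and destination strings below are invented examples).
if __name__ == '__main__':
    box = RefrigeratedContainer('RC-001', 'Frozen fish', 'Hamburg')
    box.addidtag()
    box.addauxilarylabels()
    box.settemp(-5.0)
    box.unload(debug=True)            # debug=True skips the sleep() delays
    box.load('truck 7', debug=True)
    box.information()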
| kmiller96/Shipping-Containers-Software | lib/containers.py | Python | mit | 14,805 | 0.002972 |
#1strand Bushing Tool
#Standalone program for minimized cruft
import math
print "This program is for printing the best possible circular bushings"
print "Printer config values are hardcoded for ease of use (for me)"
xpath = [] #These are initialized and default values
ypath = []
zpath = []
step = []
epath = []
xstart = 10.0
ystart = 10.0
zstart = 0.5
height = 0.0
LayerHeight = 0.3
ExtrusionWidth = 0.6
FilamentDiameter=3
FilamentArea = FilamentDiameter * FilamentDiameter * 3.14159 / 4.0
GooCoefficient = LayerHeight * ExtrusionWidth / FilamentArea
configlist = [LayerHeight, ExtrusionWidth, FilamentDiameter, GooCoefficient]
BrimDiameter = 0.0
OuterDiameter = 0.0
InnerDiameter = 0.0
N = 1
ActualExtrusionWidth = ExtrusionWidth
print "Current values are:"
print "LayerHeight =", configlist[0] #This assignment is super important
print "ExtrusionWidth=", configlist[1] #and needs to be consistent with
print "FilamentDiameter=", configlist[2] #with other code blocks related
print "GooCoefficient=", configlist[3] #to these options.
BrimDiameter = float(raw_input("Enter brim diameter in mm:"))
OuterDiameter = float(raw_input("Enter Outer Diameter in mm:"))
InnerDiameter = float(raw_input("Enter Inner Diameter in mm:"))
N = int(raw_input("Enter number of line segments in your alleged circles"))
anglestep = 2 * math.pi / N
print "Angular step is ", anglestep, " radians."
height = float(raw_input("Enter Height"))
centerx = (BrimDiameter / 2.0)+5 #Center is chosen so brim is 5mm from edge
centery = (BrimDiameter / 2.0)+5 #Center is chosen so brim is 5mm from edge
thickness = (OuterDiameter-InnerDiameter)/2
perimeters = thickness/ExtrusionWidth
print "Thickness = ", thickness
print "Needed perimeters = ", perimeters
perimeters = int(perimeters)
ActualExtrusionWidth = thickness/perimeters
print "Revised perimeters = ", perimeters
print "Revised extrusion width = ", ActualExtrusionWidth
BrimThickness = (BrimDiameter-InnerDiameter)/2
BrimPerimeters = int(BrimThickness/ActualExtrusionWidth)
print "Brim Thickness = ", BrimThickness
print "Brim Perimeters = ", BrimPerimeters
#Brim layer is first, and treated separately.
j=0
i=0
radius = BrimDiameter/2 - (j+0.5)*ActualExtrusionWidth
xpath.append(centerx+radius)
ypath.append(centery)
zpath.append(LayerHeight)
while (j<BrimPerimeters):
radius = BrimDiameter/2 - (j+0.5)*ActualExtrusionWidth
j=j+1
i=0
while (i<N):
i=i+1
#print "i=", i, "j=", j, "radius=", radius
xpath.append(centerx+radius*math.cos(i*anglestep))
ypath.append(centery+radius*math.sin(i*anglestep))
zpath.append(LayerHeight)
#
#
#
#Now the actual bushing begins printing.
#
#
#
CurrentLayer=1
CurrentHeight=LayerHeight*CurrentLayer #Technically should be earlier but wutev
#
#
#
#Now the actual bushing begins printing.
#
#
#
#k=0
##Even layers (1st bushing layer is 2) are inside to outside
##odd layers are outside to inside, to maintain strand continuity
#j=0
#i=0
#radius = InnerDiameter/2 + (j-0.5)*ActualExtrusionWidth
#xpath.append(centerx+radius)
#ypath.append(centery)
#zpath.append(CurrentHeight)
#while (j<=perimeters):
# radius = InnerDiameter/2 + (j-0.5)*ActualExtrusionWidth
# j=j+1
# i=0
# while (i<N):
# i=i+1
# #print "i=", i, "j=", j, "radius=", radius
# xpath.append(centerx+radius*math.cos(i*anglestep))
# ypath.append(centery+radius*math.sin(i*anglestep))
# zpath.append(CurrentHeight)
##odd layers are outside to inside, to maintain strand continuity
#CurrentLayer=3
#CurrentHeight=LayerHeight*CurrentLayer
#j=0
#i=0
#radius = OuterDiameter/2 - (j+0.5)*ActualExtrusionWidth
#xpath.append(centerx+radius)
#ypath.append(centery)
#zpath.append(CurrentHeight)
#while (j<perimeters):
# radius = OuterDiameter/2 - (j+0.5)*ActualExtrusionWidth
# j=j+1
# i=0
# while (i<N):
# i=i+1
# #print "i=", i, "j=", j, "radius=", radius
# xpath.append(centerx+radius*math.cos(i*anglestep))
# ypath.append(centery+radius*math.sin(i*anglestep))
# zpath.append(CurrentHeight)
while (CurrentLayer*LayerHeight < height):
CurrentLayer=CurrentLayer+1
CurrentHeight=LayerHeight*CurrentLayer
#Even layers (1st bushing layer is 2) are inside to outside
#odd layers are outside to inside, to maintain strand continuity
j=1
i=0
radius = InnerDiameter/2 + (j-0.5)*ActualExtrusionWidth
xpath.append(centerx+radius)
ypath.append(centery)
zpath.append(CurrentHeight-LayerHeight*0.75)
while (j<=perimeters):
radius = InnerDiameter/2 + (j-0.5)*ActualExtrusionWidth
j=j+1
i=0
while (i<(N-1)): #kludge
i=i+1
#print "i=", i, "j=", j, "layer=", CurrentLayer, "radius=", radius
xpath.append(centerx+radius*math.cos(i*anglestep))
ypath.append(centery+radius*math.sin(i*anglestep))
if (i==1 and j==1):
zpath.append(CurrentHeight-LayerHeight*.25)
else:
zpath.append(CurrentHeight)
#odd layers are outside to inside, to maintain strand continuity
CurrentLayer=CurrentLayer+1
CurrentHeight=LayerHeight*CurrentLayer
j=0
i=0
radius = OuterDiameter/2 - (j+0.5)*ActualExtrusionWidth
xpath.append(centerx+radius)
ypath.append(centery)
zpath.append(CurrentHeight-LayerHeight*.75)
while (j<perimeters):
radius = OuterDiameter/2 - (j+0.5)*ActualExtrusionWidth
j=j+1
i=0
while (i<(N-1)): #Same kludge as the even layers.
i=i+1
#print "i=", i, "j=", j, "layer=", CurrentLayer, "radius=", radius
xpath.append(centerx+radius*math.cos(i*anglestep))
ypath.append(centery+radius*math.sin(i*anglestep))
if (i==1 and j==1):
zpath.append(CurrentHeight-LayerHeight*.25)
else:
zpath.append(CurrentHeight)
#Extrusion is only handled here temporarily for testing
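# E values (epath) are cumulative because the G-code header below sets M82
# (absolute extruder positioning); each step is the XY segment length times
# GooCoefficient = LayerHeight * ExtrusionWidth / FilamentArea.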
for x in xrange(len(xpath)): # This initializes the arrays so I can
step.append(0.0) #avoid that append() bullshit where I dont
epath.append(0.0) #know where I'm writing.
for x in xrange(2, len(xpath)): # This calculates how much extruder movement per step
distance=((xpath[x]-xpath[x-1])**2+(ypath[x]-ypath[x-1])**2)**0.5
step[x]=distance*GooCoefficient
epath[x]=epath[x-1]+step[x]
#for x in range(len(xpath)): #Human readable raw output
# print xpath[x-1], ypath[x-1], zpath[x-1], step[x-1], epath[x-1]
goutput = open("output1.gcode", "wb") #Now save to output1.gcode
goutput.write("G28 \nG21 \nG90 \nG92 E0 \nM82")
x=0
for x in range(len(xpath)):
goutput.write("G1 X" );
goutput.write( str(xpath[x]) );
goutput.write( " Y" );
goutput.write( str(ypath[x]) );
goutput.write( " Z" );
goutput.write( str(zpath[x]) );
goutput.write( " E" );
goutput.write( str(epath[x]) );
goutput.write( " F2000 \n" );
goutput.close()
| kanethemediocre/1strand | 1strandbushinga002.py | Python | gpl-2.0 | 6,978 | 0.019633 |
#!/usr/bin/env python
import paho.mqtt.client as mqtt
# The callback for when the client receives a CONNACK response from the server.
def on_connect(client, userdata, flags, rc):
print("Connected with result code "+str(rc))
# Subscribing in on_connect() means that if we lose the connection and
# reconnect then subscriptions will be renewed.
client.subscribe("presence/+")
# The callback for when a PUBLISH message is received from the server.
def on_message(client, userdata, msg):
print(msg.topic+" "+str(msg.payload))
client = mqtt.Client()
client.on_connect = on_connect
client.on_message = on_message
client.connect("slug", 1883, 60)
# Blocking call that processes network traffic, dispatches callbacks and
# handles reconnecting.
# Other loop*() functions are available that give a threaded interface and a
# manual interface.
client.loop_forever()
| frink182/pi_temps | mqtt_listener.py | Python | gpl-2.0 | 882 | 0.003401 |
class BTree:
    def __init__(self, b_tree_list=None):
        # Use None as the default to avoid sharing one mutable list between instances.
        self.b_tree_list = b_tree_list if b_tree_list is not None else []
        self.levels = len(self.b_tree_list)
def visualise(self):
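        # Centre each level: for a perfect binary tree the gap between nodes on
        # level `index` is 2 ** (levels - index) - 1 character columns.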
for index, level in enumerate(self.b_tree_list):
spacing = 2 ** (self.levels - index) - 1
print(((spacing-1)//2)*' ', end='')
for node in level:
if node is None:
print(' ', end='')
else:
print(node, end='')
print(spacing * ' ', end='')
print('') # newline
def invert(self):
for level in self.b_tree_list:
level.reverse()
example_tree = BTree([
[4],
[2, 7],
[1, 3, 6, 9]])
example_tree.visualise()
example_tree.invert()
example_tree.visualise()
| DakRomo/2017Challenges | challenge_4/python/ning/challenge_4.py | Python | mit | 790 | 0 |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import time
from typing import Dict, List, Optional
from airflow.exceptions import AirflowException
from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
class AwsGlueJobHook(AwsBaseHook):
"""
Interact with AWS Glue - create job, trigger, crawler
:param s3_bucket: S3 bucket where logs and local etl script will be uploaded
:type s3_bucket: Optional[str]
:param job_name: unique job name per AWS account
:type job_name: Optional[str]
:param desc: job description
:type desc: Optional[str]
:param concurrent_run_limit: The maximum number of concurrent runs allowed for a job
:type concurrent_run_limit: int
:param script_location: path to etl script on s3
:type script_location: Optional[str]
:param retry_limit: Maximum number of times to retry this job if it fails
:type retry_limit: int
:param num_of_dpus: Number of AWS Glue DPUs to allocate to this Job
:type num_of_dpus: int
:param region_name: aws region name (example: us-east-1)
:type region_name: Optional[str]
:param iam_role_name: AWS IAM Role for Glue Job Execution
:type iam_role_name: Optional[str]
:param create_job_kwargs: Extra arguments for Glue Job Creation
:type create_job_kwargs: Optional[dict]
"""
JOB_POLL_INTERVAL = 6 # polls job status after every JOB_POLL_INTERVAL seconds
def __init__(
self,
s3_bucket: Optional[str] = None,
job_name: Optional[str] = None,
desc: Optional[str] = None,
concurrent_run_limit: int = 1,
script_location: Optional[str] = None,
retry_limit: int = 0,
num_of_dpus: int = 10,
iam_role_name: Optional[str] = None,
create_job_kwargs: Optional[dict] = None,
*args,
**kwargs,
):
self.job_name = job_name
self.desc = desc
self.concurrent_run_limit = concurrent_run_limit
self.script_location = script_location
self.retry_limit = retry_limit
self.num_of_dpus = num_of_dpus
self.s3_bucket = s3_bucket
self.role_name = iam_role_name
self.s3_glue_logs = 'logs/glue-logs/'
self.create_job_kwargs = create_job_kwargs or {}
kwargs['client_type'] = 'glue'
super().__init__(*args, **kwargs)
def list_jobs(self) -> List:
""":return: Lists of Jobs"""
conn = self.get_conn()
return conn.get_jobs()
def get_iam_execution_role(self) -> Dict:
""":return: iam role for job execution"""
iam_client = self.get_client_type('iam', self.region_name)
try:
glue_execution_role = iam_client.get_role(RoleName=self.role_name)
self.log.info("Iam Role Name: %s", self.role_name)
return glue_execution_role
except Exception as general_error:
self.log.error("Failed to create aws glue job, error: %s", general_error)
raise
def initialize_job(self, script_arguments: Optional[dict] = None) -> Dict[str, str]:
"""
Initializes connection with AWS Glue
to run job
:return:
"""
glue_client = self.get_conn()
script_arguments = script_arguments or {}
try:
job_name = self.get_or_create_glue_job()
job_run = glue_client.start_job_run(JobName=job_name, Arguments=script_arguments)
return job_run
except Exception as general_error:
self.log.error("Failed to run aws glue job, error: %s", general_error)
raise
def get_job_state(self, job_name: str, run_id: str) -> str:
"""
Get state of the Glue job. The job state can be
running, finished, failed, stopped or timeout.
:param job_name: unique job name per AWS account
:type job_name: str
:param run_id: The job-run ID of the predecessor job run
:type run_id: str
:return: State of the Glue job
"""
glue_client = self.get_conn()
job_run = glue_client.get_job_run(JobName=job_name, RunId=run_id, PredecessorsIncluded=True)
job_run_state = job_run['JobRun']['JobRunState']
return job_run_state
def job_completion(self, job_name: str, run_id: str) -> Dict[str, str]:
"""
Waits until Glue job with job_name completes or
fails and return final state if finished.
Raises AirflowException when the job failed
:param job_name: unique job name per AWS account
:type job_name: str
:param run_id: The job-run ID of the predecessor job run
:type run_id: str
:return: Dict of JobRunState and JobRunId
"""
failed_states = ['FAILED', 'TIMEOUT']
finished_states = ['SUCCEEDED', 'STOPPED']
while True:
job_run_state = self.get_job_state(job_name, run_id)
if job_run_state in finished_states:
self.log.info("Exiting Job %s Run State: %s", run_id, job_run_state)
return {'JobRunState': job_run_state, 'JobRunId': run_id}
if job_run_state in failed_states:
job_error_message = "Exiting Job " + run_id + " Run State: " + job_run_state
self.log.info(job_error_message)
raise AirflowException(job_error_message)
else:
self.log.info(
"Polling for AWS Glue Job %s current run state with status %s", job_name, job_run_state
)
time.sleep(self.JOB_POLL_INTERVAL)
def get_or_create_glue_job(self) -> str:
"""
Creates(or just returns) and returns the Job name
:return:Name of the Job
"""
glue_client = self.get_conn()
try:
get_job_response = glue_client.get_job(JobName=self.job_name)
self.log.info("Job Already exist. Returning Name of the job")
return get_job_response['Job']['Name']
except glue_client.exceptions.EntityNotFoundException:
self.log.info("Job doesn't exist. Now creating and running AWS Glue Job")
if self.s3_bucket is None:
raise AirflowException('Could not initialize glue job, error: Specify Parameter `s3_bucket`')
s3_log_path = f's3://{self.s3_bucket}/{self.s3_glue_logs}{self.job_name}'
execution_role = self.get_iam_execution_role()
try:
create_job_response = glue_client.create_job(
Name=self.job_name,
Description=self.desc,
LogUri=s3_log_path,
Role=execution_role['Role']['Arn'],
ExecutionProperty={"MaxConcurrentRuns": self.concurrent_run_limit},
Command={"Name": "glueetl", "ScriptLocation": self.script_location},
MaxRetries=self.retry_limit,
AllocatedCapacity=self.num_of_dpus,
**self.create_job_kwargs,
)
return create_job_response['Name']
except Exception as general_error:
self.log.error("Failed to create aws glue job, error: %s", general_error)
raise
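
# Example usage sketch (illustrative only; the bucket, role, script path and
# job names below are placeholders, not values defined by this provider):
#
#     hook = AwsGlueJobHook(
#         job_name='example-etl',
#         desc='example Glue job',
#         s3_bucket='my-glue-assets',
#         script_location='s3://my-glue-assets/scripts/etl.py',
#         iam_role_name='GlueExecutionRole',
#     )
#     job_run = hook.initialize_job({'--ENV': 'dev'})
#     hook.job_completion(hook.job_name, job_run['JobRunId'])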
| dhuang/incubator-airflow | airflow/providers/amazon/aws/hooks/glue.py | Python | apache-2.0 | 7,995 | 0.002126 |
#!/usr/bin/env python
import numpy as np
import vtk
def main():
named_colors = vtk.vtkNamedColors()
# Make a 32 x 32 grid.
size = 32
# Define z values for the topography.
# Comment out the following line if you want a different random
# distribution each time the script is run.
np.random.seed(3)
topography = np.random.randint(0, 5, (size, size))
# Define points, triangles and colors
colors = vtk.vtkUnsignedCharArray()
colors.SetNumberOfComponents(3)
points = vtk.vtkPoints()
triangles = vtk.vtkCellArray()
# Build the meshgrid manually.
count = 0
for i in range(size - 1):
for j in range(size - 1):
z1 = topography[i][j]
z2 = topography[i][j + 1]
z3 = topography[i + 1][j]
# Triangle 1
points.InsertNextPoint(i, j, z1)
points.InsertNextPoint(i, (j + 1), z2)
points.InsertNextPoint((i + 1), j, z3)
triangle = vtk.vtkTriangle()
triangle.GetPointIds().SetId(0, count)
triangle.GetPointIds().SetId(1, count + 1)
triangle.GetPointIds().SetId(2, count + 2)
triangles.InsertNextCell(triangle)
z1 = topography[i][j + 1]
z2 = topography[i + 1][j + 1]
z3 = topography[i + 1][j]
# Triangle 2
points.InsertNextPoint(i, (j + 1), z1)
points.InsertNextPoint((i + 1), (j + 1), z2)
points.InsertNextPoint((i + 1), j, z3)
triangle = vtk.vtkTriangle()
triangle.GetPointIds().SetId(0, count + 3)
triangle.GetPointIds().SetId(1, count + 4)
triangle.GetPointIds().SetId(2, count + 5)
count += 6
triangles.InsertNextCell(triangle)
# Add some color.
r = [int(i / float(size) * 255), int(j / float(size) * 255), 0]
colors.InsertNextTypedTuple(r)
colors.InsertNextTypedTuple(r)
colors.InsertNextTypedTuple(r)
colors.InsertNextTypedTuple(r)
colors.InsertNextTypedTuple(r)
colors.InsertNextTypedTuple(r)
# Create a polydata object.
trianglePolyData = vtk.vtkPolyData()
# Add the geometry and topology to the polydata.
trianglePolyData.SetPoints(points)
trianglePolyData.GetPointData().SetScalars(colors)
trianglePolyData.SetPolys(triangles)
# Clean the polydata so that the edges are shared!
cleanPolyData = vtk.vtkCleanPolyData()
cleanPolyData.SetInputData(trianglePolyData)
# Use a filter to smooth the data (will add triangles and smooth).
smooth_loop = vtk.vtkLoopSubdivisionFilter()
smooth_loop.SetNumberOfSubdivisions(3)
smooth_loop.SetInputConnection(cleanPolyData.GetOutputPort())
# Create a mapper and actor for smoothed dataset.
mapper = vtk.vtkPolyDataMapper()
mapper.SetInputConnection(smooth_loop.GetOutputPort())
actor_loop = vtk.vtkActor()
actor_loop.SetMapper(mapper)
actor_loop.GetProperty().SetInterpolationToFlat()
# Update the pipeline so that vtkCellLocator finds cells!
smooth_loop.Update()
# Define a cellLocator to be able to compute intersections between lines.
# and the surface
locator = vtk.vtkCellLocator()
locator.SetDataSet(smooth_loop.GetOutput())
locator.BuildLocator()
maxloop = 1000
dist = 20.0 / maxloop
tolerance = 0.001
# Make a list of points. Each point is the intersection of a vertical line
# defined by p1 and p2 and the surface.
points = vtk.vtkPoints()
for i in range(maxloop):
p1 = [2 + i * dist, 16, -1]
p2 = [2 + i * dist, 16, 6]
# Outputs (we need only pos which is the x, y, z position
# of the intersection)
t = vtk.mutable(0)
pos = [0.0, 0.0, 0.0]
pcoords = [0.0, 0.0, 0.0]
subId = vtk.mutable(0)
locator.IntersectWithLine(p1, p2, tolerance, t, pos, pcoords, subId)
        # Add a slight offset in z.
        pos[2] += 0.01
        # Add the x, y, z position of the intersection.
        points.InsertNextPoint(pos)
    # Create a spline and add the points
    spline = vtk.vtkParametricSpline()
    spline.SetPoints(points)
    functionSource = vtk.vtkParametricFunctionSource()
    functionSource.SetUResolution(maxloop)
    functionSource.SetParametricFunction(spline)
    # Map the spline
    mapper = vtk.vtkPolyDataMapper()
    mapper.SetInputConnection(functionSource.GetOutputPort())
    # Define the line actor
    actor = vtk.vtkActor()
    actor.SetMapper(mapper)
    actor.GetProperty().SetColor(named_colors.GetColor3d("Red"))
    actor.GetProperty().SetLineWidth(3)
    # Visualize
    renderer = vtk.vtkRenderer()
    renderWindow = vtk.vtkRenderWindow()
    renderWindow.AddRenderer(renderer)
    renderWindowInteractor = vtk.vtkRenderWindowInteractor()
    renderWindowInteractor.SetRenderWindow(renderWindow)
    # Add actors and render
    renderer.AddActor(actor)
    renderer.AddActor(actor_loop)
    renderer.SetBackground(named_colors.GetColor3d("Cornsilk"))
    renderWindow.SetSize(800, 800)
    renderWindow.Render()
    renderer.GetActiveCamera().SetPosition(-32.471276, 53.258788, 61.209332)
    renderer.GetActiveCamera().SetFocalPoint(15.500000, 15.500000, 2.000000)
    renderer.GetActiveCamera().SetViewUp(0.348057, -0.636740, 0.688055)
    renderer.ResetCameraClippingRange()
    renderWindow.Render()
    renderWindowInteractor.Start()
if __name__ == '__main__':
    main()
| lorensen/VTKExamples | src/Python/DataManipulation/LineOnMesh.py | Python | apache-2.0 | 5,546 | 0 |
#!/usr/bin/env python3
'''Test for no resigning if the zone is properly signed.'''
from dnstest.utils import set_err
from dnstest.test import Test
import subprocess
def patch_zone(t, server, zone, script):
"""
Update zone file on a master server.
"""
zone = zone[0]
zonefile = "%s/master/%s" % (server.dir, zone.file_name)
modify_script = "%s/modify.sh" % t.data_dir
patch_script = "%s/%s" % (t.data_dir, script)
subprocess.check_call([modify_script, zonefile, patch_script])
t = Test()
server = t.server("knot")
zone = t.zone("example.", storage=".")
server.dnssec_enable = True
server.gen_key(zone, ksk=True)
server.gen_key(zone)
t.link(zone, server)
t.start()
serial = server.zone_wait(zone)
scripts = [
("insensitive RRs", "modify-insensitive.awk", False),
("NSEC RR", "modify-nsec.awk", True),
("LP RR", "modify-lp.awk", True),
]
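# Each entry is (description, awk patch script, whether the change is expected
# to trigger a re-sign of the zone).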
for name, script, resign in scripts:
    t.sleep(1)
    server.flush()
    server.stop()
    patch_zone(t, server, zone, script)
    server.start()
    new_serial = server.zone_wait(zone)
    signed = new_serial != serial
    if signed != resign:
        set_err("Invalid state after %s change" % name)
        break
    serial = new_serial
t.stop()
| jkadlec/knot-dns-zoneapi | tests-extra/tests/dnssec/case_sensitivity/test.py | Python | gpl-3.0 | 1,244 | 0.001608 |
"""
WSGI config for WebAppPublic project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
from django.core.wsgi import get_wsgi_application
from whitenoise.django import DjangoWhiteNoise
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "config.settings.production"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.production")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
application = get_wsgi_application()
# Use Whitenoise to serve static files
# See: https://whitenoise.readthedocs.org/
application = DjangoWhiteNoise(application)
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
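# A minimal sketch of such middleware (illustrative only; the names below are
# not part of this project): a callable that wraps `application`, inspects or
# tweaks the WSGI environ, and then delegates to the wrapped app.
# def example_header_middleware(app):
#     def wrapper(environ, start_response):
#         environ.setdefault('HTTP_X_EXAMPLE', '1')  # hypothetical header tweak
#         return app(environ, start_response)
#     return wrapper
# application = example_header_middleware(application)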
| wesley1001/WeVoteServer | config/wsgi.py | Python | bsd-3-clause | 1,622 | 0 |
# https://oj.leetcode.com/problems/count-and-say/
# 10:56 - 11:11
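# The "count and say" sequence reads the previous term aloud: "1" is "one 1"
# -> "11", "11" is "two 1s" -> "21", "21" is "one 2, one 1" -> "1211",
# then "111221", and so on; countAndSay(n) returns the n-th term.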
class Solution:
    # @return a string
    def countAndSay(self, n):
        result = "1"
        # n == 1, result == '1', not when n == 0
        for i in xrange(1, n):
            last, count, nextResult = result[0], 1, ""
            for j in xrange(1, len(result)):
                curr = result[j]
                if last != curr:
                    nextResult += str(count) + str(last)
                    count = 0
                count += 1
                last = curr
            nextResult += str(count) + str(last)
            result = nextResult
        return result
s = Solution()
print s.countAndSay(1), '1'
print s.countAndSay(2), '11'
print s.countAndSay(3), '21'
print s.countAndSay(4), '1211'
print s.countAndSay(5), '111221'
| yaoxuanw007/forfun | leetcode/python/countAndSay.py | Python | mit | 720 | 0.0125 |
# Randomly generates data
import json, random, copy
data = {
    'tweets': {},
    'events': {},
    'tweetsHeat': [],
    'eventsHeat': []
}
tweetGeo = {
    "type": "FeatureCollection",
    "features": [],
    "id": "tweetsyoulike.c22ab257"
}
tfeature = {
    "geometry": {
        "type": "Point",
        "coordinates": [120.856705, 14.414455]
    },
    "type": "Feature",
    "id": "55cd1bc45882980ff072054c",
    "properties": {
        "name": "jayzee guevarra",
        "time": "Thu Aug 13 22:35:49 +0000 2015",
        "importance": 0.2995732273553991,
        "text": "Sweat is body fat crying right??? (@ Boulevard Fitness) https://t.co/rbRHRxzqjG",
        "media_url": [],
        "id": "55cd1bc05882980ff072054b",
        "location": "Haiti Cherie"
    }
}
eventGeo = {
    "type": "FeatureCollection",
    "features": [],
    "id": "tweetsyoulike.c22ab257"
}
efeature = {
    "geometry": {
        "type": "Point",
        "coordinates": [120.856705, 14.414455]
    },
    "type": "Feature",
    "id": "55cd1bc45882980ff072054c",
    "properties": {
        "name": "jayzee guevarra",
        "time": "Thu Aug 13 22:35:49 +0000 2015",
        "text": "Sweat is body fat crying right??? (@ Boulevard Fitness) https://t.co/rbRHRxzqjG",
        "media_url": [],
        "id": "55cd1bc05882980ff072054b",
        "location": "Haiti Cherie"
    }
}
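# The loop below scatters 100 jittered copies of the template features roughly
# +/- 5 degrees around the base coordinate and records matching [lat, lng, weight]
# triples for the heat lists (weight = importance for tweets, 1 for events).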
for i in range(0, 100):
    tfea = copy.deepcopy(tfeature)
    tfea['properties']['importance'] = random.random()
    coordi = []
    coordi.append(tfeature['geometry']['coordinates'][1] + (random.random() - 0.5) * 10)
    coordi.append(tfeature['geometry']['coordinates'][0] + (random.random() - 0.5) * 10)
    tfea['geometry']['coordinates'][0] = coordi[1]
    tfea['geometry']['coordinates'][1] = coordi[0]
    tweetGeo['features'].append(tfea)
    coordi.append(tfea['properties']['importance'])
    data['tweetsHeat'].append(coordi)
    efea = copy.deepcopy(efeature)
    coordi = []
    coordi.append(efeature['geometry']['coordinates'][1] + (random.random() - 0.5) * 10)
    coordi.append(efeature['geometry']['coordinates'][0] + (random.random() - 0.5) * 10)
    efea['geometry']['coordinates'][0] = coordi[1]
    efea['geometry']['coordinates'][1] = coordi[0]
    eventGeo['features'].append(efea)
    coordi.append(1)
    data['eventsHeat'].append(coordi)
data['tweets'] = tweetGeo
data['events'] = eventGeo
f = open("geo.json", "w")
json.dump(data, f)
f.close() | spatial-computing/geotweets | data/tool/test/jsongener.py | Python | mit | 2,441 | 0.003277 |
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging.config
import os.path
from bndl.util.conf import Config, String
from bndl.util.log import install_trace_logging
from bndl.util.objects import LazyObject
# Expose a global BNDL configuration
conf = LazyObject(Config)
# Configure Logging
logging_conf = String('logging.conf')
install_trace_logging()
logging.captureWarnings(True)
if os.path.exists(conf['bndl.logging_conf']):
    logging.config.fileConfig(conf['bndl.logging_conf'], disable_existing_loggers=False)
# BNDL version info
__version_info__ = (0, 7, 0, 'dev2')
__version__ = '.'.join(map(str, __version_info__))
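# For the tuple above this yields the version string '0.7.0.dev2'.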
| bndl/bndl | bndl/__init__.py | Python | apache-2.0 | 1,142 | 0.000876 |
# -*- coding: utf-8 -*-
import pymongo
from config import MONGO_STRING
client = pymongo.MongoClient(MONGO_STRING, tz_aware=True)
db = client['yo-water'] | YoApp/yo-water-tracker | db.py | Python | mit | 154 | 0.006494 |
#
# (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
from ansible.compat.tests.mock import patch
from ansible.modules.network.onyx import onyx_lldp
from units.modules.utils import set_module_args
from .onyx_module import TestOnyxModule, load_fixture
class TestOnyxInterfaceModule(TestOnyxModule):
    module = onyx_lldp
    def setUp(self):
        super(TestOnyxInterfaceModule, self).setUp()
        self.mock_get_config = patch.object(
            onyx_lldp.OnyxLldpModule, "_get_lldp_config")
        self.get_config = self.mock_get_config.start()
        self.mock_load_config = patch(
            'ansible.module_utils.network.onyx.onyx.load_config')
        self.load_config = self.mock_load_config.start()
    def tearDown(self):
        super(TestOnyxInterfaceModule, self).tearDown()
        self.mock_get_config.stop()
        self.mock_load_config.stop()
    def load_fixtures(self, commands=None, transport='cli'):
        if commands == ['lldp']:
            self.get_config.return_value = None
        else:
            config_file = 'onyx_lldp_show.cfg'
            self.get_config.return_value = load_fixture(config_file)
        self.load_config.return_value = None
    def test_lldp_no_change(self):
        set_module_args(dict())
        self.execute_module(changed=False)
    def test_lldp_disable(self):
        set_module_args(dict(state='absent'))
        commands = ['no lldp']
        self.execute_module(changed=True, commands=commands)
    def test_lldp_enable(self):
        set_module_args(dict(state='present'))
        commands = ['lldp']
        self.execute_module(changed=True, commands=commands)
| photoninger/ansible | test/units/modules/network/onyx/test_onyx_lldp.py | Python | gpl-3.0 | 2,404 | 0 |